The zlib module provides compression using Gzip and Deflate/Inflate algorithms. Here's how to use it.
Basic Compression#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

// Promisified versions of the callback-based zlib APIs
const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);
const deflate = promisify(zlib.deflate);
const inflate = promisify(zlib.inflate);

// Compress a UTF-8 string with gzip and report the size savings.
async function compressString(input) {
  const buffer = Buffer.from(input, 'utf8');
  const compressed = await gzip(buffer);
  console.log(`Original: ${buffer.length} bytes`);
  console.log(`Compressed: ${compressed.length} bytes`);
  return compressed;
}

// Decompress a gzip buffer back to a UTF-8 string.
async function decompressString(compressed) {
  const decompressed = await gunzip(compressed);
  return decompressed.toString('utf8');
}
24
25// Usage
26async function example() {
27 const text = 'Hello, World! '.repeat(100);
28 const compressed = await compressString(text);
29 const original = await decompressString(compressed);
30 console.log(original === text); // true
31}Sync Methods#
import zlib from 'node:zlib';

// Synchronous compression — fine for small payloads; avoid on large data
const input = 'Data to compress';
const buffer = Buffer.from(input);

// Gzip
const gzipped = zlib.gzipSync(buffer);
const gunzipped = zlib.gunzipSync(gzipped);

// Deflate
const deflated = zlib.deflateSync(buffer);
const inflated = zlib.inflateSync(deflated);

// Brotli (Node 11.7+)
const brotli = zlib.brotliCompressSync(buffer);
const unbrotli = zlib.brotliDecompressSync(brotli);

console.log(gunzipped.toString()); // 'Data to compress'
console.log(inflated.toString()); // 'Data to compress'
console.log(unbrotli.toString()); // 'Data to compress'

Stream Compression#
import zlib from 'node:zlib';
import fs from 'node:fs';
import { pipeline } from 'node:stream/promises';

// Compress a file to gzip with backpressure-aware streaming;
// pipeline() propagates errors and destroys all streams on failure.
async function compressFile(inputPath, outputPath) {
  const gzip = zlib.createGzip();
  const source = fs.createReadStream(inputPath);
  const destination = fs.createWriteStream(outputPath);

  await pipeline(source, gzip, destination);
  console.log(`Compressed ${inputPath} to ${outputPath}`);
}

// Decompress a gzip file back to its original contents.
async function decompressFile(inputPath, outputPath) {
  const gunzip = zlib.createGunzip();
  const source = fs.createReadStream(inputPath);
  const destination = fs.createWriteStream(outputPath);

  await pipeline(source, gunzip, destination);
  console.log(`Decompressed ${inputPath} to ${outputPath}`);
}
24
25// Usage
26await compressFile('large-file.txt', 'large-file.txt.gz');
27await decompressFile('large-file.txt.gz', 'restored-file.txt');HTTP Compression#
import http from 'node:http';
import zlib from 'node:zlib';
import fs from 'node:fs';

// Content negotiation: pick the best encoding the client
// advertises in Accept-Encoding (preference: br > gzip > deflate).
const server = http.createServer((req, res) => {
  const acceptEncoding = req.headers['accept-encoding'] || '';

  const raw = fs.createReadStream('index.html');

  // Check client support
  if (acceptEncoding.includes('br')) {
    res.writeHead(200, { 'Content-Encoding': 'br' });
    raw.pipe(zlib.createBrotliCompress()).pipe(res);
  } else if (acceptEncoding.includes('gzip')) {
    res.writeHead(200, { 'Content-Encoding': 'gzip' });
    raw.pipe(zlib.createGzip()).pipe(res);
  } else if (acceptEncoding.includes('deflate')) {
    res.writeHead(200, { 'Content-Encoding': 'deflate' });
    raw.pipe(zlib.createDeflate()).pipe(res);
  } else {
    // No supported encoding: send the file uncompressed
    res.writeHead(200);
    raw.pipe(res);
  }
});

server.listen(3000);
27
28// Client request with compression
29async function fetchCompressed(url) {
30 const response = await fetch(url, {
31 headers: {
32 'Accept-Encoding': 'gzip, deflate, br',
33 },
34 });
35
36 // fetch() automatically decompresses
37 return response.text();
38}Compression Options#
import zlib from 'node:zlib';

// Gzip with options
const gzip = zlib.createGzip({
  level: 9, // 0-9, higher = more compression (and more CPU)
  memLevel: 8, // 1-9, memory usage
  strategy: zlib.constants.Z_DEFAULT_STRATEGY,
});

// Compression levels
const levels = {
  Z_NO_COMPRESSION: 0,
  Z_BEST_SPEED: 1,
  Z_BEST_COMPRESSION: 9,
  Z_DEFAULT_COMPRESSION: -1,
};

// Strategies
const strategies = {
  Z_FILTERED: zlib.constants.Z_FILTERED,
  Z_HUFFMAN_ONLY: zlib.constants.Z_HUFFMAN_ONLY,
  Z_RLE: zlib.constants.Z_RLE,
  Z_FIXED: zlib.constants.Z_FIXED,
  Z_DEFAULT_STRATEGY: zlib.constants.Z_DEFAULT_STRATEGY,
};

// Brotli options
const brotli = zlib.createBrotliCompress({
  params: {
    [zlib.constants.BROTLI_PARAM_QUALITY]: 11, // 0-11
    [zlib.constants.BROTLI_PARAM_SIZE_HINT]: 1000,
  },
});
34
35// Compare compression methods
36async function compareCompression(data) {
37 const buffer = Buffer.from(data);
38
39 const gzipped = zlib.gzipSync(buffer, { level: 9 });
40 const deflated = zlib.deflateSync(buffer, { level: 9 });
41 const brotli = zlib.brotliCompressSync(buffer);
42
43 console.log(`Original: ${buffer.length} bytes`);
44 console.log(`Gzip: ${gzipped.length} bytes`);
45 console.log(`Deflate: ${deflated.length} bytes`);
46 console.log(`Brotli: ${brotli.length} bytes`);
47}Error Handling#
import zlib from 'node:zlib';
import { pipeline } from 'node:stream/promises';
import fs from 'node:fs';

// Stream error handling: pipeline() rejects on any stream error,
// so a single try/catch covers read, compress, and write failures.
async function safeCompress(inputPath, outputPath) {
  try {
    await pipeline(
      fs.createReadStream(inputPath),
      zlib.createGzip(),
      fs.createWriteStream(outputPath)
    );
    console.log('Compression complete');
  } catch (error) {
    console.error('Compression failed:', error.message);

    // Clean up partial output (best-effort; ignore if it never existed)
    try {
      fs.unlinkSync(outputPath);
    } catch {}
  }
}

// Callback error handling
function compressWithCallback(data, callback) {
  zlib.gzip(data, (err, result) => {
    if (err) {
      callback(err, null);
      return;
    }
    callback(null, result);
  });
}
34
35// Handle corrupted data
36async function safeDecompress(compressed) {
37 try {
38 const gunzip = require('util').promisify(zlib.gunzip);
39 return await gunzip(compressed);
40 } catch (error) {
41 if (error.code === 'Z_DATA_ERROR') {
42 throw new Error('Invalid or corrupted compressed data');
43 }
44 throw error;
45 }
46}Chunked Processing#
import zlib from 'node:zlib';
import { Transform } from 'node:stream';

// Pass-through Transform that observes and records each chunk,
// useful for inspecting data flowing out of a decompressor.
class ChunkProcessor extends Transform {
  constructor(options) {
    super(options);
    this.chunks = []; // every chunk seen so far, in order
  }

  _transform(chunk, encoding, callback) {
    // Log, record, and forward the chunk unchanged
    console.log(`Processing chunk: ${chunk.length} bytes`);
    this.chunks.push(chunk);
    callback(null, chunk);
  }

  _flush(callback) {
    console.log(`Total chunks: ${this.chunks.length}`);
    callback();
  }
}

// Usage with compression
async function processCompressed(inputPath) {
  // FIX: `fs` was used below but never imported in this example
  const { default: fs } = await import('node:fs');
  const { pipeline } = await import('node:stream/promises');

  await pipeline(
    fs.createReadStream(inputPath),
    zlib.createGunzip(),
    new ChunkProcessor(),
    fs.createWriteStream('/dev/null') // discard output; not portable to Windows
  );
}
35
36// Memory-efficient large file compression
37async function compressLargeFile(inputPath, outputPath) {
38 const gzip = zlib.createGzip({
39 chunkSize: 64 * 1024, // 64KB chunks
40 level: 6,
41 });
42
43 let bytesRead = 0;
44 const input = fs.createReadStream(inputPath, {
45 highWaterMark: 64 * 1024,
46 });
47
48 input.on('data', (chunk) => {
49 bytesRead += chunk.length;
50 process.stdout.write(`\rProcessed: ${bytesRead} bytes`);
51 });
52
53 await pipeline(input, gzip, fs.createWriteStream(outputPath));
54 console.log('\nDone');
55}JSON Compression#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);

// Compress any JSON-serializable value to a base64 string
// (convenient for caches, cookies, or text-only transports).
async function compressJSON(data) {
  const json = JSON.stringify(data);
  const compressed = await gzip(Buffer.from(json));
  return compressed.toString('base64');
}

// Decompress a base64 string produced by compressJSON
async function decompressJSON(compressed) {
  const buffer = Buffer.from(compressed, 'base64');
  const decompressed = await gunzip(buffer);
  return JSON.parse(decompressed.toString());
}

// Usage
async function example() {
  const data = {
    users: Array(100)
      .fill(null)
      .map((_, i) => ({
        id: i,
        name: `User ${i}`,
        email: `user${i}@example.com`,
      })),
  };

  const compressed = await compressJSON(data);
  console.log(`Compressed length: ${compressed.length}`);

  const restored = await decompressJSON(compressed);
  console.log(`Users: ${restored.users.length}`);
}
39
40// Cache with compression
41class CompressedCache {
42 constructor() {
43 this.cache = new Map();
44 }
45
46 async set(key, value) {
47 const compressed = await compressJSON(value);
48 this.cache.set(key, compressed);
49 }
50
51 async get(key) {
52 const compressed = this.cache.get(key);
53 if (!compressed) return null;
54 return decompressJSON(compressed);
55 }
56
57 has(key) {
58 return this.cache.has(key);
59 }
60}Archive Operations#
const zlib = require('zlib');
const tar = require('tar'); // npm package
const fs = require('fs');

// Create tar.gz archive
async function createArchive(sourceDir, outputPath) {
  const options = { gzip: true, file: outputPath };
  await tar.create(options, [sourceDir]);
  console.log(`Created archive: ${outputPath}`);
}

// Extract tar.gz archive
async function extractArchive(archivePath, destDir) {
  const options = { file: archivePath, cwd: destDir };
  await tar.extract(options);
  console.log(`Extracted to: ${destDir}`);
}

// Manual tar.gz with streams
const { pipeline } = require('stream/promises');

async function createGzipArchive(inputPath, outputPath) {
  // Build an uncompressed tar stream, then gzip it ourselves
  const tarStream = tar.create({ gzip: false }, [inputPath]);
  await pipeline(tarStream, zlib.createGzip(), fs.createWriteStream(outputPath));
}

// List archive contents without extracting anything
async function listArchive(archivePath) {
  const entries = [];

  await tar.list({
    file: archivePath,
    onentry: (entry) => {
      entries.push({ path: entry.path, size: entry.size, type: entry.type });
    },
  });

  return entries;
}
const zlib = require('zlib');
const { Worker, isMainThread, parentPort } = require('worker_threads');

// Worker thread compression: this file doubles as its own worker script.
if (isMainThread) {
  async function compressInWorker(data) {
    return new Promise((resolve, reject) => {
      const worker = new Worker(__filename);
      // Register handlers before posting so no event can be missed
      worker.once('message', (result) => {
        worker.terminate(); // FIX: previously the worker thread was never freed
        resolve(result);
      });
      worker.once('error', reject);
      worker.postMessage(data);
    });
  }
} else {
  parentPort.on('message', (data) => {
    const compressed = zlib.gzipSync(data);
    parentPort.postMessage(compressed);
  });
}
20
21// Compression pool
22class CompressionPool {
23 constructor(size = 4) {
24 this.workers = [];
25 this.queue = [];
26 this.available = [];
27
28 for (let i = 0; i < size; i++) {
29 const worker = new Worker('./compress-worker.js');
30 this.workers.push(worker);
31 this.available.push(worker);
32 }
33 }
34
35 async compress(data) {
36 const worker = await this.getWorker();
37
38 return new Promise((resolve, reject) => {
39 worker.once('message', (result) => {
40 this.releaseWorker(worker);
41 resolve(result);
42 });
43 worker.once('error', (err) => {
44 this.releaseWorker(worker);
45 reject(err);
46 });
47 worker.postMessage(data);
48 });
49 }
50
51 async getWorker() {
52 if (this.available.length > 0) {
53 return this.available.pop();
54 }
55
56 return new Promise((resolve) => {
57 this.queue.push(resolve);
58 });
59 }
60
61 releaseWorker(worker) {
62 if (this.queue.length > 0) {
63 const next = this.queue.shift();
64 next(worker);
65 } else {
66 this.available.push(worker);
67 }
68 }
69}Best Practices#
Compression Choice:
✓ Gzip for general use
✓ Brotli for web content
✓ Deflate for compatibility
✓ Consider content type
Performance:
✓ Use streams for large files
✓ Choose appropriate level
✓ Consider CPU vs size tradeoff
✓ Use worker threads if needed
Error Handling:
✓ Handle corrupted data
✓ Clean up partial files
✓ Validate before decompressing
✓ Set reasonable timeouts
Avoid:
✗ Sync methods for large data
✗ Maximum compression always
✗ Ignoring memory limits
✗ Re-compressing compressed data
Conclusion#
The zlib module provides efficient compression using Gzip, Deflate, and Brotli algorithms. Use streams for large files, choose appropriate compression levels, and implement proper error handling. Brotli typically achieves better compression for web content, while Gzip offers wider compatibility.