Connection pooling reuses established database connections instead of opening a new one per query, avoiding repeated TCP + authentication handshakes and improving throughput.
Node.js pg Pool#
import { Pool } from 'pg';
import type { PoolClient, QueryResult } from 'pg';
2
3const pool = new Pool({
4 host: process.env.DB_HOST,
5 database: process.env.DB_NAME,
6 max: 20, // Maximum connections
7 min: 5, // Minimum connections
8 idleTimeoutMillis: 30000,
9 connectionTimeoutMillis: 5000,
10});
11
12async function query(sql: string, params?: any[]) {
13 const client = await pool.connect();
14 try {
15 return await client.query(sql, params);
16 } finally {
17 client.release();
18 }
19}Transaction Helper#
1async function transaction<T>(fn: (client) => Promise<T>): Promise<T> {
2 const client = await pool.connect();
3 try {
4 await client.query('BEGIN');
5 const result = await fn(client);
6 await client.query('COMMIT');
7 return result;
8 } catch (error) {
9 await client.query('ROLLBACK');
10 throw error;
11 } finally {
12 client.release();
13 }
14}PgBouncer for External Pooling#
[pgbouncer]
pool_mode = transaction
max_client_conn = 1000
default_pool_size = 20

Pool Sizing#
connections = (cpu_cores * 2) + effective_spindle_count
This is the classic PostgreSQL sizing heuristic; in practice, start with 10–20 connections per application instance and adjust based on monitoring.