Async/await makes asynchronous code readable. Here's how to use it effectively and avoid common pitfalls.
Basic Usage#
// An async function always returns a Promise
async function fetchUser(id: string): Promise<User> {
  const response = await fetch(`/api/users/${id}`);
  return await response.json();
}

// Equivalent Promise version
function fetchUserPromise(id: string): Promise<User> {
  return fetch(`/api/users/${id}`).then(response => response.json());
}

// Usage
const user = await fetchUser('123');
console.log(user.name);

// Or with .then()
fetchUser('123').then(user => console.log(user.name));

Error Handling#
// Try-catch for async errors
async function fetchData(): Promise<Data> {
  try {
    const response = await fetch('/api/data');

    if (!response.ok) {
      throw new Error(`HTTP ${response.status}`);
    }

    // `return await` (not a bare `return`) so a rejection from json()
    // is still caught by this try/catch.
    return await response.json();
  } catch (error) {
    console.error('Fetch failed:', error);
    throw error; // Re-throw to propagate
  }
}

// Handle at call site
try {
  const data = await fetchData();
} catch (error) {
  showErrorMessage('Failed to load data');
}

// Or with .catch()
fetchData()
  .then(data => processData(data))
  .catch(error => handleError(error));

// Finally for cleanup
async function withCleanup(): Promise<void> {
  const connection = await openConnection();
  try {
    await performOperation(connection);
  } finally {
    await connection.close(); // Always runs
  }
}

Parallel Execution#
// Sequential (slow)
async function sequential(): Promise<[User, Posts, Comments]> {
  const user = await fetchUser();         // Wait...
  const posts = await fetchPosts();       // Then wait...
  const comments = await fetchComments(); // Then wait...
  return [user, posts, comments];
}

// Parallel (fast) — all three requests start immediately
async function parallel(): Promise<[User, Posts, Comments]> {
  return Promise.all([
    fetchUser(),
    fetchPosts(),
    fetchComments(),
  ]);
}

// Promise.all fails fast - if one fails, all fail
try {
  const results = await Promise.all([
    fetchUser(),     // Succeeds
    fetchPosts(),    // Fails
    fetchComments(), // Never completes
  ]);
} catch (error) {
  // First error received
}

// Promise.allSettled - wait for all, get all results
const results = await Promise.allSettled([
  fetchUser(),
  fetchPosts(),
  fetchComments(),
]);

for (const [index, outcome] of results.entries()) {
  if (outcome.status === 'fulfilled') {
    console.log(`Request ${index} succeeded:`, outcome.value);
  } else {
    console.log(`Request ${index} failed:`, outcome.reason);
  }
}

Racing and Timeouts#
1// First to resolve wins
2const fastest = await Promise.race([
3 fetchFromServer1(),
4 fetchFromServer2(),
5]);
6
7// Timeout pattern
8function withTimeout<T>(
9 promise: Promise<T>,
10 ms: number
11): Promise<T> {
12 const timeout = new Promise<never>((_, reject) => {
13 setTimeout(() => reject(new Error('Timeout')), ms);
14 });
15
16 return Promise.race([promise, timeout]);
17}
18
19// Usage
20try {
21 const data = await withTimeout(fetchData(), 5000);
22} catch (error) {
23 if (error.message === 'Timeout') {
24 console.log('Request timed out');
25 }
26}
27
28// Promise.any - first success wins
29const result = await Promise.any([
30 fetchFromPrimary(),
31 fetchFromBackup(),
32 fetchFromCDN(),
33]);
34// Returns first successful result
35// Only throws if ALL failSequential Processing#
1// Process array items sequentially
2async function processSequentially(items: Item[]): Promise<Result[]> {
3 const results: Result[] = [];
4
5 for (const item of items) {
6 const result = await processItem(item);
7 results.push(result);
8 }
9
10 return results;
11}
12
13// Using reduce
14async function processWithReduce(items: Item[]): Promise<Result[]> {
15 return items.reduce(async (accPromise, item) => {
16 const acc = await accPromise;
17 const result = await processItem(item);
18 return [...acc, result];
19 }, Promise.resolve([] as Result[]));
20}
21
22// For-await-of with async iterators
23async function* fetchPages(url: string): AsyncGenerator<Page> {
24 let nextUrl: string | null = url;
25
26 while (nextUrl) {
27 const response = await fetch(nextUrl);
28 const data = await response.json();
29 yield data.items;
30 nextUrl = data.nextPage;
31 }
32}
33
// Consume the async generator one page at a time
for await (const page of fetchPages('/api/items')) {
  console.log('Got page:', page);
}

Controlled Concurrency#
1// Process with limited concurrency
2async function processWithLimit<T, R>(
3 items: T[],
4 fn: (item: T) => Promise<R>,
5 limit: number
6): Promise<R[]> {
7 const results: R[] = [];
8 const executing: Promise<void>[] = [];
9
10 for (const item of items) {
11 const promise = fn(item).then(result => {
12 results.push(result);
13 });
14
15 executing.push(promise);
16
17 if (executing.length >= limit) {
18 await Promise.race(executing);
19 // Remove completed promises
20 executing.splice(
21 executing.findIndex(p => p === promise),
22 1
23 );
24 }
25 }
26
27 await Promise.all(executing);
28 return results;
29}
30
// Usage: process 100 items, 5 at a time
const results = await processWithLimit(items, processItem, 5);

// Using the p-limit library instead
import pLimit from 'p-limit';

const limit = pLimit(5);
const results = await Promise.all(
  items.map(item => limit(() => processItem(item)))
);

Retry Pattern#
1async function retry<T>(
2 fn: () => Promise<T>,
3 options: {
4 maxAttempts: number;
5 delay: number;
6 backoff?: number;
7 }
8): Promise<T> {
9 const { maxAttempts, delay, backoff = 2 } = options;
10 let lastError: Error;
11 let currentDelay = delay;
12
13 for (let attempt = 1; attempt <= maxAttempts; attempt++) {
14 try {
15 return await fn();
16 } catch (error) {
17 lastError = error as Error;
18 console.log(`Attempt ${attempt} failed:`, error);
19
20 if (attempt < maxAttempts) {
21 await sleep(currentDelay);
22 currentDelay *= backoff;
23 }
24 }
25 }
26
27 throw lastError!;
28}
29
30function sleep(ms: number): Promise<void> {
31 return new Promise(resolve => setTimeout(resolve, ms));
32}
33
// Usage
const data = await retry(() => fetchData(), {
  maxAttempts: 3,
  delay: 1000,
  backoff: 2,
});

Common Pitfalls#
// ❌ Forgetting await
async function bad() {
  const user = fetchUser(); // Returns Promise, not User!
  console.log(user.name); // undefined
}

// ❌ Await in forEach (doesn't wait)
items.forEach(async item => {
  await processItem(item); // These run in parallel
});

// ✓ Use for...of for sequential
for (const item of items) {
  await processItem(item);
}

// ✓ Or Promise.all for parallel
await Promise.all(items.map(item => processItem(item)));

// ❌ Unnecessary async
async function unnecessary(x: number) {
  return x * 2; // Don't need async for sync operations
}

// ✓ Just return the value
function better(x: number) {
  return x * 2;
}

// ❌ Not handling errors
async function risky() {
  const data = await fetchData(); // Could throw!
  return data;
}

// ✓ Handle errors appropriately
async function safe() {
  try {
    return await fetchData();
  } catch (error) {
    return defaultData;
  }
}

Best Practices#
Error Handling:
✓ Always handle potential errors
✓ Use try-catch for async functions
✓ Propagate errors appropriately
✓ Provide meaningful error messages
Performance:
✓ Use Promise.all for parallel operations
✓ Avoid unnecessary awaits
✓ Consider Promise.allSettled for resilience
✓ Implement timeouts for external calls
Code Quality:
✓ Keep async functions focused
✓ Extract common patterns (retry, timeout)
✓ Use TypeScript for better type safety
✓ Avoid mixing callbacks and async/await
Conclusion#
Async/await simplifies asynchronous code but requires understanding of underlying Promises. Use parallel execution when possible, handle errors properly, and implement patterns like retry and timeout for resilient applications. Avoid common pitfalls like forgetting await or using forEach with async callbacks.