Event Loop & Asynchronous Programming
Master Node.js event loop and asynchronous programming patterns with callbacks, promises, and async/await
60 min • By Priygop Team • Last updated: Feb 2026
Understanding the Event Loop
The event loop is the core of Node.js's asynchronous, non-blocking I/O model. It allows Node.js to perform non-blocking I/O operations despite JavaScript being single-threaded. The event loop continuously checks for pending operations and executes them when they're ready.
Event Loop Phases
- Timers - Execute callbacks scheduled by setTimeout() and setInterval()
- Pending callbacks - Execute I/O callbacks deferred to the next loop iteration
- Idle, prepare - Used internally by Node.js
- Poll - Retrieve new I/O events and execute I/O related callbacks
- Check - Execute setImmediate() callbacks
- Close callbacks - Execute close event callbacks
Asynchronous Programming Patterns
Example
// Callback pattern: the error-first callback is the classic Node.js async style.
const fs = require('fs');
fs.readFile('file.txt', 'utf8', function (error, contents) {
  if (error) {
    console.error('Error reading file:', error);
    return;
  }
  console.log('File content:', contents);
});
// Promise pattern: fs.promises exposes promise-returning versions of the fs API.
// Bound to a distinct name so this example can coexist with the callback example
// above — redeclaring `const fs` in the same scope is a SyntaxError.
const fsPromises = require('fs').promises;
fsPromises.readFile('file.txt', 'utf8')
  .then(data => {
    console.log('File content:', data);
  })
  .catch(err => {
    console.error('Error reading file:', err);
  });
// Async/await pattern: same promise API, flatter control flow.
// Distinct binding name avoids redeclaring `fs` from the earlier examples.
const fsp = require('fs').promises;
async function readFile() {
  try {
    const data = await fsp.readFile('file.txt', 'utf8');
    console.log('File content:', data);
  } catch (err) {
    console.error('Error reading file:', err);
  }
}
readFile();
// Event emitter pattern
const EventEmitter = require('events');
class MyEmitter extends EventEmitter {}
const myEmitter = new MyEmitter();
myEmitter.on('event', (arg) => {
console.log('Event occurred with argument:', arg);
});
myEmitter.emit('event', 'Hello World!');Common Asynchronous Patterns
- Callback Hell: Nested callbacks that become hard to read and maintain
- Promise Chaining: Using .then() to chain multiple asynchronous operations
- Async/Await: Modern syntax that makes asynchronous code look synchronous
- Parallel Execution: Running multiple async operations simultaneously
- Sequential Execution: Running async operations one after another
- Error Handling: Proper error handling in asynchronous code
Performance Considerations
Example
// Performance comparison of different async patterns
const fs = require('fs').promises;
const path = require('path');
// Sequential execution (slower)
// Awaits each read before starting the next, so total time is the sum
// of the three individual read times.
async function readFilesSequentially() {
  const start = Date.now();
  const contents = [];
  for (const name of ['file1.txt', 'file2.txt', 'file3.txt']) {
    contents.push(await fs.readFile(name, 'utf8'));
  }
  console.log('Sequential time:', Date.now() - start, 'ms');
  return contents;
}
// Parallel execution (faster)
// Starts all three reads at once; Promise.all resolves with results in
// the same order as the input array.
async function readFilesParallel() {
  const start = Date.now();
  const names = ['file1.txt', 'file2.txt', 'file3.txt'];
  const contents = await Promise.all(
    names.map((name) => fs.readFile(name, 'utf8'))
  );
  console.log('Parallel time:', Date.now() - start, 'ms');
  return contents;
}
// Error handling in async operations
async function robustFileOperation() {
try {
const data = await fs.readFile('file.txt', 'utf8');
return data;
} catch (error) {
if (error.code === 'ENOENT') {
console.log('File not found, creating default content');
return 'Default content';
}
throw error; // Re-throw if it's not a file not found error
}
}Mini-Project: Async File Processor
Example
// Create a file processor that handles multiple async operations
const fs = require('fs').promises;
const path = require('path');
class AsyncFileProcessor {
  /**
   * @param {string} inputDir - Directory containing files to process.
   * @param {string} outputDir - Directory for processed files and the report.
   */
  constructor(inputDir, outputDir) {
    this.inputDir = inputDir;
    this.outputDir = outputDir;
  }

  /**
   * Processes every file in the input directory in parallel, then writes
   * a JSON summary report. Resolves with the per-file result objects;
   * rejects (after logging) if listing, reporting, or setup fails.
   */
  async processFiles() {
    try {
      // Ensure the output directory exists before any writes.
      await fs.mkdir(this.outputDir, { recursive: true });
      // Read all files in input directory
      const files = await fs.readdir(this.inputDir);
      console.log('Found files:', files);
      // Process files in parallel
      const results = await Promise.all(
        files.map(file => this.processFile(file))
      );
      // Generate summary report
      await this.generateReport(results);
      console.log('All files processed successfully');
      return results;
    } catch (error) {
      console.error('Error processing files:', error);
      throw error;
    }
  }

  /**
   * Reads one file, uppercases its content, and writes it to the output
   * directory as `processed_<filename>`. Never rejects: per-file failures
   * are captured in the returned result object so Promise.all in
   * processFiles is not short-circuited by a single bad file.
   */
  async processFile(filename) {
    const inputPath = path.join(this.inputDir, filename);
    // FIX: the template literal was mangled ("processed_$(unknown)") —
    // interpolate the filename into the output path.
    const outputPath = path.join(this.outputDir, `processed_${filename}`);
    try {
      // Read file content
      const content = await fs.readFile(inputPath, 'utf8');
      // Process content (example: convert to uppercase)
      const processedContent = content.toUpperCase();
      // Write processed content
      await fs.writeFile(outputPath, processedContent);
      return {
        filename,
        originalSize: content.length,
        processedSize: processedContent.length,
        status: 'success'
      };
    } catch (error) {
      return {
        filename,
        error: error.message,
        status: 'error'
      };
    }
  }

  /**
   * Writes processing_report.json to the output directory summarizing
   * successes and failures for the given result objects.
   */
  async generateReport(results) {
    const report = {
      timestamp: new Date().toISOString(),
      totalFiles: results.length,
      successful: results.filter(r => r.status === 'success').length,
      failed: results.filter(r => r.status === 'error').length,
      results: results
    };
    const reportPath = path.join(this.outputDir, 'processing_report.json');
    await fs.writeFile(reportPath, JSON.stringify(report, null, 2));
    console.log('Report generated:', reportPath);
  }
}
// Usage
function main() {
  const fileProcessor = new AsyncFileProcessor('./input', './output');
  // Return the promise so a caller could await completion; failures are
  // logged here rather than left as unhandled rejections.
  return fileProcessor.processFiles().catch((error) => {
    console.error('Processing failed:', error);
  });
}
// Run the processor
main();