
Using Stream API for Large File Processing

This snippet shows how to use Node.js streams to read and process a large file line by line without blocking the event loop.
const fs = require('fs');
const readline = require('readline');

// Create a read stream from the large file
const readStream = fs.createReadStream('largefile.txt', {
  encoding: 'utf-8'
});
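
// Handle read errors (e.g. the file does not exist) so they are reported
// instead of crashing the process with an unhandled 'error' event.
readStream.on('error', (err) => {
  console.error('Error reading largefile.txt:', err.message);
});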

// Create an interface for reading the file line by line
const rl = readline.createInterface({
  input: readStream,
  crlfDelay: Infinity // recognize both \n and \r\n as line breaks
});

// Event handler for 'line' event, triggered for each line read from the stream
rl.on('line', (line) => {
  processLine(line);
});

// Event handler for 'close' event, triggered when all lines have been read
rl.on('close', () => {
  console.log('File processing completed.');
});

function processLine(line) {
  // Placeholder: replace with your own per-line processing logic
  console.log(line);
}
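
As one hypothetical customization, processLine could filter and count lines that contain a keyword. The 'ERROR' keyword and the errorCount variable below are assumptions for illustration, not part of any API:

let errorCount = 0;

function processLine(line) {
  // Example logic: count and report lines that mention "ERROR"
  if (line.includes('ERROR')) {
    errorCount += 1;
    console.log(`Error #${errorCount}: ${line}`);
  }
}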
This snippet creates a readable stream for a file named 'largefile.txt' and processes it line by line, without loading the entire file into memory.
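
If you prefer async/await, the readline interface is also an async iterable, so the same approach can be sketched with a for await...of loop. The processLargeFile wrapper name below is just for illustration, and the loop reuses the processLine function defined above:

const fs = require('fs');
const readline = require('readline');

async function processLargeFile() {
  const rl = readline.createInterface({
    input: fs.createReadStream('largefile.txt', { encoding: 'utf-8' }),
    crlfDelay: Infinity
  });

  // The loop awaits each line as it becomes available, so the event loop stays free
  for await (const line of rl) {
    processLine(line);
  }

  console.log('File processing completed.');
}

processLargeFile().catch((err) => {
  console.error('File processing failed:', err);
});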