
Pipeline

pipeline, from the stream module, passes data from a readable stream, through one or more transform streams, into a final writable stream. It is a good fit whenever you need to chain streams together, because it forwards errors and reports completion through a single callback.
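To show the general call shape before the file examples, here is a minimal, self-contained sketch; the readable and writable here are simple stand-ins for real file or network streams:

const { pipeline, Readable, Writable } = require('stream');

// a tiny end-to-end run: a readable built from an array, piped into a
// writable that just logs each chunk
pipeline(
  Readable.from(['hello ', 'world']),
  new Writable({
    write(chunk, enc, next) {
      console.log('got chunk:', chunk.toString());
      next();
    }
  }),
  (err) => {
    if (err) console.error('pipeline failed', err);
    else console.log('pipeline succeeded');
  }
);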

Read A File, Uppercase The Content, Write To A File

// dependencies
const { pipeline, Transform } = require('stream');
const { join } = require('path');
const { createReadStream, createWriteStream } = require('fs');


// variables
const STREAM_INPUT_FILENAME = __filename;
const UPPERCASE_OUTPUT_FILENAME = 'out-upper-stream.txt';

console.log(`reading a stream from ${STREAM_INPUT_FILENAME} to ${UPPERCASE_OUTPUT_FILENAME}`);

// will get called after the pipeline finishes
function onPipelineDone(err) {
  if (err) {
    console.error(err);
    return;
  }
  console.log('finished writing!');
}

// the transform stream
function createUpperCaseStream() {
  return new Transform({
    transform(chunk, enc, next) {
      const upperCased = chunk.toString().toUpperCase();
      next(null, upperCased);
    }
  });
}

pipeline(
  createReadStream(STREAM_INPUT_FILENAME),
  createUpperCaseStream(),
  createWriteStream(join(__dirname, UPPERCASE_OUTPUT_FILENAME)),
  onPipelineDone
);
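The same flow also works with the promise-based pipeline from stream/promises (available in recent Node versions), which plays nicely with async/await. A minimal sketch reusing the streams from above:

const { pipeline: pipelinePromise } = require('stream/promises');

async function run() {
  // same three streams, but awaited instead of passing a callback
  await pipelinePromise(
    createReadStream(STREAM_INPUT_FILENAME),
    createUpperCaseStream(),
    createWriteStream(join(__dirname, UPPERCASE_OUTPUT_FILENAME))
  );
  console.log('finished writing!');
}

run().catch(console.error);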

Read A File, Compress The Content, Write To A File

This combines createReadStream and createWriteStream from the native fs module with createGzip from the native zlib module.

// Dependencies
const { createGzip } = require('zlib');
const { pipeline } = require('stream');
const {
  createReadStream,
  createWriteStream
} = require('fs');

// Vars
const SOURCE_FILE = 'big-text.txt';
const DEST_FILE = 'big-text.txt.gz';

// streams
const SRC_STREAM = createReadStream(SOURCE_FILE);
const DEST_STREAM = createWriteStream(DEST_FILE);
const gz = createGzip();

function onPipelineDone(err) {
  if (err) {
    console.error('PIPELINE ERR:', err);
    process.exitCode = 1;
  }
}

pipeline(
  SRC_STREAM,
  gz,
  DEST_STREAM,
  onPipelineDone
);
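Decompressing is the same pipeline in reverse, with zlib's createGunzip in the middle. A minimal sketch, assuming the compressed file from above exists; the output filename here is just an example:

const { createGunzip } = require('zlib');

// reverse the compression: read the .gz file, gunzip it, write plain text
pipeline(
  createReadStream(DEST_FILE),
  createGunzip(),
  createWriteStream('big-text.decompressed.txt'), // example output name
  (err) => {
    if (err) {
      console.error('GUNZIP PIPELINE ERR:', err);
      process.exitCode = 1;
    }
  }
);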