SlideShare ist ein Scribd-Unternehmen logo
1 von 192
Downloaden Sie, um offline zu lesen
K Y I V   2 0 1 9
Luciano Mammino (@loige)
IT’S ABOUT TIME TOIT’S ABOUT TIME TO
EMBRACE STREAMSEMBRACE STREAMS
  
loige.link/streams-kyiv
May 18th
1
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
// read entire file content
const content = readFileSync(src)
// write that content somewhere else
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
@loige2
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
// read entire file content
const content = readFileSync(src)
// write that content somewhere else
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
const {
readFileSync,
writeFileSync
} = require('fs')
// buffer-copy.js1
2
3
4
5
6
7
const [,, src, dest] = process.argv8
9
// read entire file content10
const content = readFileSync(src)11
12
// write that content somewhere else13
writeFileSync(dest, content)14
@loige2
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
// read entire file content
const content = readFileSync(src)
// write that content somewhere else
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
const {
readFileSync,
writeFileSync
} = require('fs')
// buffer-copy.js1
2
3
4
5
6
7
const [,, src, dest] = process.argv8
9
// read entire file content10
const content = readFileSync(src)11
12
// write that content somewhere else13
writeFileSync(dest, content)14
// read entire file content
const content = readFileSync(src)
// buffer-copy.js1
2
const {3
readFileSync,4
writeFileSync5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
10
11
12
// write that content somewhere else13
writeFileSync(dest, content)14
@loige2
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
// read entire file content
const content = readFileSync(src)
// write that content somewhere else
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
const {
readFileSync,
writeFileSync
} = require('fs')
// buffer-copy.js1
2
3
4
5
6
7
const [,, src, dest] = process.argv8
9
// read entire file content10
const content = readFileSync(src)11
12
// write that content somewhere else13
writeFileSync(dest, content)14
// read entire file content
const content = readFileSync(src)
// buffer-copy.js1
2
const {3
readFileSync,4
writeFileSync5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
10
11
12
// write that content somewhere else13
writeFileSync(dest, content)14
// write that content somewhere else
writeFileSync(dest, content)
// buffer-copy.js1
2
const {3
readFileSync,4
writeFileSync5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
// read entire file content10
const content = readFileSync(src)11
12
13
14
@loige2
@loige3
WE DO THIS ALL THE TIMEWE DO THIS ALL THE TIME
@loige3
WE DO THIS ALL THE TIMEWE DO THIS ALL THE TIME
AND IT'S OKAND IT'S OK
@loige3
WE DO THIS ALL THE TIMEWE DO THIS ALL THE TIME
AND IT'S OKAND IT'S OK
BUT SOMETIMES ...BUT SOMETIMES ...
@loige3
@loige
 ERR_FS_FILE_TOO_LARGE!  ERR_FS_FILE_TOO_LARGE! 
File size is greater than possible Buffer
4
BUT WHY?BUT WHY?
@loige5
IF BYTES IF BYTES WEREWERE BLOCKS... BLOCKS...@loige
6
MARIO CAN LIFTMARIO CAN LIFT
FEW BLOCKSFEW BLOCKS
@loige
7
BUT NOT TOO MANY...BUT NOT TOO MANY...@loige
?!
8
WHAT CAN WE DO IF WE HAVE TOWHAT CAN WE DO IF WE HAVE TO
MOVE MANY BLOCKS?MOVE MANY BLOCKS?
@loige9
WE CAN MOVE THEM ONE BY ONE!WE CAN MOVE THEM ONE BY ONE!
@loige
we stream them...
10
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
Cloud Architect
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
Cloud Architect
Blog: 
Twitter: 
GitHub:   
loige.co
@loige
@lmammino
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
Cloud Architect
Blog: 
Twitter: 
GitHub:   
loige.co
@loige
@lmammino
11
code: loige.link/streams-examples
loige.link/streams-kyiv
12
01. BUFFERS VS01. BUFFERS VS  
        STREAMS        STREAMS
@loige13
BUFFERBUFFER: DATA STRUCTURE TO STORE AND: DATA STRUCTURE TO STORE AND
TRANSFER ARBITRARY BINARY DATATRANSFER ARBITRARY BINARY DATA
@loige
*Note that this is loading all the content of the file in memory
*
14
STREAMSTREAM: ABSTRACT INTERFACE FOR: ABSTRACT INTERFACE FOR
WORKING WITH STREAMING DATAWORKING WITH STREAMING DATA
@loige
*It does not load all the data straight away
*
15
FILE COPY: FILE COPY: THE BUFFER WAYTHE BUFFER WAY
@loige
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
const content = readFileSync(src)
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
16
FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY
// stream-copy.js
const {
createReadStream,
createWriteStream
} = require('fs')
const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', (data) => destStream.write(data))
1
2
3
4
5
6
7
8
9
10
11
@loige
* Careful: this implementation is not optimal
*
17
FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY
// stream-copy.js
const {
createReadStream,
createWriteStream
} = require('fs')
const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', (data) => destStream.write(data))
1
2
3
4
5
6
7
8
9
10
11
createReadStream,
createWriteStream
// stream-copy.js1
2
const {3
4
5
} = require('fs')6
7
const [,, src, dest] = process.argv8
const srcStream = createReadStream(src)9
const destStream = createWriteStream(dest)10
srcStream.on('data', (data) => destStream.write(data))11
@loige
* Careful: this implementation is not optimal
*
17
FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY
// stream-copy.js
const {
createReadStream,
createWriteStream
} = require('fs')
const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', (data) => destStream.write(data))
1
2
3
4
5
6
7
8
9
10
11
createReadStream,
createWriteStream
// stream-copy.js1
2
const {3
4
5
} = require('fs')6
7
const [,, src, dest] = process.argv8
const srcStream = createReadStream(src)9
const destStream = createWriteStream(dest)10
srcStream.on('data', (data) => destStream.write(data))11
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
// stream-copy.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
10
srcStream.on('data', (data) => destStream.write(data))11
@loige
* Careful: this implementation is not optimal
*
17
FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY
// stream-copy.js
const {
createReadStream,
createWriteStream
} = require('fs')
const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', (data) => destStream.write(data))
1
2
3
4
5
6
7
8
9
10
11
createReadStream,
createWriteStream
// stream-copy.js1
2
const {3
4
5
} = require('fs')6
7
const [,, src, dest] = process.argv8
const srcStream = createReadStream(src)9
const destStream = createWriteStream(dest)10
srcStream.on('data', (data) => destStream.write(data))11
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
// stream-copy.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
10
srcStream.on('data', (data) => destStream.write(data))11 srcStream.on('data', (data) => destStream.write(data))
// stream-copy.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
7
const [,, src, dest] = process.argv8
const srcStream = createReadStream(src)9
const destStream = createWriteStream(dest)10
11
@loige
* Careful: this implementation is not optimal
*
17
MEMORY COMPARISON (~600MB FILE)MEMORY COMPARISON (~600MB FILE)
node --inspect-brk buffer-copy.js assets/poster.psd ~/Downloads/poster.psd
@loige18
MEMORY COMPARISON (~600MB FILE)MEMORY COMPARISON (~600MB FILE)
node --inspect-brk stream-copy.js assets/poster.psd ~/Downloads/poster.psd
@loige19
LET'S TRY WITH A BIG FILE (~10GB)LET'S TRY WITH A BIG FILE (~10GB)
@loige20
LET'S TRY WITH A BIG FILE (~10GB)LET'S TRY WITH A BIG FILE (~10GB)
node --inspect-brk stream-copy.js assets/the-matrix-hd.mkv ~/Downloads/the-matrix-hd.mkv
@loige21
 STREAMS VS BUFFERS  STREAMS VS BUFFERS 
Streams keep a low memory footprint
even with large amounts of data 
 
Streams allow you to process data as
soon as it arrives
@loige22
03. STREAM TYPES03. STREAM TYPES  
       & APIS       & APIS
@loige23
ALL STREAMS ARE ALL STREAMS ARE EVENT EMITTERSEVENT EMITTERS
A stream instance is an object that emits events when its internal
state changes, for instance:
s.on('readable', () => {}) // ready to be consumed
s.on('data', (chunk) => {}) // new data is available
s.on('error', (err) => {}) // some error happened
s.on('end', () => {}) // no more data available
The events available depend on the type of stream
@loige24
READABLEREADABLE STREAMS STREAMS
A readable stream represents a source from which data is consumed.
Examples:
fs readStream
process.stdin
HTTP response (client-side)
HTTP request (server-side)
AWS S3 GetObject (data field)
It supports two modes for data consumption: flowing and paused (or non-
flowing) mode.
@loige25
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
@loige26
@loige
1
2
3
Source data
Readable stream in
flowing mode
data listener
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
27
@loige
12
3
Source data
Readable stream in
flowing mode
Read
data listener
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
28
@loige
12
3
Source data
Readable stream in
flowing mode
data listener
data
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
29
@loige
2
3
Source data
Readable stream in
flowing mode
data listener
Read
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
30
@loige
2
3
Source data
Readable stream in
flowing mode
data listener
data
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
31
@loige
3
Source data
Readable stream in
flowing mode
data listener
Read
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
32
@loige
3
Source data
Readable stream in
flowing mode
data listener
data
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
33
@loige
Source data
Readable stream in
flowing mode
Read
data listener
(end)
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
34
@loige
Source data
Readable stream in
flowing mode
data listener
end
(end)
When no more data is available, end is emitted.
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
35
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
let counter = 0
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
9
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
let counter = 0
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
9
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
file.on('data', chunk => {
})
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
let counter = 0
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
9
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
file.on('data', chunk => {
})
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
for (let char of chunk.toString('utf8')) {
}
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
12
if (emojis.includes(char)) {13
counter++14
}15
16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
let counter = 0
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
9
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
file.on('data', chunk => {
})
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
for (let char of chunk.toString('utf8')) {
}
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
12
if (emojis.includes(char)) {13
counter++14
}15
16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
if (emojis.includes(char)) {
counter++
}
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
13
14
15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
loige.link/up-emojiart
@loige37
READABLE STREAMS AREREADABLE STREAMS ARE
ALSO ALSO ASYNC ITERATORSASYNC ITERATORS  
((NODE.JS 10+)NODE.JS 10+)
@loige38
// count-emojis-async-iterator.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
async function main () {
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
for await (let chunk of file) {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
}
console.log(`Found ${counter} emojis`)
}
main()
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21 @loige39
// count-emojis-async-iterator.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
async function main () {
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
for await (let chunk of file) {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
}
console.log(`Found ${counter} emojis`)
}
main()
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
for await (let chunk of file) {
}
// count-emojis-async-iterator.js1
const { createReadStream } = require('fs')2
const { EMOJI_MAP } = require('emoji') // from npm3
4
async function main () {5
const emojis = Object.keys(EMOJI_MAP)6
const file = createReadStream(process.argv[2])7
let counter = 08
9
10
for (let char of chunk.toString('utf8')) {11
if (emojis.includes(char)) {12
counter++13
}14
}15
16
17
console.log(`Found ${counter} emojis`)18
}19
20
main()21 @loige39
// count-emojis-async-iterator.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
async function main () {
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
for await (let chunk of file) {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
}
console.log(`Found ${counter} emojis`)
}
main()
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
for await (let chunk of file) {
}
// count-emojis-async-iterator.js1
const { createReadStream } = require('fs')2
const { EMOJI_MAP } = require('emoji') // from npm3
4
async function main () {5
const emojis = Object.keys(EMOJI_MAP)6
const file = createReadStream(process.argv[2])7
let counter = 08
9
10
for (let char of chunk.toString('utf8')) {11
if (emojis.includes(char)) {12
counter++13
}14
}15
16
17
console.log(`Found ${counter} emojis`)18
}19
20
main()21
async function main () {
}
// count-emojis-async-iterator.js1
const { createReadStream } = require('fs')2
const { EMOJI_MAP } = require('emoji') // from npm3
4
5
const emojis = Object.keys(EMOJI_MAP)6
const file = createReadStream(process.argv[2])7
let counter = 08
9
for await (let chunk of file) {10
for (let char of chunk.toString('utf8')) {11
if (emojis.includes(char)) {12
counter++13
}14
}15
}16
17
console.log(`Found ${counter} emojis`)18
19
20
main()21 @loige39
WRITABLEWRITABLE STREAMS STREAMS
A writable stream is an abstraction that allows you to write data to a destination
 
Examples:
fs writeStream
process.stdout, process.stderr
HTTP request (client-side)
HTTP response (server-side)
AWS S3 PutObject (body parameter)
@loige40
// writable-http-request.js
const http = require('http')
const req = http.request(
{
hostname: 'enx6b07hdu6cs.x.pipedream.net',
method: 'POST'
},
resp => {
console.log(`Server responded with "${resp.statusCode}"`)
}
)
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
req.write('writing some content...\n')
req.end('last write & close the stream')
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
@loige41
// writable-http-request.js
const http = require('http')
const req = http.request(
{
hostname: 'enx6b07hdu6cs.x.pipedream.net',
method: 'POST'
},
resp => {
console.log(`Server responded with "${resp.statusCode}"`)
}
)
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
req.write('writing some content...\n')
req.end('last write & close the stream')
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const req = http.request(
)
// writable-http-request.js1
const http = require('http')2
3
4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
12
13
req.on('finish', () => console.log('request sent'))14
req.on('close', () => console.log('Connection closed'))15
req.on('error', err => console.error(`Request failed: ${err}`))16
17
req.write('writing some content...\n')18
req.end('last write & close the stream')19
@loige41
// writable-http-request.js
const http = require('http')
const req = http.request(
{
hostname: 'enx6b07hdu6cs.x.pipedream.net',
method: 'POST'
},
resp => {
console.log(`Server responded with "${resp.statusCode}"`)
}
)
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
req.write('writing some content...\n')
req.end('last write & close the stream')
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const req = http.request(
)
// writable-http-request.js1
const http = require('http')2
3
4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
12
13
req.on('finish', () => console.log('request sent'))14
req.on('close', () => console.log('Connection closed'))15
req.on('error', err => console.error(`Request failed: ${err}`))16
17
req.write('writing some content...\n')18
req.end('last write & close the stream')19
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
// writable-http-request.js1
const http = require('http')2
3
const req = http.request(4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
)12
13
14
15
16
17
req.write('writing some content...n')18
req.end('last write & close the stream')19
@loige41
// writable-http-request.js
const http = require('http')
const req = http.request(
{
hostname: 'enx6b07hdu6cs.x.pipedream.net',
method: 'POST'
},
resp => {
console.log(`Server responded with "${resp.statusCode}"`)
}
)
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
req.write('writing some content...n')
req.end('last write & close the stream')
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const req = http.request(
)
// writable-http-request.js1
const http = require('http')2
3
4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
12
13
req.on('finish', () => console.log('request sent'))14
req.on('close', () => console.log('Connection closed'))15
req.on('error', err => console.error(`Request failed: ${err}`))16
17
req.write('writing some content...n')18
req.end('last write & close the stream')19
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
// writable-http-request.js1
const http = require('http')2
3
const req = http.request(4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
)12
13
14
15
16
17
req.write('writing some content...n')18
req.end('last write & close the stream')19
req.write('writing some content...n')
req.end('last write & close the stream')
// writable-http-request.js1
const http = require('http')2
3
const req = http.request(4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
)12
13
req.on('finish', () => console.log('request sent'))14
req.on('close', () => console.log('Connection closed'))15
req.on('error', err => console.error(`Request failed: ${err}`))16
17
18
19
@loige41
@loige42
loige.link/writable-http-req
@loige43
BACKPRESSUREBACKPRESSURE
When writing large amounts of data you
should make sure you handle the stop write
signal and the drain event
 
loige.link/backpressure
@loige44
// stream-copy-safe.js
// Copy a file with streams while respecting backpressure: pause the
// source whenever the destination's internal buffer is full, and
// resume only once it has been flushed ('drain').
const { createReadStream, createWriteStream } = require('fs')

const [, , sourcePath, destinationPath] = process.argv
const input = createReadStream(sourcePath)
const output = createWriteStream(destinationPath)

input.on('data', chunk => {
  // write() returns false once the buffer exceeds the highWaterMark
  if (!output.write(chunk)) {
    // stop reading until the destination has drained its buffer
    input.pause()
    output.once('drain', () => input.resume())
  }
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
if (!canContinue) {
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
16
})17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
if (!canContinue) {
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
16
})17
// we are overflowing the destination, we should pause
srcStream.pause()
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
12
13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
if (!canContinue) {
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
16
})17
// we are overflowing the destination, we should pause
srcStream.pause()
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
12
13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
14
15
}16
})17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
if (!canContinue) {
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
16
})17
// we are overflowing the destination, we should pause
srcStream.pause()
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
12
13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
14
15
}16
})17
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
11
12
13
14
15
16
})17
@loige45
OTHER TYPES OF STREAMOTHER TYPES OF STREAM
Duplex Stream 
streams that are both Readable and Writable.  
(net.Socket) 
 
Transform Stream 
Duplex streams that can modify or transform the data as it is written
and read. 
(zlib.createGzip(), crypto.createCipheriv())
@loige46
ANATOMY OF A TRANSFORM STREAMANATOMY OF A TRANSFORM STREAM
transform stream
@loige47
ANATOMY OF A TRANSFORM STREAMANATOMY OF A TRANSFORM STREAM
1. write data
transform stream
(readable stream)
@loige47
ANATOMY OF A TRANSFORM STREAMANATOMY OF A TRANSFORM STREAM
1. write data
transform stream
2. transform the data
(readable stream)
@loige47
ANATOMY OF A TRANSFORM STREAMANATOMY OF A TRANSFORM STREAM
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige47
GZIP EXAMPLEGZIP EXAMPLE
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige48
GZIP EXAMPLEGZIP EXAMPLE
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige
Uncompressed data
48
GZIP EXAMPLEGZIP EXAMPLE
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige
Uncompressed data
compress
zlib.createGzip()
48
GZIP EXAMPLEGZIP EXAMPLE
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige
Uncompressed data Compressed data
compress
zlib.createGzip()
48
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write()
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
49@loige
(Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
49@loige
(Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drain
49@loige
(Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
49@loige
(Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
49@loige
(Backpressure) (Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
pause()
49@loige
(Backpressure) (Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
pause()
⚡ drain
49@loige
(Backpressure) (Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
pause()
⚡ drainresume()
49@loige
(Backpressure) (Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
pause()
⚡ drainresume()
49@loige
(Backpressure) (Backpressure)
You also have to handle end & error events!
// stream-copy-gzip.js
// Copy a file through a gzip Transform stream, handling backpressure
// manually at every stage: source -> gzip -> destination.
const {
  createReadStream,
  createWriteStream
} = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)

// source -> gzip: pause the source when gzip's buffer is full
srcStream.on('data', data => {
  const canContinue = gzipStream.write(data)
  if (!canContinue) {
    srcStream.pause()
    gzipStream.once('drain', () => {
      srcStream.resume()
    })
  }
})
srcStream.on('end', () => {
  // check if there's buffered data left
  const remainingData = gzipStream.read()
  if (remainingData !== null) {
    // fix: the buffered data was read but never forwarded
    // (the original called destStream.write() with no argument)
    destStream.write(remainingData)
  }
  gzipStream.end()
})

// gzip -> destination: pause gzip when the destination's buffer is full
gzipStream.on('data', data => {
  const canContinue = destStream.write(data)
  if (!canContinue) {
    gzipStream.pause()
    destStream.once('drain', () => {
      gzipStream.resume()
    })
  }
})
gzipStream.on('end', () => {
  destStream.end()
})

// ⚠ TODO: handle errors!
@loige50
gzipStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
gzipStream.pause()
destStream.once('drain', () => {
gzipStream.resume()
})
}
})
gzipStream.on('end', () => {
destStream.end()
})
// ⚠ TODO: handle errors!
// stream-copy-gzip.js
const {
createReadStream,
createWriteStream
} = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = gzipStream.write(data)
if (!canContinue) {
srcStream.pause()
gzipStream.once('drain', () => {
srcStream.resume()
})
}
})
srcStream.on('end', () => {
// check if there's buffered data left
const remainingData = gzipStream.read()
if (remainingData !== null) {
destStream.write()
}
gzipStream.end()
})
@loige50
03. PIPE()03. PIPE()
@loige51
readable
.pipe(transform1)
.pipe(transform2)
.pipe(transform3)
.pipe(writable)
readable.pipe(writableDest)
@loige
Connects a readable stream to a writable stream
A transform stream can be used as a destination as well
It returns the destination stream allowing for a chain of pipes
52
// stream-copy-gzip-pipe.js
// Gzip-compress a file by piping: pipe() wires up the data flow and
// handles backpressure between the streams automatically.
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , inputFile, outputFile] = process.argv

createReadStream(inputFile)
  .pipe(createGzip())
  .pipe(createWriteStream(outputFile))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
@loige53
// stream-copy-gzip-pipe.js
const {
createReadStream,
createWriteStream
} = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)
srcStream
.pipe(gzipStream)
.pipe(destStream)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
srcStream
.pipe(gzipStream)
.pipe(destStream)
// stream-copy-gzip-pipe.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
const { createGzip } = require('zlib')7
8
const [, , src, dest] = process.argv9
const srcStream = createReadStream(src)10
const gzipStream = createGzip()11
const destStream = createWriteStream(dest)12
13
14
15
16
@loige53
readable
.pipe(decompress)
.pipe(decrypt)
.pipe(convert)
.pipe(encrypt)
.pipe(compress)
.pipe(writeToDisk)
Setup complex pipelines with pipe
@loige
This is the most common way to use streams
54
readable
.on('error', handleErr)
.pipe(decompress)
.on('error', handleErr)
.pipe(decrypt)
.on('error', handleErr)
.pipe(convert)
.on('error', handleErr)
.pipe(encrypt)
.on('error', handleErr)
.pipe(compress)
.on('error', handleErr)
.pipe(writeToDisk)
.on('error', handleErr)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
Handling errors (correctly)
@loige55
readable
.on('error', handleErr)
.pipe(decompress)
.on('error', handleErr)
.pipe(decrypt)
.on('error', handleErr)
.pipe(convert)
.on('error', handleErr)
.pipe(encrypt)
.on('error', handleErr)
.pipe(compress)
.on('error', handleErr)
.pipe(writeToDisk)
.on('error', handleErr)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
readable
.pipe(decompress)
.pipe(decrypt)
.pipe(convert)
.pipe(encrypt)
.pipe(compress)
.pipe(writeToDisk)
1
.on('error', handleErr)2
3
.on('error', handleErr)4
5
.on('error', handleErr)6
7
.on('error', handleErr)8
9
.on('error', handleErr)10
11
.on('error', handleErr)12
13
.on('error', handleErr)14
Handling errors (correctly)
@loige55
readable
.on('error', handleErr)
.pipe(decompress)
.on('error', handleErr)
.pipe(decrypt)
.on('error', handleErr)
.pipe(convert)
.on('error', handleErr)
.pipe(encrypt)
.on('error', handleErr)
.pipe(compress)
.on('error', handleErr)
.pipe(writeToDisk)
.on('error', handleErr)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
readable
.pipe(decompress)
.pipe(decrypt)
.pipe(convert)
.pipe(encrypt)
.pipe(compress)
.pipe(writeToDisk)
1
.on('error', handleErr)2
3
.on('error', handleErr)4
5
.on('error', handleErr)6
7
.on('error', handleErr)8
9
.on('error', handleErr)10
11
.on('error', handleErr)12
13
.on('error', handleErr)14
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
readable1
2
.pipe(decompress)3
4
.pipe(decrypt)5
6
.pipe(convert)7
8
.pipe(encrypt)9
10
.pipe(compress)11
12
.pipe(writeToDisk)13
14
Handling errors (correctly)
@loige
 
handleErr should end and destroy the streams
(it doesn't happen automatically)
 
55
readable
.on('error', handleErr)
.pipe(decompress)
.on('error', handleErr)
.pipe(decrypt)
.on('error', handleErr)
.pipe(convert)
.on('error', handleErr)
.pipe(encrypt)
.on('error', handleErr)
.pipe(compress)
.on('error', handleErr)
.pipe(writeToDisk)
.on('error', handleErr)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
readable
.pipe(decompress)
.pipe(decrypt)
.pipe(convert)
.pipe(encrypt)
.pipe(compress)
.pipe(writeToDisk)
1
.on('error', handleErr)2
3
.on('error', handleErr)4
5
.on('error', handleErr)6
7
.on('error', handleErr)8
9
.on('error', handleErr)10
11
.on('error', handleErr)12
13
.on('error', handleErr)14
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
.on('error', handleErr)
readable1
2
.pipe(decompress)3
4
.pipe(decrypt)5
6
.pipe(convert)7
8
.pipe(encrypt)9
10
.pipe(compress)11
12
.pipe(writeToDisk)13
14
Handling errors (correctly)
@loige
 
handleErr should end and destroy the streams
(it doesn't happen automatically)
 
55
04. STREAM UTILITIES04. STREAM UTILITIES
@loige56
// stream-copy-gzip-pipeline.js
// stream.pipeline() (Node.js 10+) pipes all the given streams together
// and, on failure, ends and destroys every stream correctly.
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv

// final callback: receives an error if the pipeline failed anywhere
const onEnd = err => {
  if (err) {
    console.error(`Error: ${err}`)
    process.exit(1)
  }
  console.log('Done!')
}

pipeline(
  createReadStream(src),
  createGzip(),
  createWriteStream(dest),
  onEnd
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
stream.pipeline(...streams, callback) - Node.js 10+
@loige57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const { pipeline } = require('stream')
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
stream.pipeline(...streams, callback) - Node.js 10+
@loige57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const { pipeline } = require('stream')
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
stream.pipeline(...streams, callback) - Node.js 10+
@loige
You can pass multiple streams (they will be piped)
57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const { pipeline } = require('stream')
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
function onEnd (err) {
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
20
)21
stream.pipeline(...streams, callback) - Node.js 10+
@loige
You can pass multiple streams (they will be piped)
57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const { pipeline } = require('stream')
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
function onEnd (err) {
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
20
)21
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
14
15
16
17
18
19
}20
)21
stream.pipeline(...streams, callback) - Node.js 10+
@loige
You can pass multiple streams (they will be piped)
The last argument is a callback. If invoked with an
error, it means the pipeline failed at some point.
All the streams are ended and destroyed correctly.
57
// stream-copy-gzip-pump.js
// Same behavior as the pipeline version, but using the pump module,
// which provides pipeline-style cleanup for Node.js < 10.
const pump = require('pump') // from npm
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv

pump( // just swap pipeline with pump!
  createReadStream(src),
  createGzip(),
  createWriteStream(dest),
  err => {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }
    console.log('Done!')
  }
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
For Node.js < 10: pump - npm.im/pump
@loige58
// stream-copy-gzip-pump.js
const pump = require('pump') // from npm
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pump( // just swap pipeline with pump!
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const pump = require('pump') // from npm
pump( // just swap pipeline with pump!
)
// stream-copy-gzip-pump.js1
2
3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
For Node.js < 10: pump - npm.im/pump
@loige58
pumpify(...streams) - 
Create reusable pieces of pipeline
npm.im/pumpify
@loige
Let's create EncGz, an application that
helps us to read and write encrypted-
gzipped files
59
// encgz-stream.js - utility library
// Builds composite streams that encrypt (aes256) and gzip data.
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
// NOTE(review): body omitted on this slide — presumably uses createHash('md5'); confirm in full source
function getChiperKey (secret) {}
// Returns a single writable/readable stream that encrypts then gzips.
function createEncgz (secret) {
// fresh random 16-byte initialization vector per stream
const initVect = randomBytes(16)
const cipherKey = getChiperKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
// pumpify combines the two stages into one duplex stream
const stream = pumpify(encryptStream, gzipStream)
// expose the IV so the caller can persist it for later decryption
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
@loige60
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getChiperKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getChiperKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
function createEncgz (secret) {
}
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
25
@loige60
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getChiperKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getChiperKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
function createEncgz (secret) {
}
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
25
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
18
19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
}25
@loige60
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getChiperKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getChiperKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
function createEncgz (secret) {
}
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
25
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
18
19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
}25
const stream = pumpify(encryptStream, gzipStream)
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
21
stream.initVect = initVect22
23
return stream24
}25
@loige60
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getChiperKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getChiperKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
function createEncgz (secret) {
}
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
25
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
18
19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
}25
const stream = pumpify(encryptStream, gzipStream)
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
21
stream.initVect = initVect22
23
return stream24
}25
return stream
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
24
}25
@loige60
// encgz-stream.js (...continue from previous slide)
// Inverse of createEncgz: gunzips then decrypts, using the same
// secret and the init vector produced during encryption.
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
// order is reversed w.r.t. encryption: gunzip first, then decrypt
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
@loige61
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
function createDecgz (secret, initVect) {
}
// encgz-stream.js (...continue from previous slide)1
2
3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
@loige61
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
function createDecgz (secret, initVect) {
}
// encgz-stream.js (...continue from previous slide)1
2
3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
5
6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
@loige61
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
function createDecgz (secret, initVect) {
}
// encgz-stream.js (...continue from previous slide)1
2
3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
5
6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const stream = pumpify(gunzipStream, decryptStream)
return stream
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
8
9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
@loige61
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
function createDecgz (secret, initVect) {
}
// encgz-stream.js (...continue from previous slide)1
2
3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
5
6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const stream = pumpify(gunzipStream, decryptStream)
return stream
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
8
9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
module.exports = {
createEncgz,
createDecgz
}
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
}10
11
12
13
14
15
@loige61
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
// the secret (passphrase) is the 3rd CLI argument
const [, , secret] = process.argv
const encgz = createEncgz(secret)
// print the IV on stderr so it doesn't pollute the encrypted stdout output
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
// called on completion; `err` is set if any stage of the pipeline failed
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21 @loige62
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret] = process.argv
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige62
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret] = process.argv
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const encgz = createEncgz(secret)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige62
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret] = process.argv
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const encgz = createEncgz(secret)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
pipeline(
)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
21 @loige62
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret] = process.argv
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const encgz = createEncgz(secret)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
pipeline(
)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
21
process.stdin,
encgz,
process.stdout,
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
12
13
14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige62
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')
// secret and hex-encoded init vector are the 3rd and 4th CLI arguments
const [, , secret, initVect] = process.argv
// decode the IV back from the hex string printed by encgz.js
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
pipeline(
process.stdin,
decgz,
process.stdout,
// called on completion; `err` is set if any stage of the pipeline failed
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21 @loige63
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')
const [, , secret, initVect] = process.argv
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
pipeline(
process.stdin,
decgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret, initVect] = process.argv
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
10
pipeline(11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige63
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')
const [, , secret, initVect] = process.argv
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
pipeline(
process.stdin,
decgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret, initVect] = process.argv
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
10
pipeline(11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
const [, , secret, initVect] = process.argv6
7
8
9
10
pipeline(11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige63
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')
const [, , secret, initVect] = process.argv
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
pipeline(
process.stdin,
decgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret, initVect] = process.argv
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
10
pipeline(11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
const [, , secret, initVect] = process.argv6
7
8
9
10
pipeline(11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
pipeline(
)
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
const [, , secret, initVect] = process.argv6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
10
11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
21 @loige63
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')
const [, , secret, initVect] = process.argv
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
pipeline(
process.stdin,
decgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret, initVect] = process.argv
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
10
pipeline(11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
const [, , secret, initVect] = process.argv6
7
8
9
10
pipeline(11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
pipeline(
)
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
const [, , secret, initVect] = process.argv6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
10
11
process.stdin,12
decgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
21
process.stdin,
decgz,
process.stdout,
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
const [, , secret, initVect] = process.argv6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
10
pipeline(11
12
13
14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige63
@loige64
readable-stream - 
Npm package that contains the latest version of Node.js stream library.
It also makes Node.js streams compatible with the browser (can be used with
Webpack and Browserify)
npm.im/readable-stream
@loige
* yeah, the name is misleading. The package offers all the functionalities in the official 'stream'
package, not just readable streams.
*
65
04. WRITING CUSTOM   04. WRITING CUSTOM   
        STREAMS        STREAMS
@loige66
@loige
EmojiStream Uppercasify DOMAppend
67
@loige
EmojiStream Uppercasify DOMAppend
 Lemon
67
@loige
EmojiStream Uppercasify DOMAppend
 LEMON
67
@loige
EmojiStream Uppercasify DOMAppend
 LEMON
67
@loige
EmojiStream Uppercasify DOMAppend
 LEMON Banana
67
@loige
EmojiStream Uppercasify DOMAppend
 LEMON BANANA
67
@loige
EmojiStream Uppercasify DOMAppend
 LEMON
 BANANA
67
@loige
EmojiStream Uppercasify DOMAppend
 LEMON
class EmojiStream
extends Readable {
_read() {
// ...
}
}
 BANANA
67
@loige
EmojiStream Uppercasify DOMAppend
 LEMON
class EmojiStream
extends Readable {
_read() {
// ...
}
}
class Uppercasify
extends Transform {
_transform(
chunk,
enc,
done
) {
// ...
}
}
 BANANA
67
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019)

Weitere ähnliche Inhalte

Was ist angesagt?

Basic command for linux
Basic command for linuxBasic command for linux
Basic command for linux
gt0ne
 
mapserver_install_linux
mapserver_install_linuxmapserver_install_linux
mapserver_install_linux
tutorialsruby
 
Devinsampa nginx-scripting
Devinsampa nginx-scriptingDevinsampa nginx-scripting
Devinsampa nginx-scripting
Tony Fabeen
 

Was ist angesagt? (17)

PSR-7 and PSR-15, why can't you ignore them
PSR-7 and PSR-15, why can't you ignore themPSR-7 and PSR-15, why can't you ignore them
PSR-7 and PSR-15, why can't you ignore them
 
Bash in theory and in practice - part two
Bash in theory and in practice - part twoBash in theory and in practice - part two
Bash in theory and in practice - part two
 
Containers for sysadmins
Containers for sysadminsContainers for sysadmins
Containers for sysadmins
 
Bash in theory and in practice - part one
Bash in theory and in practice - part oneBash in theory and in practice - part one
Bash in theory and in practice - part one
 
Basic command for linux
Basic command for linuxBasic command for linux
Basic command for linux
 
Intro to pl/PHP Oscon2007
Intro to pl/PHP Oscon2007Intro to pl/PHP Oscon2007
Intro to pl/PHP Oscon2007
 
REST in peace @ IPC 2012 in Mainz
REST in peace @ IPC 2012 in MainzREST in peace @ IPC 2012 in Mainz
REST in peace @ IPC 2012 in Mainz
 
Getting groovy (ODP)
Getting groovy (ODP)Getting groovy (ODP)
Getting groovy (ODP)
 
Piratte installation
Piratte installationPiratte installation
Piratte installation
 
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)
 
rtwerewr
rtwerewrrtwerewr
rtwerewr
 
mapserver_install_linux
mapserver_install_linuxmapserver_install_linux
mapserver_install_linux
 
Drupal and Open shift (and php)
Drupal and Open shift (and php)Drupal and Open shift (and php)
Drupal and Open shift (and php)
 
Devinsampa nginx-scripting
Devinsampa nginx-scriptingDevinsampa nginx-scripting
Devinsampa nginx-scripting
 
Adventures in infrastructure as code
Adventures in infrastructure as codeAdventures in infrastructure as code
Adventures in infrastructure as code
 
Os Treat
Os TreatOs Treat
Os Treat
 
Linux Commands
Linux CommandsLinux Commands
Linux Commands
 

Ähnlich wie It’s about time to embrace Streams (Node Ukraine 2019)

Ähnlich wie It’s about time to embrace Streams (Node Ukraine 2019) (20)

It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJSIt’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
 
It’s about time to embrace Node.js Streams - Austin Node.js meetup
It’s about time to embrace Node.js Streams - Austin Node.js meetupIt’s about time to embrace Node.js Streams - Austin Node.js meetup
It’s about time to embrace Node.js Streams - Austin Node.js meetup
 
It's about time to embrace Node.js streams
It's about time to embrace Node.js streamsIt's about time to embrace Node.js streams
It's about time to embrace Node.js streams
 
It’s about time to embrace Node.js Streams
It’s about time to embrace Node.js StreamsIt’s about time to embrace Node.js Streams
It’s about time to embrace Node.js Streams
 
Downloading a Billion Files in Python
Downloading a Billion Files in PythonDownloading a Billion Files in Python
Downloading a Billion Files in Python
 
DIWE - File handling with PHP
DIWE - File handling with PHPDIWE - File handling with PHP
DIWE - File handling with PHP
 
Php files
Php filesPhp files
Php files
 
Topic - File operation.pptx
Topic - File operation.pptxTopic - File operation.pptx
Topic - File operation.pptx
 
Will iPython replace Bash?
Will iPython replace Bash?Will iPython replace Bash?
Will iPython replace Bash?
 
Will iPython replace bash?
Will iPython replace bash?Will iPython replace bash?
Will iPython replace bash?
 
File handling in cpp
File handling in cppFile handling in cpp
File handling in cpp
 
Chap 5 php files part 1
Chap 5 php files part 1Chap 5 php files part 1
Chap 5 php files part 1
 
File handling-dutt
File handling-duttFile handling-dutt
File handling-dutt
 
SEQFILE1.PPT
SEQFILE1.PPTSEQFILE1.PPT
SEQFILE1.PPT
 
File Handling in c.ppt
File Handling in c.pptFile Handling in c.ppt
File Handling in c.ppt
 
file_handling_in_c.ppt
file_handling_in_c.pptfile_handling_in_c.ppt
file_handling_in_c.ppt
 
FILES IN C
FILES IN CFILES IN C
FILES IN C
 
C 檔案輸入與輸出
C 檔案輸入與輸出C 檔案輸入與輸出
C 檔案輸入與輸出
 
PHP File Handling
PHP File Handling PHP File Handling
PHP File Handling
 
File Handling
File HandlingFile Handling
File Handling
 

Mehr von Luciano Mammino

Mehr von Luciano Mammino (20)

Did you know JavaScript has iterators? DublinJS
Did you know JavaScript has iterators? DublinJSDid you know JavaScript has iterators? DublinJS
Did you know JavaScript has iterators? DublinJS
 
What I learned by solving 50 Advent of Code challenges in Rust - RustNation U...
What I learned by solving 50 Advent of Code challenges in Rust - RustNation U...What I learned by solving 50 Advent of Code challenges in Rust - RustNation U...
What I learned by solving 50 Advent of Code challenges in Rust - RustNation U...
 
Building an invite-only microsite with Next.js & Airtable - ReactJS Milano
Building an invite-only microsite with Next.js & Airtable - ReactJS MilanoBuilding an invite-only microsite with Next.js & Airtable - ReactJS Milano
Building an invite-only microsite with Next.js & Airtable - ReactJS Milano
 
From Node.js to Design Patterns - BuildPiper
From Node.js to Design Patterns - BuildPiperFrom Node.js to Design Patterns - BuildPiper
From Node.js to Design Patterns - BuildPiper
 
Let's build a 0-cost invite-only website with Next.js and Airtable!
Let's build a 0-cost invite-only website with Next.js and Airtable!Let's build a 0-cost invite-only website with Next.js and Airtable!
Let's build a 0-cost invite-only website with Next.js and Airtable!
 
Everything I know about S3 pre-signed URLs
Everything I know about S3 pre-signed URLsEverything I know about S3 pre-signed URLs
Everything I know about S3 pre-signed URLs
 
Serverless for High Performance Computing
Serverless for High Performance ComputingServerless for High Performance Computing
Serverless for High Performance Computing
 
Serverless for High Performance Computing
Serverless for High Performance ComputingServerless for High Performance Computing
Serverless for High Performance Computing
 
JavaScript Iteration Protocols - Workshop NodeConf EU 2022
JavaScript Iteration Protocols - Workshop NodeConf EU 2022JavaScript Iteration Protocols - Workshop NodeConf EU 2022
JavaScript Iteration Protocols - Workshop NodeConf EU 2022
 
Building an invite-only microsite with Next.js & Airtable
Building an invite-only microsite with Next.js & AirtableBuilding an invite-only microsite with Next.js & Airtable
Building an invite-only microsite with Next.js & Airtable
 
Let's take the monolith to the cloud 🚀
Let's take the monolith to the cloud 🚀Let's take the monolith to the cloud 🚀
Let's take the monolith to the cloud 🚀
 
A look inside the European Covid Green Certificate - Rust Dublin
A look inside the European Covid Green Certificate - Rust DublinA look inside the European Covid Green Certificate - Rust Dublin
A look inside the European Covid Green Certificate - Rust Dublin
 
Monoliths to the cloud!
Monoliths to the cloud!Monoliths to the cloud!
Monoliths to the cloud!
 
The senior dev
The senior devThe senior dev
The senior dev
 
Node.js: scalability tips - Azure Dev Community Vijayawada
Node.js: scalability tips - Azure Dev Community VijayawadaNode.js: scalability tips - Azure Dev Community Vijayawada
Node.js: scalability tips - Azure Dev Community Vijayawada
 
A look inside the European Covid Green Certificate (Codemotion 2021)
A look inside the European Covid Green Certificate (Codemotion 2021)A look inside the European Covid Green Certificate (Codemotion 2021)
A look inside the European Covid Green Certificate (Codemotion 2021)
 
AWS Observability Made Simple
AWS Observability Made SimpleAWS Observability Made Simple
AWS Observability Made Simple
 
Semplificare l'observability per progetti Serverless
Semplificare l'observability per progetti ServerlessSemplificare l'observability per progetti Serverless
Semplificare l'observability per progetti Serverless
 
Finding a lost song with Node.js and async iterators - NodeConf Remote 2021
Finding a lost song with Node.js and async iterators - NodeConf Remote 2021Finding a lost song with Node.js and async iterators - NodeConf Remote 2021
Finding a lost song with Node.js and async iterators - NodeConf Remote 2021
 
Finding a lost song with Node.js and async iterators - EnterJS 2021
Finding a lost song with Node.js and async iterators - EnterJS 2021Finding a lost song with Node.js and async iterators - EnterJS 2021
Finding a lost song with Node.js and async iterators - EnterJS 2021
 

Kürzlich hochgeladen

Architecting Cloud Native Applications
Architecting Cloud Native ApplicationsArchitecting Cloud Native Applications
Architecting Cloud Native Applications
WSO2
 
Why Teams call analytics are critical to your entire business
Why Teams call analytics are critical to your entire businessWhy Teams call analytics are critical to your entire business
Why Teams call analytics are critical to your entire business
panagenda
 

Kürzlich hochgeladen (20)

Emergent Methods: Multi-lingual narrative tracking in the news - real-time ex...
Emergent Methods: Multi-lingual narrative tracking in the news - real-time ex...Emergent Methods: Multi-lingual narrative tracking in the news - real-time ex...
Emergent Methods: Multi-lingual narrative tracking in the news - real-time ex...
 
Strategize a Smooth Tenant-to-tenant Migration and Copilot Takeoff
Strategize a Smooth Tenant-to-tenant Migration and Copilot TakeoffStrategize a Smooth Tenant-to-tenant Migration and Copilot Takeoff
Strategize a Smooth Tenant-to-tenant Migration and Copilot Takeoff
 
Connector Corner: Accelerate revenue generation using UiPath API-centric busi...
Connector Corner: Accelerate revenue generation using UiPath API-centric busi...Connector Corner: Accelerate revenue generation using UiPath API-centric busi...
Connector Corner: Accelerate revenue generation using UiPath API-centric busi...
 
Web Form Automation for Bonterra Impact Management (fka Social Solutions Apri...
Web Form Automation for Bonterra Impact Management (fka Social Solutions Apri...Web Form Automation for Bonterra Impact Management (fka Social Solutions Apri...
Web Form Automation for Bonterra Impact Management (fka Social Solutions Apri...
 
Polkadot JAM Slides - Token2049 - By Dr. Gavin Wood
Polkadot JAM Slides - Token2049 - By Dr. Gavin WoodPolkadot JAM Slides - Token2049 - By Dr. Gavin Wood
Polkadot JAM Slides - Token2049 - By Dr. Gavin Wood
 
Powerful Google developer tools for immediate impact! (2023-24 C)
Powerful Google developer tools for immediate impact! (2023-24 C)Powerful Google developer tools for immediate impact! (2023-24 C)
Powerful Google developer tools for immediate impact! (2023-24 C)
 
Boost Fertility New Invention Ups Success Rates.pdf
Boost Fertility New Invention Ups Success Rates.pdfBoost Fertility New Invention Ups Success Rates.pdf
Boost Fertility New Invention Ups Success Rates.pdf
 
Corporate and higher education May webinar.pptx
Corporate and higher education May webinar.pptxCorporate and higher education May webinar.pptx
Corporate and higher education May webinar.pptx
 
Apidays New York 2024 - The value of a flexible API Management solution for O...
Apidays New York 2024 - The value of a flexible API Management solution for O...Apidays New York 2024 - The value of a flexible API Management solution for O...
Apidays New York 2024 - The value of a flexible API Management solution for O...
 
Artificial Intelligence Chap.5 : Uncertainty
Artificial Intelligence Chap.5 : UncertaintyArtificial Intelligence Chap.5 : Uncertainty
Artificial Intelligence Chap.5 : Uncertainty
 
Manulife - Insurer Transformation Award 2024
Manulife - Insurer Transformation Award 2024Manulife - Insurer Transformation Award 2024
Manulife - Insurer Transformation Award 2024
 
ProductAnonymous-April2024-WinProductDiscovery-MelissaKlemke
ProductAnonymous-April2024-WinProductDiscovery-MelissaKlemkeProductAnonymous-April2024-WinProductDiscovery-MelissaKlemke
ProductAnonymous-April2024-WinProductDiscovery-MelissaKlemke
 
ICT role in 21st century education and its challenges
ICT role in 21st century education and its challengesICT role in 21st century education and its challenges
ICT role in 21st century education and its challenges
 
A Beginners Guide to Building a RAG App Using Open Source Milvus
A Beginners Guide to Building a RAG App Using Open Source MilvusA Beginners Guide to Building a RAG App Using Open Source Milvus
A Beginners Guide to Building a RAG App Using Open Source Milvus
 
MS Copilot expands with MS Graph connectors
MS Copilot expands with MS Graph connectorsMS Copilot expands with MS Graph connectors
MS Copilot expands with MS Graph connectors
 
Architecting Cloud Native Applications
Architecting Cloud Native ApplicationsArchitecting Cloud Native Applications
Architecting Cloud Native Applications
 
"I see eyes in my soup": How Delivery Hero implemented the safety system for ...
"I see eyes in my soup": How Delivery Hero implemented the safety system for ..."I see eyes in my soup": How Delivery Hero implemented the safety system for ...
"I see eyes in my soup": How Delivery Hero implemented the safety system for ...
 
TrustArc Webinar - Stay Ahead of US State Data Privacy Law Developments
TrustArc Webinar - Stay Ahead of US State Data Privacy Law DevelopmentsTrustArc Webinar - Stay Ahead of US State Data Privacy Law Developments
TrustArc Webinar - Stay Ahead of US State Data Privacy Law Developments
 
FWD Group - Insurer Innovation Award 2024
FWD Group - Insurer Innovation Award 2024FWD Group - Insurer Innovation Award 2024
FWD Group - Insurer Innovation Award 2024
 
Why Teams call analytics are critical to your entire business
Why Teams call analytics are critical to your entire businessWhy Teams call analytics are critical to your entire business
Why Teams call analytics are critical to your entire business
 

It’s about time to embrace Streams (Node Ukraine 2019)

  • 1. K Y I V   2 0 1 9 Luciano Mammino (@loige) IT’S ABOUT TIME TOIT’S ABOUT TIME TO EMBRACE STREAMSEMBRACE STREAMS    loige.link/streams-kyiv May 18th 1
  • 2. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 @loige2
  • 3. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 @loige2
  • 4. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // read entire file content const content = readFileSync(src) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 11 12 // write that content somewhere else13 writeFileSync(dest, content)14 @loige2
  • 5. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // read entire file content const content = readFileSync(src) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // write that content somewhere else writeFileSync(dest, content) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 13 14 @loige2
  • 17. 11
  • 29. FILE COPY: FILE COPY: THE BUFFER WAYTHE BUFFER WAY @loige // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv const content = readFileSync(src) writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 16
  • 30. FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY // stream-copy.js const { createReadStream, createWriteStream } = require('fs') const [,, src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', (data) => destStream.write(data)) 1 2 3 4 5 6 7 8 9 10 11 @loige * Careful: this implementation is not optimal * 17
  • 31. FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY // stream-copy.js const { createReadStream, createWriteStream } = require('fs') const [,, src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', (data) => destStream.write(data)) 1 2 3 4 5 6 7 8 9 10 11 createReadStream, createWriteStream // stream-copy.js1 2 const {3 4 5 } = require('fs')6 7 const [,, src, dest] = process.argv8 const srcStream = createReadStream(src)9 const destStream = createWriteStream(dest)10 srcStream.on('data', (data) => destStream.write(data))11 @loige * Careful: this implementation is not optimal * 17
  • 32. FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY // stream-copy.js const { createReadStream, createWriteStream } = require('fs') const [,, src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', (data) => destStream.write(data)) 1 2 3 4 5 6 7 8 9 10 11 createReadStream, createWriteStream // stream-copy.js1 2 const {3 4 5 } = require('fs')6 7 const [,, src, dest] = process.argv8 const srcStream = createReadStream(src)9 const destStream = createWriteStream(dest)10 srcStream.on('data', (data) => destStream.write(data))11 const srcStream = createReadStream(src) const destStream = createWriteStream(dest) // stream-copy.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 srcStream.on('data', (data) => destStream.write(data))11 @loige * Careful: this implementation is not optimal * 17
  • 33. FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY // stream-copy.js const { createReadStream, createWriteStream } = require('fs') const [,, src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', (data) => destStream.write(data)) 1 2 3 4 5 6 7 8 9 10 11 createReadStream, createWriteStream // stream-copy.js1 2 const {3 4 5 } = require('fs')6 7 const [,, src, dest] = process.argv8 const srcStream = createReadStream(src)9 const destStream = createWriteStream(dest)10 srcStream.on('data', (data) => destStream.write(data))11 const srcStream = createReadStream(src) const destStream = createWriteStream(dest) // stream-copy.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 srcStream.on('data', (data) => destStream.write(data))11 srcStream.on('data', (data) => destStream.write(data)) // stream-copy.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 7 const [,, src, dest] = process.argv8 const srcStream = createReadStream(src)9 const destStream = createWriteStream(dest)10 11 @loige * Careful: this implementation is not optimal * 17
  • 40. ALL STREAMS ARE ALL STREAMS ARE EVENT EMITTERSEVENT EMITTERS A stream instance is an object that emits events when its internal state changes, for instance: s.on('readable', () => {}) // ready to be consumed s.on('data', (chunk) => {}) // new data is available s.on('error', (err) => {}) // some error happened s.on('end', () => {}) // no more data available The events available depend from the type of stream @loige24
  • 52. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 @loige36
  • 53. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 54. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 55. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 let counter = 0 // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 9 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => 
console.error(`Error reading file: ${err}`))19 @loige36
  • 56. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 let counter = 0 // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 9 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => 
console.error(`Error reading file: ${err}`))19 file.on('data', chunk => { }) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 57. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 let counter = 0 // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 9 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => 
console.error(`Error reading file: ${err}`))19 file.on('data', chunk => { }) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 for (let char of chunk.toString('utf8')) { } // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 12 if (emojis.includes(char)) {13 counter++14 }15 16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 58. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 let counter = 0 // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 9 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => 
console.error(`Error reading file: ${err}`))19 file.on('data', chunk => { }) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 for (let char of chunk.toString('utf8')) { } // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 12 if (emojis.includes(char)) {13 counter++14 }15 16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 if (emojis.includes(char)) { counter++ } // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 13 14 15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 61. // count-emojis-async-iterator.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm async function main () { const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 for await (let chunk of file) { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } } console.log(`Found ${counter} emojis`) } main() 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 @loige39
  • 62. // count-emojis-async-iterator.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm async function main () { const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 for await (let chunk of file) { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } } console.log(`Found ${counter} emojis`) } main() 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 for await (let chunk of file) { } // count-emojis-async-iterator.js1 const { createReadStream } = require('fs')2 const { EMOJI_MAP } = require('emoji') // from npm3 4 async function main () {5 const emojis = Object.keys(EMOJI_MAP)6 const file = createReadStream(process.argv[2])7 let counter = 08 9 10 for (let char of chunk.toString('utf8')) {11 if (emojis.includes(char)) {12 counter++13 }14 }15 16 17 console.log(`Found ${counter} emojis`)18 }19 20 main()21 @loige39
  • 63. // count-emojis-async-iterator.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm async function main () { const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 for await (let chunk of file) { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } } console.log(`Found ${counter} emojis`) } main() 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 for await (let chunk of file) { } // count-emojis-async-iterator.js1 const { createReadStream } = require('fs')2 const { EMOJI_MAP } = require('emoji') // from npm3 4 async function main () {5 const emojis = Object.keys(EMOJI_MAP)6 const file = createReadStream(process.argv[2])7 let counter = 08 9 10 for (let char of chunk.toString('utf8')) {11 if (emojis.includes(char)) {12 counter++13 }14 }15 16 17 console.log(`Found ${counter} emojis`)18 }19 20 main()21 async function main () { } // count-emojis-async-iterator.js1 const { createReadStream } = require('fs')2 const { EMOJI_MAP } = require('emoji') // from npm3 4 5 const emojis = Object.keys(EMOJI_MAP)6 const file = createReadStream(process.argv[2])7 let counter = 08 9 for await (let chunk of file) {10 for (let char of chunk.toString('utf8')) {11 if (emojis.includes(char)) {12 counter++13 }14 }15 }16 17 console.log(`Found ${counter} emojis`)18 19 20 main()21 @loige39
  • 65. // writable-http-request.js const http = require('http') const req = http.request( { hostname: 'enx6b07hdu6cs.x.pipedream.net', method: 'POST' }, resp => { console.log(`Server responded with "${resp.statusCode}"`) } ) req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) req.write('writing some content...n') req.end('last write & close the stream') 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 @loige41
  • 66. // writable-http-request.js const http = require('http') const req = http.request( { hostname: 'enx6b07hdu6cs.x.pipedream.net', method: 'POST' }, resp => { console.log(`Server responded with "${resp.statusCode}"`) } ) req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) req.write('writing some content...n') req.end('last write & close the stream') 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const req = http.request( ) // writable-http-request.js1 const http = require('http')2 3 4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 12 13 req.on('finish', () => console.log('request sent'))14 req.on('close', () => console.log('Connection closed'))15 req.on('error', err => console.error(`Request failed: ${err}`))16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 @loige41
  • 67. // writable-http-request.js const http = require('http') const req = http.request( { hostname: 'enx6b07hdu6cs.x.pipedream.net', method: 'POST' }, resp => { console.log(`Server responded with "${resp.statusCode}"`) } ) req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) req.write('writing some content...n') req.end('last write & close the stream') 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const req = http.request( ) // writable-http-request.js1 const http = require('http')2 3 4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 12 13 req.on('finish', () => console.log('request sent'))14 req.on('close', () => console.log('Connection closed'))15 req.on('error', err => console.error(`Request failed: ${err}`))16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) // writable-http-request.js1 const http = require('http')2 3 const req = http.request(4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 )12 13 14 15 16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 @loige41
  • 68. // writable-http-request.js const http = require('http') const req = http.request( { hostname: 'enx6b07hdu6cs.x.pipedream.net', method: 'POST' }, resp => { console.log(`Server responded with "${resp.statusCode}"`) } ) req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) req.write('writing some content...n') req.end('last write & close the stream') 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const req = http.request( ) // writable-http-request.js1 const http = require('http')2 3 4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 12 13 req.on('finish', () => console.log('request sent'))14 req.on('close', () => console.log('Connection closed'))15 req.on('error', err => console.error(`Request failed: ${err}`))16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) // writable-http-request.js1 const http = require('http')2 3 const req = http.request(4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 )12 13 14 15 16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 req.write('writing some content...n') req.end('last write & close the stream') // writable-http-request.js1 const http = require('http')2 3 const req = http.request(4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 )12 13 req.on('finish', () => console.log('request sent'))14 req.on('close', () => console.log('Connection closed'))15 req.on('error', err => 
console.error(`Request failed: ${err}`))16 17 18 19 @loige41
  • 72. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 @loige45
  • 73. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 @loige45
  • 74. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 if (!canContinue) { } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 16 })17 @loige45
  • 75. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 if (!canContinue) { } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 16 })17 // we are overflowing the destination, we should pause srcStream.pause() // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 12 13 // we will resume when the destination 
stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 @loige45
  • 76. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 if (!canContinue) { } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 16 })17 // we are overflowing the destination, we should pause srcStream.pause() // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 12 13 // we will resume when the destination 
stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 14 15 }16 })17 @loige45
  • 77. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 if (!canContinue) { } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 16 })17 // we are overflowing the destination, we should pause srcStream.pause() // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 12 13 // we will resume when the destination 
stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 14 15 }16 })17 const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 11 12 13 14 15 16 })17 @loige45
  • 95. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() 49@loige (Backpressure)
  • 96. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() 49@loige (Backpressure) (Backpressure)
  • 97. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() pause() 49@loige (Backpressure) (Backpressure)
  • 98. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() pause() ⚡ drain 49@loige (Backpressure) (Backpressure)
  • 99. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() pause() ⚡ drainresume() 49@loige (Backpressure) (Backpressure)
  • 100. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() pause() ⚡ drainresume() 49@loige (Backpressure) (Backpressure) You also have to handle end & error events!
  • 101. gzipStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { gzipStream.pause() destStream.once('drain', () => { gzipStream.resume() }) } }) gzipStream.on('end', () => { destStream.end() }) // ⚠ TODO: handle errors! // stream-copy-gzip.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = gzipStream.write(data) if (!canContinue) { srcStream.pause() gzipStream.once('drain', () => { srcStream.resume() }) } }) srcStream.on('end', () => { // check if there's buffered data left const remainingData = gzipStream.read() if (remainingData !== null) { destStream.write() } gzipStream.end() }) @loige50
  • 102. gzipStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { gzipStream.pause() destStream.once('drain', () => { gzipStream.resume() }) } }) gzipStream.on('end', () => { destStream.end() }) // ⚠ TODO: handle errors! // stream-copy-gzip.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = gzipStream.write(data) if (!canContinue) { srcStream.pause() gzipStream.once('drain', () => { srcStream.resume() }) } }) srcStream.on('end', () => { // check if there's buffered data left const remainingData = gzipStream.read() if (remainingData !== null) { destStream.write() } gzipStream.end() }) @loige50
  • 105. // stream-copy-gzip-pipe.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream .pipe(gzipStream) .pipe(destStream) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 @loige53
  • 106. // stream-copy-gzip-pipe.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream .pipe(gzipStream) .pipe(destStream) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 srcStream .pipe(gzipStream) .pipe(destStream) // stream-copy-gzip-pipe.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 const { createGzip } = require('zlib')7 8 const [, , src, dest] = process.argv9 const srcStream = createReadStream(src)10 const gzipStream = createGzip()11 const destStream = createWriteStream(dest)12 13 14 15 16 @loige53
  • 108. readable .on('error', handleErr) .pipe(decompress) .on('error', handleErr) .pipe(decrypt) .on('error', handleErr) .pipe(convert) .on('error', handleErr) .pipe(encrypt) .on('error', handleErr) .pipe(compress) .on('error', handleErr) .pipe(writeToDisk) .on('error', handleErr) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 Handling errors (correctly) @loige55
  • 109. readable .on('error', handleErr) .pipe(decompress) .on('error', handleErr) .pipe(decrypt) .on('error', handleErr) .pipe(convert) .on('error', handleErr) .pipe(encrypt) .on('error', handleErr) .pipe(compress) .on('error', handleErr) .pipe(writeToDisk) .on('error', handleErr) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 readable .pipe(decompress) .pipe(decrypt) .pipe(convert) .pipe(encrypt) .pipe(compress) .pipe(writeToDisk) 1 .on('error', handleErr)2 3 .on('error', handleErr)4 5 .on('error', handleErr)6 7 .on('error', handleErr)8 9 .on('error', handleErr)10 11 .on('error', handleErr)12 13 .on('error', handleErr)14 Handling errors (correctly) @loige55
  • 110. readable .on('error', handleErr) .pipe(decompress) .on('error', handleErr) .pipe(decrypt) .on('error', handleErr) .pipe(convert) .on('error', handleErr) .pipe(encrypt) .on('error', handleErr) .pipe(compress) .on('error', handleErr) .pipe(writeToDisk) .on('error', handleErr) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 readable .pipe(decompress) .pipe(decrypt) .pipe(convert) .pipe(encrypt) .pipe(compress) .pipe(writeToDisk) 1 .on('error', handleErr)2 3 .on('error', handleErr)4 5 .on('error', handleErr)6 7 .on('error', handleErr)8 9 .on('error', handleErr)10 11 .on('error', handleErr)12 13 .on('error', handleErr)14 .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) readable1 2 .pipe(decompress)3 4 .pipe(decrypt)5 6 .pipe(convert)7 8 .pipe(encrypt)9 10 .pipe(compress)11 12 .pipe(writeToDisk)13 14 Handling errors (correctly) @loige   handleErr should end and destroy the streams (it doesn't happen automatically)   55
  • 111. readable .on('error', handleErr) .pipe(decompress) .on('error', handleErr) .pipe(decrypt) .on('error', handleErr) .pipe(convert) .on('error', handleErr) .pipe(encrypt) .on('error', handleErr) .pipe(compress) .on('error', handleErr) .pipe(writeToDisk) .on('error', handleErr) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 readable .pipe(decompress) .pipe(decrypt) .pipe(convert) .pipe(encrypt) .pipe(compress) .pipe(writeToDisk) 1 .on('error', handleErr)2 3 .on('error', handleErr)4 5 .on('error', handleErr)6 7 .on('error', handleErr)8 9 .on('error', handleErr)10 11 .on('error', handleErr)12 13 .on('error', handleErr)14 .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) .on('error', handleErr) readable1 2 .pipe(decompress)3 4 .pipe(decrypt)5 6 .pipe(convert)7 8 .pipe(encrypt)9 10 .pipe(compress)11 12 .pipe(writeToDisk)13 14 Handling errors (correctly) @loige   handleErr should end and destroy the streams (it doesn't happen automatically)   55
  • 113. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 stream.pipeline(...streams, callback) - Node.js 10+ @loige57
  • 114. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const { pipeline } = require('stream') pipeline( ) // stream-copy-gzip-pipeline.js1 2 3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 stream.pipeline(...streams, callback) - Node.js 10+ @loige57
  • 115. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const { pipeline } = require('stream') pipeline( ) // stream-copy-gzip-pipeline.js1 2 3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 createReadStream(src), createGzip(), createWriteStream(dest), // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 10 11 12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 stream.pipeline(...streams, callback) - Node.js 10+ @loige You can pass multiple streams (they will be piped) 57
  • 116. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const { pipeline } = require('stream') pipeline( ) // stream-copy-gzip-pipeline.js1 2 3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 createReadStream(src), createGzip(), createWriteStream(dest), // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 10 11 12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 function onEnd (err) { } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 20 )21 stream.pipeline(...streams, callback) - Node.js 10+ @loige You can pass multiple streams (they will be piped) 57
  • 117. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const { pipeline } = require('stream') pipeline( ) // stream-copy-gzip-pipeline.js1 2 3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 createReadStream(src), createGzip(), createWriteStream(dest), // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 10 11 12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 function onEnd (err) { } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 20 )21 if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 
const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 14 15 16 17 18 19 }20 )21 stream.pipeline(...streams, callback) - Node.js 10+ @loige You can pass multiple streams (they will be piped) The last argument is a callback. If invoked with an error, it means the pipeline failed at some point. All the streams are ended and destroyed correctly. 57
  • 118. // stream-copy-gzip-pump.js const pump = require('pump') // from npm const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pump( // just swap pipeline with pump! createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 For Node.js < 10: pump - npm.im/pump @loige58
  • 119. // stream-copy-gzip-pump.js const pump = require('pump') // from npm const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pump( // just swap pipeline with pump! createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const pump = require('pump') // from npm pump( // just swap pipeline with pump! ) // stream-copy-gzip-pump.js1 2 3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 For Node.js < 10: pump - npm.im/pump @loige58
  • 121. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 @loige60
  • 122. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 function createEncgz (secret) { } // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 25 @loige60
  • 123. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 function createEncgz (secret) { } // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 25 const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 18 19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 }25 @loige60
  • 124. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 function createEncgz (secret) { } // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 25 const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 18 19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 }25 const stream = 
pumpify(encryptStream, gzipStream) // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 21 stream.initVect = initVect22 23 return stream24 }25 @loige60
  • 125. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 function createEncgz (secret) { } // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 25 const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 18 19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 }25 const stream = 
pumpify(encryptStream, gzipStream) // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 21 stream.initVect = initVect22 23 return stream24 }25 return stream // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 24 }25 @loige60
  • 126. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 @loige61
  • 127. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 function createDecgz (secret, initVect) { } // encgz-stream.js (...continue from previous slide)1 2 3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 @loige61
  • 128. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 function createDecgz (secret, initVect) { } // encgz-stream.js (...continue from previous slide)1 2 3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 5 6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 @loige61
  • 129. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 function createDecgz (secret, initVect) { } // encgz-stream.js (...continue from previous slide)1 2 3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 5 6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const stream = pumpify(gunzipStream, decryptStream) return stream // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 8 9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 @loige61
  • 130. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 function createDecgz (secret, initVect) { } // encgz-stream.js (...continue from previous slide)1 2 3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 5 6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const stream = pumpify(gunzipStream, decryptStream) return stream // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 8 9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 module.exports = { createEncgz, createDecgz } // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 }10 11 12 13 14 15 @loige61
  • 131. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 @loige62
  • 132. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret] = process.argv // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige62
  • 133. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret] = process.argv // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const encgz = createEncgz(secret) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige62
  • 134. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret] = process.argv // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const encgz = createEncgz(secret) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 pipeline( ) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 21 @loige62
  • 135. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret] = process.argv // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const encgz = createEncgz(secret) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 pipeline( ) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 21 process.stdin, encgz, process.stdout, // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { 
createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 12 13 14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige62
  • 136. // decgz.js - CLI to gunzip and decrypt (from stdin to stdout) const { pipeline } = require('stream') const { createDecgz } = require('./encgz-stream') const [, , secret, initVect] = process.argv const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) pipeline( process.stdin, decgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 @loige63
  • 137. // decgz.js - CLI to gunzip and decrypt (from stdin to stdout) const { pipeline } = require('stream') const { createDecgz } = require('./encgz-stream') const [, , secret, initVect] = process.argv const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) pipeline( process.stdin, decgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret, initVect] = process.argv // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 10 pipeline(11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige63
  • 138. // decgz.js - CLI to gunzip and decrypt (from stdin to stdout) const { pipeline } = require('stream') const { createDecgz } = require('./encgz-stream') const [, , secret, initVect] = process.argv const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) pipeline( process.stdin, decgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret, initVect] = process.argv // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 10 pipeline(11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 const [, , secret, initVect] = process.argv6 7 8 9 10 pipeline(11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige63
  • 139. // decgz.js - CLI to gunzip and decrypt (from stdin to stdout) const { pipeline } = require('stream') const { createDecgz } = require('./encgz-stream') const [, , secret, initVect] = process.argv const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) pipeline( process.stdin, decgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret, initVect] = process.argv // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 10 pipeline(11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 const [, , secret, initVect] = process.argv6 7 8 9 10 pipeline(11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 pipeline( ) // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 const [, , secret, initVect] = process.argv6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 10 11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 21 @loige63
  • 140. // decgz.js - CLI to gunzip and decrypt (from stdin to stdout) const { pipeline } = require('stream') const { createDecgz } = require('./encgz-stream') const [, , secret, initVect] = process.argv const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) pipeline( process.stdin, decgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret, initVect] = process.argv // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 10 pipeline(11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 const [, , secret, initVect] = process.argv6 7 8 9 10 pipeline(11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 pipeline( ) // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 const [, , secret, initVect] = process.argv6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 10 11 process.stdin,12 decgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 21 process.stdin, decgz, process.stdout, // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 const [, , secret, initVect] = 
process.argv6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 10 pipeline(11 12 13 14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige63
  • 151. @loige EmojiStream Uppercasify DOMAppend  LEMON class EmojiStream extends Readable { _read() { // ... } }  BANANA 67
  • 152. @loige EmojiStream Uppercasify DOMAppend  LEMON class EmojiStream extends Readable { _read() { // ... } } class Uppercasify extends Transform { _transform( chunk, enc, done ) { // ... } }  BANANA 67