footer: © Node.University, 2018 theme: Simple slidenumbers: true build-lists: true [.hide-footer] [.slidenumbers: false]
(Hint: He wrote "You Don't Know JS" series)
^If you are looking for Kyle Simpson, you're in the wrong room
Azat Mardan and numbers
- 📚 14 books (not counting Korean, Chinese, Polish and Russian translations)
- 🎤 20+ conference talks in 2016-17
- 👨💻 200+ blog posts on Webapplog: https://webapplog.com
- ⌨ #239 most active GitHub contributor, higher than Paul Irish, Todd Motto, TJ Holowaychuk, John Papa, etc. (source)
- 🎓 19 online courses on Node University https://node.university
^Macy's, Intuit, Northwestern Mutual, Apple, DocuSign, UC Davis, Salesforce, The University of Arizona, The Orchard, M3, Twilio, Fox Studios, Michael Kors
- Read ~270 books in last 4 years
- Check social media only once per week
- Prefer coffee with butter instead of milk
- Live in San Francisco Bay Area
- Storify - startup which we sold
- DocuSign - 50M users
- Capital One - Fortune 500
Software helps people
+
Node is a good tool to build good software
=
I need to teach as many people Node as possible
Spike your interest in core Node features
^Docs are not very illustrative
^Good but too much tech details
^This allows processing other tasks while IO calls are unfinished like this ^Nginx vs. Apache ^Blocking I/O is expensive!
System.out.println("Step: 1");
System.out.println("Step: 2");
Thread.sleep(1000);
System.out.println("Step: 3");console.log('Step: 1')
setTimeout(function () {
console.log('Step: 3')
}, 1000)
console.log('Step: 2')console.log('Step: 1')
setTimeout(function () {
console.log('Step: 3')
// console.log('Step 5')
}, 1000);
console.log('Step: 2')
// console.log('Step 4')^This is in contrast to today's more common concurrency model where OS threads are employed. Thread-based networking is relatively inefficient and very difficult to use. Furthermore, users of Node are free from worries of dead-locking the process --- there are no locks
[Multi-threading] is the software equivalent of a nuclear device because if it is used incorrectly, it can blow up in your face.
// blocking.js
// Demonstrates how CPU-bound work blocks Node's single-threaded event
// loop: no timers or I/O callbacks can run until this loop finishes.
console.log('Step: 1')
for (var i = 1; i<1000000000; i++) {
// This will take 100-1000ms
}
console.log('Step: 2')var fs = require('fs')
var contents = fs.readFileSync('accounts.txt','utf8')
console.log(contents)
console.log('Hello Ruby\n')
var contents = fs.readFileSync('ips.txt','utf8')
console.log(contents)
console.log('Hello Node!')
//accounts.txt->Hello Ruby->ips.txt->Hello Node!var fs = require('fs')
// Non-blocking reads: fs.readFile only SCHEDULES the I/O and returns
// immediately, so the 'Hello ...' lines print before either file's
// contents arrive. NOTE(review): `error` is ignored here — acceptable
// for a demo, but real code must check it.
fs.readFile('accounts.txt','utf8', function(error, contents){
console.log(contents)
})
console.log('Hello Ruby\n')
// Both reads run concurrently, so their completion order is not
// guaranteed (accounts.txt may finish before or after ips.txt).
fs.readFile('ips.txt','utf8', function(error, contents){
console.log(contents)
})
console.log('Hello Node!')
//Hello Ruby->Hello Node->... accounts.txt->ips.txt or ips.txt->accounts.txt- Think faster
- Reuse code
- Learn quicker
- Array
- String
- Primitives
- Functions
- Objects
Now available everywhere in your code
- Access CLI input?
- Get system info: OS, platform, memory usage, versions, etc.?
- Read env vars (passwords!)?
// Classic "callback hell" / "pyramid of doom" example: every nested
// async call adds one indentation level, making control flow and error
// handling hard to follow. Shown as motivation for events/streams.
fs.readdir(source, function (err, files) {
if (err) {
console.log('Error finding files: ' + err)
} else {
files.forEach(function (filename, fileIndex) {
console.log(filename)
// `gm` (GraphicsMagick) reads the image size asynchronously.
gm(source + filename).size(function (err, values) {
if (err) {
console.log('Error identifying file size: ' + err)
} else {
console.log(filename + ' : ' + values)
// NOTE(review): `aspect` and `height` are assigned without var/let,
// so they become implicit globals; `widths`, `source`, and `dest`
// are presumably defined elsewhere — confirm against the caller.
aspect = (values.width / values.height)
widths.forEach(function (width, widthIndex) {
height = Math.round(width / aspect)
console.log('resizing ' + filename + 'to ' + height + 'x' + height)
// `.bind(this)` keeps the gm image object as `this` for resize().
this.resize(width, height).write(dest + 'w' + width + '_' + filename, function(err) {
if (err) console.log('Error writing file: ' + err)
})
}.bind(this))
}
})
})
}
})Events are part of core and supported by most of the core modules while more advanced patterns such as promises, generators, async/await are not.
- Subject
- Observers (event listeners) on a subject
- Event triggers
var events = require('events')
var emitter = new events.EventEmitter()In node.js an event can be described simply as a string with a corresponding callback.
emitter.on('done', function(results) {
console.log('Done: ', results)
})var events = require('events')
var emitter = new events.EventEmitter()
emitter.on('knock', function() {
console.log('Who\'s there?')
})
emitter.on('knock', function() {
console.log('Go away!')
})
emitter.emit('knock')// job.js
var util = require('util')
// Constructor for a Job that announces completion via a 'done' event
// (EventEmitter methods are mixed in with util.inherits on the next line).
var Job = function Job() {
// ...
this.process = function() {
// ...
// Emit on `this` (the Job instance). The original called `job.emit`,
// but no `job` variable exists in this module — that name lives in
// the consumer (weekly.js) — so process() threw a ReferenceError.
this.emit('done', { completedOn: new Date() })
}
}
util.inherits(Job, require('events').EventEmitter)
module.exports = Job// weekly.js
var Job = require('./job.js')
var job = new Job()
job.on('done', function(details){
console.log('Job was completed at', details.completedOn)
job.removeAllListeners()
})
job.process()emitter.listeners(eventName)
emitter.on(eventName, listener)
emitter.once(eventName, listener)
emitter.removeListener(eventName, listener)- Node Patterns: From Callbacks to Observer: http://webapplog.com/node-patterns-from-callbacks-to-observer
- https://github.com/azat-co/node-patterns
- Node.js Design Patterns, Second Edition by Mario Casciaro, Luciano Mammino
- Speed: Too slow, because it has to load all the data first
- Buffer limit: ~1Gb
- Overhyped (JK)
- Readable
- Writable
- Duplex
- Transform
- HTTP requests and responses
- Standard input/output (stdin&stdout)
- File reads and writes
process.stdin
Standard input streams contain data going into applications.
To listen in on data from stdin, use the data and end events:
// stdin.js
// Echoes chunks typed into standard input until the stream ends (Ctrl+D).
process.stdin.resume() // take stdin out of its initial paused state
process.stdin.setEncoding('utf8') // emit strings instead of Buffers
process.stdin.on('data', function (chunk) {
console.log('chunk: ', chunk)
})
process.stdin.on('end', function () {
console.log('--- END ---')
})$ node stdin.js
var readable = getReadableStreamSomehow()
readable.on('readable', () => {
var chunk
while (null !== (chunk = readable.read())) {
console.log('got %d bytes of data', chunk.length)
}
})^readable.read is sync but the chunks are small
process.stdout
Standard output streams contain data going out of the applications.
To write to stdout, use the write function:
process.stdout.write('A simple message\n')const http = require('http')
// HTTP server that streams the request body: chunks are transformed as
// they arrive, and the full payload is parsed once the stream ends.
var server = http.createServer( (req, res) => {
  req.setEncoding('utf8')
  // Accumulate the streamed body; the original parsed an undeclared
  // `body` variable in the 'end' handler, throwing a ReferenceError.
  var body = ''
  req.on('data', (chunk) => {
    body += chunk
    transform(chunk) // This function is defined somewhere else
  })
  req.on('end', () => {
    // Only now is the complete payload available for parsing.
    var data = JSON.parse(body)
    res.end()
  })
})
server.listen(1337)var r = fs.createReadStream('file.txt')
var z = zlib.createGzip()
var w = fs.createWriteStream('file.txt.gz')
r.pipe(z).pipe(w)^Readable.pipe takes writable and returns destination
Binary data type, to create:
Buffer.alloc(size)Buffer.from(array)Buffer.from(buffer)Buffer.from(str[, encoding])
Docs: http://bit.ly/1IeAcZ1
// buf.js
// Allocate a 26-byte buffer and fill it with the ASCII codes of 'a'-'z'.
var buf = Buffer.alloc(26)
for (var offset = 0; offset < 26; offset++) {
buf[offset] = 97 + offset // 97 is the char code of 'a'
}
console.log(buf) // <Buffer 61 62 63 ... 78 79 7a>
console.log(buf.toString('utf8')) // abcdefghijklmnopqrstuvwxyzbuf.toString('ascii') // outputs: abcdefghijklmnopqrstuvwxyz
buf.toString('ascii', 0, 5) // outputs: abcde
buf.toString('utf8', 0, 5) // outputs: abcde
buf.toString(undefined, 0, 5) // encoding defaults to 'utf8', outputs abcdefs.readFile('/etc/passwd', function (err, data) {
if (err) return console.error(err)
console.log(data)
});data is buffer!
$ node server-stream
// server-stream.js
app.get('/stream', function(req, res) {
var stream = fs.createReadStream(largeImagePath)
stream.pipe(res)
})$ node server-stream
http://localhost:3000/stream http://localhost:3000/non-stream
/stream responds faster!
X-Response-Time
~300ms vs. 3-5s
Stream automated workshop: https://github.com/substack/stream-adventure
$ sudo npm install -g stream-adventure
$ stream-adventure
https://github.com/substack/stream-handbook
- Master: starts workers
- Worker: do the job, e.g., HTTP server
Number of processes = number of CPUs
// cluster.js — master process forks one worker per CPU core.
var cluster = require('cluster')
var numCPUs = require('os').cpus().length
// The same file runs in both roles; cluster.isMaster tells them apart.
if (cluster.isMaster) {
for (var i = 0; i < numCPUs; i++) {
cluster.fork() // re-executes this file as a worker process
}
} else if (cluster.isWorker) {
// your server code
// NOTE(review): the closing token on the following slide line is `})` —
// it should be just `}` to terminate this else-if block.
})- Run
code/cluster.jswith node ($ node cluster.js). - Install
loadtestwith npm:$ npm install -g loadtest - Run load testing with:
$ loadtest http://localhost:3000 -t 20 —c 10
Press control+c on the server terminal
- Core cluster: lean and mean
- strong-cluster-control (https://github.com/strongloop/strong-cluster-control), or
$ slc run: good choice - pm2 (https://github.com/Unitech/pm2): good choice
https://github.com/Unitech/pm2
Advantages:
- Load-balancer and other features
- 0s reload down-time, i.e., forever alive
- Good test coverage
// Express server used to demonstrate pm2/cluster load distribution:
// each worker process counts its own requests in global.stats.
var express = require('express')
var port = 3000
global.stats = {} // request counts, keyed by worker process id
console.log('worker (%s) is now listening to http://localhost:%s',
process.pid, port)
var app = express()
app.get('*', function(req, res) {
// Increment this worker's request counter.
if (!global.stats[process.pid]) global.stats[process.pid] = 1
else global.stats[process.pid] += 1
// Response names the worker so the load balancer's spread is visible.
var l = 'cluster ' // typo fix: the original said 'cluser '
+ process.pid
+ ' responded \n'
console.log(l, global.stats)
res.status(200).send(l)
})
app.listen(port)Using server.js:
$ pm2 start server.js -i 0
In a new window:
$ loadtest http://localhost:3000 -t 20 -c 10
$ pm2 list
require('child_process').spawn()- large data, stream, no new V8 instancerequire('child_process').fork()- new V8 instance, multiple workersrequire('child_process').exec()- buffer, async, all the data at once
fs = require('fs')
process = require('child_process')
var p = process.spawn('node', 'program.js')
p.stdout.on('data', function(data)) {
console.log('stdout: ' + data)
})fs = require('fs')
process = require('child_process')
var p = process.fork('program.js')
p.stdout.on('data', function(data)) {
console.log('stdout: ' + data)
})fs = require('fs')
// exec() buffers the child's ENTIRE output and hands it to the callback
// at once (vs. spawn(), which streams it as it is produced).
// NOTE(review): assigning to `process` shadows Node's global process object.
process = require('child_process')
var p = process.exec('node program.js', function (error, stdout, stderr) {
if (error) console.log(error.code)
})- child_process.execFile()
- child_process.execSync()
- child_process.execFileSync()
^child_process.execFile(): similar to child_process.exec() except that it spawns the command directly without first spawning a shell. child_process.execSync(): a synchronous version of child_process.exec() that will block the Node.js event loop. child_process.execFileSync(): a synchronous version of child_process.execFile() that will block the Node.js event loop.
Event Loop: Async errors are harder to handle/debug, because system loses context of the error. Then, application crashes.
Try/catch is not good enough.
try {
throw new Error('Fail!')
} catch (e) {
console.log('Custom Error: ' + e.message)
}For sync errors try/catch works fine.
try {
setTimeout(function () {
throw new Error('Fail!')
}, Math.round(Math.random()*100))
} catch (e) {
console.log('Custom Error: ' + e.message)
}The app crashes!