Node.js Microservices
gulp.js is workflow management for node.js like azkaban for hadoop $ node >1 + 2 3 >function add(a,b){return a+b} undefined > add(1,2) 3 html boilerplate app.param $ npm search # list all available packages $ npm search markdown # will list all markdown packages $ npm init # to initialize package.json $ npm update # to update modules for this project $ sudo npm update -g # update all global modules $ npm prune # remove any packages you've removed from your dependency list //print attributes and functions of an object
Non-blocking I/O: Whenever we want to do a blocking I/O operation we register a callback function - hence making it non-blocking! So we don't actually wait for bit.ly to return with a response. The Reactor is what handles the non-blocking I/O.
In JavaScript you have document which is the DOM but in node.js you have process. server $ vim web-server.js var http = require("http") var s = http.createServer(function(req, res){ //has a callback function which is called whenever a request is received at the server. res.writeHead(200, {"content-type" : "text/plain"}) res.end("hello world.\n") }) s.listen(8000) $ node web-server.js $ curl http://localhost:8000/ hello world. $ curl -i http://localhost:8000/ HTTP/1.1 200 OK content-type: text/plain Date: Wed, 18 Sep 2013 17:51:27 GMT Connection: keep-alive there is persistent connection to the webserver Transfer-Encoding: chunked hello world. Transfer-Encoding: chunked $ vim web-server-chunked.js var http = require("http") var
s = http.createServer(function(req, res){ res.writeHead(200, {"content-type" : "text/plain"}) res.write("hello\n") //writes and doesn't wait for all to be ready setTimeout(function(){ res.end("world.\n") }, 2000) }) if we set content-length=12 it didn't work. we don't want to buffer it. but proxy it to client. send directly back to client. if we make two curls at the same time both of them work simultaneously. otherwise if we were sleeping one would have to wait for the other to finish. TCP server var net = require('net') var sockets = [] var s = net.Server(function(socket){ sockets.push(socket) socket.on('data', function(d){ for(var i = 0; i < sockets.length; i++){ if(sockets[i] == socket) continue sockets[i].write(d) } }) socket.on('end', function(){ var i = sockets.indexOf(socket) sockets.splice(i, 1) }) }) //createServer/ $ nc localhost 8000 ================ npm (node package manager) is equivalent to gem for ruby express.js is a web framework. array.join(" ") //concatenate array elements
Module: module.exports in a javascript file. module.exports = function(){} ........... //created exported fcn Then when we say var my_fcn = require('./myfilename') //require: if there is a file with that name or a folder with that name and an index.js file in it we can do my_fcn() exports.mypropertyname exports.myfcn1 = function(){} exports.myfcn2 = function(){} then: var a = require('./myfilename') a.myfcn1() a.myfcn2() Any variable defined in js files will be global and when you require a module you can reset those variables. to make it local define it using var. you can also pass a parameter to a module, like: // lib.js module.exports = function(options) {var app = options.app;var param2 = options.param2;}; // somefile require("lib.js")(params); OR // lib.js module.exports = function(app, param2) { } // somefile require("lib.js")(app, param2) Create a module to get and set flight data var number, origin, destination; exports.setNumber = function (num) { number = num; }; exports.setOrigin = function (o) { origin = o; }; exports.setDestination = function (d) { destination = d; }; exports.getInfo = function() { return { //in line literal object number: number, origin: origin, destination: destination }; }; later we can do var a = require('./myfile'); a.setDestination('temp') var os = require('os'); console.log('This code is running on: ' + os.type());
for static variables of an object define object in a file and use vars in the file which are local to the file (module) for handling static stuff. Streams & file system: var fs = require('fs'); var stream = fs.createReadStream('data.json'); //create a stream out of data.json instead of reading all at once and storing it in memory stream.on('data', function (chunk) { console.log('----------------begin chunk----------------'); console.log(chunk.toString()); console.log('----------------end chunk----------------'); }); stream.on('data', function (chunk) { console.log('CHUNK LENGTH WAS: ' + chunk.length); }); stream.on('end', function () { console.log('----------------reached file end----------------'); }); Pause a stream stream.pause() setTimeout(function() { console.log('resuming...'); stream.resume(); }, 1000); Piping readable data into writable streams create a copy of the data var fs = require('fs'); var stream = fs.createReadStream('data.json'), writable = fs.createWriteStream('copy.json'); stream.pipe(process.stdout); stream.pipe(writable); Duplex (both readable and writable streams) var net = require('net'), fs = require('fs'); var server = net.createServer(function (connect) { var log = fs.createWriteStream('eli.log'); console.log('Connection established'); connect.on('end', function() { console.log('Connection ended'); }); connect.write("Welcome to our airline customer hotline.\r\n"); connect.write("We call it ELI: the Electronic Listening Interface.\r\n"); connect.write("We'll repeat back your message and log it for further review.\r\n"); connect.pipe(connect).pipe(log); }); server.listen(7777, function() { console.log('Server ready on port 7777'); }); $ telnet localhost 7777 # will let you write stuff for it