This is a general overview of Node.js with its main commands and concepts

JavaScript Prerequisites Buffer and Streams Control flow (12%)
Child Process Diagnostics Error Handling
Node.JS CLI Events File System
Module System Process/Operating System Package.json
Unit Testing

Introduction

Install: from the official page or from some package manager (apt-get/brew)

nvm: Node version manager

$ nvm install 20                        // install version 20
$ node -v                               // check node version
$ npm -v                                // version of npm installed with node
$ node --help                           // list of possible options
$ node --v8-options                     // options to change engine
$ node --check myApp.js                 // check the syntax 
$ node -p "2+2"                         // Dynamic evaluation: p -> print -> print 4
$ node -e "2+2"                         // Dynamic evaluation: e -> eval  -> evaluate but not print     
$ node --require ./jsmodule.js myApp.js // preloading CommonJS module used in myApp.js
$ node --stack-trace-limit=101 myApp.js // print more lines from error (default is 10) 


JavaScript Prerequisites (7%)

Primitives in JS: null, undefined, Number, BigInt [1n], String, Boolean, Symbol [Symbol('description'), Symbol.for('namespace')]

  • null: absence of an object
  • undefined: absence of a defined value.
  • string: can use single or double quotes, or backticks (it is a template string)
  • Symbols: built-in object whose constructor returns a symbol which is unique. It can be used as unique identifier keys in objects[1]

Prototype: it is the built-in property that every object has. It is a mechanism by which JavaScript objects inherit features from one another. [2][3][4]

  function doSomething() {}
  doSomething.prototype.foo = "bar"; 
  const doSomeInstancing = new doSomething();
  doSomeInstancing.prop = "some value"; 
  console.log(doSomeInstancing);
  // Result
  { prop: "some value",
  [[Prototype]]: {
    foo: "bar",
    constructor: ƒ doSomething(),
    [[Prototype]]: {
      constructor: ƒ Object(),
      hasOwnProperty: ƒ hasOwnProperty(),
      isPrototypeOf: ƒ isPrototypeOf(),
      propertyIsEnumerable: ƒ propertyIsEnumerable(),
      toLocaleString: ƒ toLocaleString(),
      toString: ƒ toString(),
      valueOf: ƒ valueOf()
    }}}
  

Function: is an object

  • Function assigned to an object: 'this' refers to the object on which the function is called. The 'this' context changes to the object that calls the function.
  • The call method that exist to every function can be used to set the 'this' context
  • Lambda functions (arrow function) do not have their own this context. 'this' inside the lambda function is the context of the nearest parent non-lambda function
  • Lambda function does not have prototype: typeof normalFunc.prototype is 'object' and typeof lambdaFunc.prototype is 'undefined'
// Ex1: creating as a function
// Constructor function: called with `new`, `this` is the freshly created object.
const A = function(att1, att2) {
    this.att1 = att1; // FIX: was `this.att2 = att1`, which left att1 undefined
    this.att2 = att2;
}
// Methods on the prototype are shared by all instances (one function object).
A.prototype.myMethod = function () {
    this.att2 += 10;
    console.log(`${this.att1} :: ${this.att2}`);
}
const objA = new A('A is', 2);
objA.myMethod(); // prints "A is :: 12"

// Ex2: creating as a class (syntactic sugar over the prototype mechanism)
class B {
  constructor(att1, att2){
    this.att1 = att1; // FIX: was `this.att2 = att1`, which left att1 undefined
    this.att2 = att2;
  }
  // Defined on B.prototype, shared by all instances.
  myMethod() {
    this.att2 += 10;
    console.log(`${this.att1} :: ${this.att2}`);
  }
}
const objB = new B('B is', 3);
objB.myMethod(); // prints "B is :: 13"

Prototypal Inheritance: it is a chain of prototypes. It can be created with different approaches: functional, constructor functions and class-syntax constructors. When you use MyObject.prototype.myFunction, there is a single myFunction object shared by all objects created from MyObject. It is bad practice to create a method inside the constructor function (one copy per instance).

Functional

'use strict'

// Functional prototypal inheritance: build the chain with Object.create.
// Resulting chain: c -> b -> a -> Object.prototype

// Base object; its prototype is Object.prototype.
const a = {
  attA() { console.log(`${this.name}: A`) },
}

// b's [[Prototype]] is a (b.__proto__ === a).
// The second argument of Object.create is a property-descriptor map.
const b = Object.create(a, {
  attB: {
    value() { console.log(`${this.name}: B`) },
  },
})

// c's [[Prototype]] is b; `name` is an own property of c.
const c = Object.create(b, { name: { value: 'C' } })

c.attB() // prints "C: B" (attB found on b, `this` is c)
c.attA() // prints "C: A" (attA found on a, `this` is c)

console.log(Object.getPrototypeOf(c) === b) //true
console.log(Object.getPrototypeOf(b) === a) //true

// A descriptor's keys may be 'value', 'get' and 'set'
// (plus writable/enumerable/configurable);
// inspect them with Object.getOwnPropertyDescriptor.

Constructor function: instances are created and attached to the prototype chain by the use of the 'new' keyword

'use strict'
// Constructor-function inheritance.
// Chain: c -> B.prototype -> A.prototype -> Object.prototype
function A (name){ this.name = name}
A.prototype.attA = function() {console.log(this.name + ':A')}

// B delegates construction to A (like super(...) in class syntax)
function B(name){A.call(this, name + ':B-A')}
B.prototype.attB = function() {console.log(this.name + ':B')}

// Set the `[[Prototype]]` of `B.prototype` to `A.prototype`
Object.setPrototypeOf(B.prototype, A.prototype)

// c -> B.prototype -> A.prototype -> Object.prototype
const c = new B('NEW');

c.attB() // NEW:B-A:B
c.attA() // NEW:B-A:A

console.log(Object.getPrototypeOf(B) === A.prototype) //false (B itself links to Function.prototype)
console.log(Object.getPrototypeOf(c) === B.prototype) //true
console.log(Object.getPrototypeOf(c) === A.prototype) //false (A.prototype is one step further up)
console.log(Object.getPrototypeOf(B) === Object.getPrototypeOf(A)) //true (both are Function.prototype)

// B.prototype is the prototype of objects created with `new B()`
// __proto__ is how JS exposes the internal link from c to B.prototype
console.log(c.__proto__ === B.prototype) //true (FIX: was misspelled `B.protptype`, which made this false)
console.log(B.prototype.isPrototypeOf(c)) //true
console.log(B.prototype.isPrototypeOf(B)) //false
console.dir(B.prototype.constructor)

// [[Prototype]] designates the prototype of an object
// func.prototype specifies the [[Prototype]] assigned to all instances created by `new func()`

Class-Syntax Constructor: a class is syntactic sugar that creates a function and the corresponding prototype chain. All methods created in the class are added to the prototype object.

// class syntax builds the same prototype chain: c -> B.prototype -> A.prototype
class A {}
class B extends A {}
const c = new B()

const instanceProto = Object.getPrototypeOf(c)
const parentProto = Object.getPrototypeOf(B.prototype)
console.log(instanceProto === B.prototype) //true
console.log(parentProto === A.prototype) //true

// function constructor  vs constructor
A(att){this.att = att}  <--> class A {constructor(att){this.att = att}}
A.call(this, name + ':A') <--> super(name + ':A')


Buffer and Streams (11%)

Buffer

Definition:Buffer objects are used to represent a fixed-length sequence of bytes. [1]

Allocation

// allocate a buffer of 10 bytes with zeros
$ node -p "Buffer.alloc(10)"       // <Buffer 00 00 00 00 00 00 00 00 00 00> :: hexadecimal
// allocate a buffer of 10 bytes with 1
$ node -p "Buffer.alloc(10,1)"     // <Buffer 01 01 01 01 01 01 01 01 01 01> :: hexadecimal
// Creates a Buffer containing the bytes [1, 2, 3]
$ node -p "Buffer.from([1, 2, 3])" // <Buffer 01 02 03>  

Creating a buffer with 'new Buffer()' is deprecated. Buffer.allocUnsafe allocates uninitialized memory, so the buffer can contain fragments of previously deleted data; it exists for performance purposes.

Arrays

Different views of a buffer: [2][3]

  • ArrayBuffer -> Float64Array -> 8 bytes -> 64-bit floating point number
  • Int32Array -> 4 bytes -> 32 bits -> signed integer
  • Uint8Array -> each byte is unsigned integer (0-255)
  • Buffer is subclass of Uint8Array: an object is Buffer and Uint8Array
  • Buffer.prototype.slice (buffer instance with reference to original data) overrides Uint8Array.prototype.slice (do a copy)
// Two ways to build a Buffer from a typed array:
const arr = new Uint16Array(2);

// Buffer.from(typedArray) copies the element values of `arr`.
const buf1 = Buffer.from(arr);

// Buffer.from(arrayBuffer) shares the underlying memory with `arr`.
const buf2 = Buffer.from(arr.buffer);

// Iteration: a Buffer is iterable byte by byte.
const buf = Buffer.from([1, 2, 3]);
buf.forEach((byte) => console.log(byte));

Strings

Created from string:

// the default encode is UTF8
// Cannot assume the string length match the converted buffer size
$ node -p "Buffer.from('Hello')"  // <Buffer 48 65 6c 6c 6f> :: charaters converted to byte

Converting Buffers to String:

// A Buffer created from a string can be rendered back in several encodings.
const buffer = Buffer.from('Hello')
console.log(buffer) // prints <Buffer 48 65 6c 6c 6f>
const asUtf8 = buffer.toString()
console.log(asUtf8) // prints Hello
console.log(`${buffer}`) // prints Hello (string coercion calls toString)
console.log(buffer.toString('hex')) // prints 48656c6c6f
console.log(buffer.toString('base64')) // prints SGVsbG8=

Buffer instance can be represented by a JSON:

$ node -p "Buffer.from('Hello').toJSON()"          // { type: 'Buffer', data: [ 72, 101, 108, 108, 111 ] }
$ node -p "JSON.stringify(Buffer.from('Hello'))"   // {"type":"Buffer","data":[72,101,108,108,111]}

Streams

Definition: A stream is an abstract interface for working with streaming data in Node.js. Streams can be readable, writable, or both. All streams are instances of EventEmitter.[4][5]

  • Types of Streams: Readable, Writable, Duplex, Transform
  • Examples APIs that expose streams: process, net, http and fs, child_process expose.
  • The Stream constructor is the default export of the stream module and inherits from the EventEmitter constructor from the events module.
  • The Stream constructor implements the pipe method
  • Events emitted: data (Readable), end (Readable), finish (Writable), close (when destroyed), error
  • Stream Mode (option when stream is instantiated): (1) Binary Streams: default, only read or write Buffer instances; (2) Object stream, which can read/write JS objects and primitives (but not null)

Readable Stream:

  • EventEmitter > Stream > Readable
  • As data becomes available, a readable stream emits a data event.
  • Readable streams emit buffers by default
  • Set objectMode to true to not use buffer
// USING FILE

'use strict'
const fs = require('fs')
// Creates a Readable stream over this file; a 'data' event is emitted for each
// chunk that is read (up to 16 KiB of data is read per chunk by default).
// FIX: was the garbled `_ _ filename`; the module-scope variable is `__filename`.
const readable = fs.createReadStream(__filename)
readable.on('data', (data) => { console.log(' got data', data) })
readable.on('end', () => { console.log(' finished reading') })

// UTILITY METHOD
// Readable.from sets objectMode to true by default
Readable.from(['some', 'data', 'to', 'read'])

Writable Streams:

  • EventEmitter > Stream > Writable
  • Emit the finish event when the stream is ended
  • Take the string inputs, convert them to Buffer instance and then write them
  • The default objectMode is false and the strings written to writable instance is converted to buffer
  • The objectMode have to be set to true to support string
'use strict'
const fs = require('fs')
// Writable stream over the file './out' (created or truncated on open).
const writable = fs.createWriteStream('./out')
// 'finish' fires after end() is called and all buffered data has been flushed.
writable.on('finish', () => { console.log('closing the file') })
writable.write('First Line\n')
writable.write('Second Line\n')
// writable.write(99) -> If use this line it will throw an error because objectMode is false
// end() optionally writes a final chunk, then closes the stream (triggers 'finish').
writable.end('The End!')

// Reading the result
$ node app.js
~ closing the file
$ node cat out
First Line
Second Line
The End!%  

Readable-Writable Streams:

  • Duplex, Transform[6][7] and PassThrough
  • Readable > Duplex. Duplex mixes in functionality from Writable
  • Duplex > Transform -> Causal relationship between read and write: data is written to the transform instance, then a data event is emitted on the readable side once the data has been transformed
  • Transform > PassThrough. A Transform stream that passes data through unchanged; useful where a stream is required but no transformation is needed.

End of Streams: it can be identified by some events as close, error, finish or end. Then the resource can be deallocated.

Piping Streams:

  • Example a common Bash command: cat some-file | grep find-something
  • The pipe [8] return a stream then can be done a chain of pipe calls.
  • Pipeline[9] is the correct way to chain pipes to avoid memory leaks in case of fails.
  • readable streams -> writes to the writable streams
const fs = require("fs");
const { Transform } = require("stream");

// Source: read ./file.txt in chunks.
const source = fs.createReadStream("./file.txt");
// Sink: write the transformed chunks to ./transformedData.txt.
const sink = fs.createWriteStream("./transformedData.txt");

// Transform stream that upper-cases every chunk flowing through it.
const toUpperCase = new Transform({
  transform(chunk, encoding, callback) {
    const upper = chunk.toString().toUpperCase();
    callback(null, upper);
  },
});

// pipe() returns the destination stream, so the calls can be chained.
source.pipe(toUpperCase).pipe(sink);


Control flow (12%)

Sync: the thread is blocked until the process is finished. Async: A second process can run while the first process wait for a return. It uses the callback functions. [1][2][3]

Event Loop [4][5]: node is single thread. It is responsible for executing the code, collecting and processing events, and executing queued sub-tasks.


Child Process (8%)

Methods in the child_process module that result in a process: exec & execSync; spawn & spawnSync; execFile & execFileSync (variations of exec and execSync); fork (a specialization of spawn)[1][2][3][4]. For all of them it is possible to use env and cwd as arguments.

execSync: execute a command and return a buffer with the child process output.

'use strict'
const { execSync } = require('child_process')
// execSync blocks until the command finishes and returns its stdout as a Buffer.
const output = execSync(`node -e "console.log('Hello!')"`)
console.log(output.toString())

// Using process.execPath makes the subprocess run with the same Node binary/version as the parent.
execSync(`${process.execPath} -e "console.error('Hello!')"`)

exec: it is an async function that splits STDOUT and STDERR and passes them to the callback

// exec: asynchronous; buffers STDOUT and STDERR separately and passes them to the callback.
const { exec } = require('child_process') // FIX: exec was never brought into scope
exec(`"${process.execPath}" -e "console.log('SubprocessSTDOUT');console.error('SubprocessSTDERR')"`,
  (err, stdout, stderr) => {
    console.log('err', err) // it's null because the subprocess exited with code 0
    console.log('subprocess stdout: ', stdout.toString())
    console.log('subprocess stderr: ', stderr.toString())
  }
) // FIX: this closing parenthesis of the exec(...) call was missing

// If change from 'console.error' to 'throw Error' then the err in callback will not be null; and STDOUT and STDERR buffer is not there.
// Also, error will have the stacks from the subprocess and from the parent process.

spawn & spawnSync: they take an executable path and an array of flags, and return info about the process that was spawned. spawn does not accept a callback and does not buffer the child process output.

'use strict'
const { spawnSync } = require('child_process')
// spawnSync takes the executable path plus an argv array (no shell involved) and
// returns an object describing the finished process (pid, status, stdout, stderr, ...).
const result = spawnSync(
  process.execPath,
  ['-e', `console.log('Hello Suubprocess!!!')`]
)
console.log(result)
// stdout is a Buffer; convert it to inspect the subprocess output.
console.log(result.stdout.toString())

// In case use process.exit(1) the stderr buffer will be empty, but if use throw exception it will not be empty

***PS: The asynchronous methods return a ChildProcess instance with the stdin, stdout and stderr streams of the subprocess. However, this behaviour can be changed.[5][6][7][8]


Diagnostics (6%)

debug: inspect code [1][2][3][4][5][6][7]

// Chrome detect the debugger process
// chrome://inspect -> Remote Target -> Inspect -> DevTools is open connected with Node process -> Sources tab
// "Pause on caught exceptions" can be checked to stop in point of the exception
$ node --inspect app.js.        

// stop in active breakpoints
// breakpoints can be added by selecting a line in the code by DevTools or adding the key word debugger
$ node --inspect-brk app.js.    


Error Handling (8%)

Error is a runtime error or an object created programmatically [1][2][3][4]

Inherit from Error: EvalError, SyntaxError, RangeError, ReferenceError, TypeError, URIError

// 1 Throwing an error: 
throw new Error('Error!')

// 2 Trace
--trace-uncaught to track the exception

// 3 Check the type
err instanceof RangeError

// 4 Customize attributes of the Error
const err = Error('ERROR!!!')
err.code = 'ERR_MUST_BE_EVEN'
throw err

// 5 Customize by extension
// Custom error type: extend Error and expose a fixed `name`.
class MyError extends Error {
  constructor (varName = '') {
    // Build the message from the offending variable name.
    super(`${varName} is an error`)
  }

  // Read-only name reported by err.name / err.toString().
  get name () {
    return 'MyError'
  }
}

// 6 Try/Catch: synchronous function

// 7 Rejections: asynchronous
new Promise((resolve, reject) => {
    if (/* error condition */) {
      reject(new TypeError('Error!!!'))
      return
    }
    resolve(/* result value */)
  })

// 7 Rejection with catch
myTask().then((result) => {
    throw Error('Error!')
  }).catch((err) => {...})

// 8 Async Try/Catch
async function run () {
  try {
    await myTask() // FIX: was the typo `cawait`
  } catch (err) {
    // handle the error
  }
}


Node.JS CLI (4%)

The CLI [1][2] is the command line that make possible, e.g., to debug and execute scripts.

// Manual page in terminal
$ man node


Events (11%)

An event [1][2] is an action on a computer. It can be fired, created or listened to via the events module. To emit and listen for events you use an EventEmitter object.

const { EventEmitter } = require('events')

// FIX: the emitter itself was never created in the original snippet.
const myEmitter = new EventEmitter()

myEmitter.on('add', (a, b) => { console.log(a + b) }) // 1. assign a listener
// myEmitter.addListener('add', () => {})             // 2. assign a listener
// myEmitter.prependListener('add', () => {})         // 3. assign and put listener on top position

myEmitter.emit('add', 7, 6) // prints 13
myEmitter.emit('add', 1, 2) // prints 3

myEmitter.removeAllListeners('add')                   // remove all listeners for 'add'
//myEmitter.removeListener('my-event', myListenerRef) // remove a specific listener

// Register the 'error' listener BEFORE emitting: emitting 'error' with no
// listener throws the Error and crashes the process.
myEmitter.on('error', () => {})                       // makes the process not crash
myEmitter.emit('error', new Error('Error'))           // FIX: removed the stray trailing '.'

// If you want to create your own emitter type:
// class MyEmitter extends EventEmitter {}

Listeners are called in the order that they are registered and they will be invoked every time the named event is emitted. To run only once you can use the myEmitter.once(...) method. We can use an AbortController to cancel the promisified listener; it is useful when an event may never be emitted or takes too long.

...
const ac = new AbortController()
const { signal } = ac

setTimeout(500).then(() => ac.abort())

try {
  await once(myEmitter, 'my-event', { signal })
  ...
} catch (err) {
  ...
  if (err.code !== 'ABORT_ERR') throw err
  console.log('canceled')
}

File System (8%)

Node.js File System is in FS module used to handle file operations

Path module: path manipulation and normalization across platforms [1]

path.join('foo', 'bar', 'baz') -> 'foo/bar/baz' or 'foo\\bar\\baz' 
path.isAbsolute
path.relative
path.resolve 
path.normalize
path.format

file located: variables that are always present in every CommonJS module: __filename (absolute path to the current file) and __dirname (absolute path to the current directory).

const { basename, dirname } = require('path')
// FIX: the garbled `_ _ filename` is the module-scope variable `__filename`.
console.log('filename basename:', basename(__filename))
console.log('filename dirname:', dirname(__filename))

Watching: fs.watch method is in Node core to tap into file system events.[2]

watch('.', (evt, filename) => {
  console.log(evt, filename)
})

File Metadata: fs.stat, fs.statSync, fs/promises stat, fs.lstat, fs.lstatSync, fs/promises lstat. Ex: statSync(filename)

Reading Directories: it is a type of file. The fs module also provides multiple ways to read a directory: Synchronous, Callback-based, Promise-based. Ex: readdirSync, readdir[3] which list the files inside the folder.

fs module: provides APIs to deal with the business of reading, writing, file system meta-data and file system watching [4]

1 Synchronous: block anything else until the process is finished.

  • readFileSync(__filename) -> read content into a buffer
  • const contents = readFileSync(__filename, {encoding: 'utf8'})
  • writeFileSync(join(__dirname, 'out.txt'), contents.toUpperCase())
  • For a better performance with Node to manage I/O in background while the process is in execution, the alternatives are the callback and promise based filesystem APIs.

2 Callback based the execution is free to continue while the operation is performed. The callback function is called when the operation is completed.

readFile(__filename, {encoding: 'utf8'}, (err, contents) => {
  // handle err, then use contents
  writeFile(out, contents.toUpperCase(), (err) => { /* handle err */ })
})

3 Promise based: it is the same async process but using async/await, which makes the code easier to read. The methods return promises.

const { join } = require('path')
const { readFile, writeFile } = require('fs/promises')
// Promise-based fs: read this file, upper-case it, write out.txt next to this module.
// FIX: the garbled `_ _ filename` / `_ _ dirname` are `__filename` / `__dirname`.
async function myFunc () {
  const contents = await readFile(__filename, {encoding: 'utf8'})
  await writeFile(join(__dirname, 'out.txt'), contents.toUpperCase())
}
myFunc().catch(console.error)

4 Stream based: It is good for large files. The fs module has createReadStream(__filename)[5] and createWriteStream('out.txt')[6] methods to read and write files in chunks. The memory usage stays constant because the file is read and written in small chunks. [7][8]


Process/Operating System (6%)

The process object provides information about, and control over, the current Node.js process.[1][2]. Node.js is a single-threaded, non-blocking performance and works for a single process.

  • process.stdin - Readable stream for process input
  • process.stdout - Writable stream for process output
  • process.stderr - Writable stream for process error output
  • process.exit - process finish ok. Any non-zero code indicates failure
  • process info - ID (process.pid) , Platform (process.platform), Directory (process.cwd()), Environment variables (process.env.HOME)
  • process stats - resource usage (process.uptime(), process.cpuUsage(), process.memoryUsage())

System Info: the os module is used to get info about the Operating System

  • os.hostname(), os.homedir(), os.tmpdir(), os.platform, os.type
  • os.uptime - time the system has been running
  • os.freemem - available system memory
  • os.totalmem - total system memory

Module System (7%)

The modules are block of code that can be exposed to be reused.[1][2]

module.exports will be the value that is returned when the module is required.

  • Modules expose functionalities
  • File can be a module
  • ESM: language-native EcmaScript Modules
  • CJS: CommonJS
  • An important difference between CJS and ESM is that CJS loads every module synchronously and ESM loads every module asynchronously.
  • Faux-ESM is transpiled with Babel. In Node it is compiles to CommonJS. In browser use synchronous loader.
  • Whereas CJS modifies a module.exports object, ESM introduces native syntax.
  • npm ecosystem is built upon CommonJS format.

Package.json (6%)

npm is a CLI tool installed with Node.JS and used as a package manager. Another well known is yarn.

A package[1][2] is a folder with a package.json file in it. It is used to initialize a Node.JS project.

The package-lock.json file is used to install the exact same dependencies. It has the exact versions.

npm test and start is alias to npm run test and npm run start.

SemVer: the format is three numbers separated by dots (Major.Minor.Patch). A Major bump means breaking changes, a Minor bump adds something new, and a Patch is a bugfix. Ex: ^8.14.1 means >=8.14.1 <9.0.0, i.e. the MINOR and PATCH positions may vary.

scripts is a field inside the package.json file with alias to execute commands.

$ npm init    // create package.json

$ npm init -y // shorter way to accept default values

$ npm install pino // install dependency pino

npm install --save-dev standard // install dependency standard as dev dependency
npm install --omit=dev // ignore dev dependency

$ npm ls // describe the dependency tree of a package
$ npm ls --depth=999 // show the dependency tree down to the given depth

$ npm run lint // run a command in script field inside the package.json file
$ npm test     // execute tests from scripts. 'test' and 'start' don't need the 'run' command

Unit Testing (6%)

The core assert module exports a function that will throw an AssertionError when the sentence is not valid. The categories are Truthiness, Equality and Pattern Matching, Deep equality, Errors and Unreachability. [1][2][3][4]

assert.ok(val)                                // same as assert(val)
assert.equal(val1, val2)                      // val1 == val2
assert.notEqual(val1, val2)                   // val1 != val2
assert.strictEqual(val1, val2)                // val1 === val2
assert.notStrictEqual(val1, val2)             // val1 !== val2
assert.deepEqual(obj1, obj2)                  // all values in an object
assert.notDeepEqual(obj1, obj2)               // for all values in an object
assert.deepStrictEqual(obj1, obj2)            // for all values in an object
assert.notDeepStrictEqual(obj1, obj2)         // for all values in an object
assert.throws(function)                       // a function should throws
assert.doesNotThrow(function)                 // a function should not throws
assert.rejects(promise|async function)        // assert promise or returned promise rejects
assert.doesNotReject(promise|async function)  // assert promise or returned promise resolves
assert.ifError(err)                           // check that an error object is falsy (null or undefined)
assert.match(string, regex)                   // test a string against a regular expression
assert.doesNotMatch(string, regex)            // test that a string fails a regular expression
assert.fail()                                 // force an AssertionError to be thrown

expect(add(2, 2)).toStrictEqual(4)            // Using library 'expect' (https://jestjs.io/docs/expect)

Examples of frameworks to test: tap, jasmine, mocha, jest.

// tap
// FIX: `equal` was used in the body but only `throws` was destructured from the test context.
test('check numbers', async ({ throws, equal }) => {
  throws(() => add('5', '5'), Error('inputs must be numbers'))
  equal(add(5, 5), 10)
})

// in package.json add into scripts section:
"test": "tap"

// jest
// Jest equivalent of the tap example: expect() supplies the matchers.
test('check numbers', async () => {
  // The arrow wrapper is required: toThrowError must invoke the function itself.
  expect(() => add('5', '5')).toThrowError(
    Error('inputs must be numbers')
  )
  expect(add(5, 5)).toStrictEqual(10)
})

// in package.json add into scripts section:
"test": "jest --coverage"

References