# Module System
# Export
Set properties on the module.exports object directly.
// module1.js
module.exports.CONST1 = 1;
module.exports.func1 = function func1() {};
module.exports.func2 = function func2() {};
Use shorthand syntax.
// module1.js
exports.CONST1 = 1;
exports.func1 = function func1() {};
exports.func2 = function func2() {};
We can also assign the whole module.exports object in one step.
// module1.js
const CONST1 = 1;
function func1() {}
function func2() {}

module.exports = {
  CONST1,
  func1,
  func2,
};
# Import
// module2.js
const {
  func1,
  func2,
} = require('./path/to/module1');

func1();
func2();
# ECMAScript Modules
Set "type: module" in the package.json
Replace require function calls with import statement, and module exports with export statement.
// module1.js
function func1() {}
function func2() {}

export {
  func1,
  func2,
};

// module2.js
import {
  func1,
  func2,
} from './path/to/module1.js';
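Besides named exports, ES modules also support a single default export; a minimal sketch:
// module1.js
export default function func1() {}

// module2.js (file name and path are placeholders)
import func1 from './module1.js';

func1();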
# Package Management
# NPM Shorthands
Install package.json dependencies
npm install
Update npm
npm install -g npm@latest
Install package
npm install <package-name>
Update a package
npm update <package-name>
List outdated global packages
npm outdated -g --depth=0
Uninstall package
npm uninstall <package-name>
Run available scripts
npm run <script-name>
List versions of installed packages
npm list
# Initialize
npm init
Accept all the default options:
npm init -y
# Development Dependencies
Packages needed only during development are saved as development dependencies, keeping the production install small.
npm install <package-name> --save-dev
# Middleware
Middleware works with the incoming request and the outgoing response. Calling next passes control to the next middleware in the chain.
app.use((req, res, next) => {
  next();
});
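For example, a simple logging middleware (a minimal sketch, assuming an Express app) runs for every request and then passes control on with next:
// Log the method and URL of every incoming request.
app.use((req, res, next) => {
  console.log(`${req.method} ${req.url}`);
  next(); // hand control to the next middleware or route handler
});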
# Web Server
Initialize
const PORT = 8000;
const http = require('http');

const server = http.createServer((req, res) => {
  // Handle the request and write the response here.
});

server.listen(PORT, () => {
  console.log(`Listening on port ${PORT}...`);
});
# Parameterized URLs
Handle a GET request on the server.
server.on('request', (req, res) => {
  const items = req.url.split('/');
  if (req.method === 'GET' && items[1] === 'friends') {
    res.statusCode = 200;
    res.setHeader('Content-Type', 'application/json');
    if (items.length === 3) {
      // e.g. /friends/2 -> return a single friend by index
      const friendIndex = Number(items[2]);
      res.end(JSON.stringify(friends[friendIndex]));
    } else {
      // e.g. /friends -> return the whole collection
      res.end(JSON.stringify(friends));
    }
  } else {
    res.statusCode = 404;
    res.end();
  }
});
Handle a POST request on the server.
server.on('request', (req, res) => {
  const items = req.url.split('/');
  if (req.method === 'POST' && items[1] === 'friends') {
    req.on('data', (data) => {
      const friend = data.toString();
      console.log('Request:', friend);
      friends.push(JSON.parse(friend));
    });
    // Echo the request body back as the response.
    req.pipe(res);
  } else {
    res.statusCode = 404;
    res.end();
  }
});
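To exercise the POST endpoint from another Node process, here is a minimal sketch using the built-in http module (the host, port, and JSON payload are assumptions):
const http = require('http');

const req = http.request({
  hostname: 'localhost',
  port: 8000,
  path: '/friends',
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
}, (res) => {
  res.pipe(process.stdout); // print the echoed response
});

// Send the request body and finish the request.
req.end(JSON.stringify({ name: 'New Friend' }));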
# Send files
const path = require('path');

// Express route handler that sends a file (mount with e.g. app.get('/file', func)).
function func(req, res) {
  res.sendFile(path.join(__dirname, 'path', 'to', 'file.extension'));
}
# Serve a website
app.use(express.static('folder-name'));
app.use('/url', express.static(path.join(__dirname, 'path', 'to', 'folder')));
# Setting BUILD_PATH
set BUILD_PATH=../path/to/folder&& react-scripts build
# Node Performance
# Cluster Module
The cluster module enables creating copies of a Node process that run the server code side by side in parallel. A main (master) process is created, and Node copies it into worker processes with the fork function, so multiple worker processes are attached to a single master. The related worker_threads module shown below takes a similar approach but runs the work in parallel threads inside a single process:
const {
  isMainThread,
  workerData,
  threadId,
  Worker,
} = require('worker_threads');

if (isMainThread) {
  console.log(`Main thread id: ${threadId}`);
  new Worker(__filename, {
    workerData: [7, 6, 2, 3],
  });
  new Worker(__filename, {
    workerData: [1, 3, 4, 3],
  });
} else {
  console.log(`Worker thread id: ${threadId}`);
  console.log(`${workerData} sorted is ${workerData.sort()}`);
}
# Maximum cluster performance
const os = require('os');
const NUM = os.cpus().length; // one worker per logical CPU core
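A minimal sketch of the cluster module described above, forking one worker per core (the HTTP server workload and port are assumptions):
const cluster = require('cluster');
const http = require('http');
const os = require('os');

if (cluster.isPrimary) { // cluster.isMaster on older Node versions
  console.log(`Master process id: ${process.pid}`);
  // Fork one worker process per CPU core.
  for (let i = 0; i < os.cpus().length; i++) {
    cluster.fork();
  }
} else {
  // Each worker runs its own copy of the server; incoming requests are shared among them.
  http.createServer((req, res) => {
    res.end(`Handled by worker ${process.pid}`);
  }).listen(8000);
}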
# Database
MongoDB Atlas connection string format:
mongodb+srv://<username>:<password>@nasacluster.t33ik.mongodb.net/myFirstDatabase?retryWrites=true&w=majority
const mongoose = require('mongoose');

const MONGO_URL = 'mongodb+srv://...'; // connection string as above

// Inside an async function (or an ES module with top-level await):
await mongoose.connect(MONGO_URL, {
  // These options apply to Mongoose 5; newer versions use sensible defaults.
  useNewUrlParser: true,
  useFindAndModify: false,
  useCreateIndex: true,
  useUnifiedTopology: true,
});
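Connection status can be observed through events on mongoose.connection; a minimal sketch:
// Fires once when the initial connection is established.
mongoose.connection.once('open', () => {
  console.log('MongoDB connection ready');
});

// Fires on any connection error.
mongoose.connection.on('error', (err) => {
  console.error(err);
});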
# Some Modules
# Express
Fast, unopinionated, minimalist web framework for node.
const PORT = 8000;
const express = require('express');
const app = express();
// Define routes
app.get('/route1', (req, res) => {});
app.get('/route2', (req, res) => {});
app.post('/route3', (req, res) => {});
app.listen(PORT, () => {});
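Route handlers read data from the request and send a response. A minimal sketch (the express.json body parser and the response payloads here are assumptions, not part of the original skeleton):
// Parse JSON request bodies into req.body.
app.use(express.json());

app.get('/route1', (req, res) => {
  res.send('Hello from route1');
});

app.post('/route3', (req, res) => {
  console.log(req.body);        // the parsed JSON payload
  res.json({ received: true }); // respond with JSON
});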
# Koa
Expressive HTTP middleware framework for node.js to make web applications and APIs more enjoyable to write. Koa's middleware stack flows in a stack-like manner, allowing you to perform actions downstream then filter and manipulate the response upstream.
Only methods that are common to nearly all HTTP servers are integrated directly into Koa's small ~570 SLOC codebase. This includes things like content negotiation, normalization of node inconsistencies, redirection, and a few others.
const PORT = 8000;
const Koa = require('koa');
const app = new Koa();

app.use(ctx => {
  ctx.body = 'Hello Koa';
});

app.listen(PORT, () => {});
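The downstream/upstream flow described above can be seen with two middleware; a minimal sketch (the response-time logging is an assumption for illustration):
// Runs first: work before await next() happens downstream, work after it happens upstream.
app.use(async (ctx, next) => {
  const start = Date.now();
  await next(); // pass control downstream
  const ms = Date.now() - start;
  console.log(`${ctx.method} ${ctx.url} took ${ms}ms`);
});

// Runs second: sets the response body.
app.use(async (ctx) => {
  ctx.body = 'Hello Koa';
});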
# Nodemon
nodemon is a tool that helps develop Node.js based applications by automatically restarting the node application when file changes in the directory are detected.
nodemon does not require any additional changes to your code or method of development. nodemon is a replacement wrapper for node. To use nodemon, replace the word node on the command line when executing your script.
Install nodemon
npm install nodemon
Run nodemon
<path-to-nodemon-module> <file-name>
Or add to "scripts"
"nodem": "nodemon <file-name>"
Run the script with nodemon
npm run nodem
Add "-g" flag to install globally
npm install -g nodemon
The server restarts automatically on file changes, so there is no need to restart it manually to see the latest changes.
# pm2
PM2 is a production process manager for Node.js applications with a built-in load balancer. It allows you to keep applications alive forever, to reload them without downtime and to facilitate common system admin tasks.
npm install pm2 -g
pm2 start <file-name>
Shorthands
pm2 list
pm2 logs
pm2 stop <app-name|id>
pm2 reload <app-name|id>
pm2 restart <app-name|id>
pm2 delete <app-name|id>
# csv-parse
The csv-parse package is a parser converting CSV text input into arrays or objects. It is part of the CSV project.
It implements the Node.js stream.Transform API. It also provides a simple callback-based API for convenience. It is both extremely easy to use and powerful. It was first released in 2010 and is used against big data sets by a large community.
Install csv-parse
npm install csv-parse
The pipe function connects a readable stream source to a writable stream destination.
const { parse } = require('csv-parse');
const fs = require('fs');

const results = [];

// Pass in and read the file.
fs.createReadStream('csv-file')
  .pipe(parse({
    comment: '//',
    columns: true,
    delimiter: ':',
  }))
  .on('data', (data) => {
    results.push(data);
  })
  .on('error', (err) => {
    console.log(err);
  })
  .on('end', () => {
    console.log(results);
  });
# mongoose
Mongoose is a MongoDB object modeling tool designed to work in an asynchronous environment. Mongoose supports both promises and callbacks.
npm install mongoose
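A minimal sketch of defining and using a model (the schema fields and model name are assumptions):
const mongoose = require('mongoose');

// The schema describes the shape of documents in the collection.
const friendSchema = new mongoose.Schema({
  name: { type: String, required: true },
});

// The model provides the create/query API for that collection.
const Friend = mongoose.model('Friend', friendSchema);

async function addFriend(name) {
  await Friend.create({ name }); // insert a document
  return Friend.find({});        // return all documents
}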