Core Modules, File System, HTTP Server, Events & Streams, Async Patterns, Package Management, Performance — server-side JavaScript.
// ── ES Modules (recommended) ──
// package.json: { "type": "module" }
import fs from 'node:fs/promises';
import path from 'node:path';
import http from 'node:http';
import { fileURLToPath } from 'node:url';
// Named export: returns a friendly greeting for `name`.
export function greet(name) {
  return `Hello, ${name}!`;
}
// Default export: entry point that logs a fixed startup message.
export default function main() {
  console.log('Main function');
}
// ── CommonJS (legacy) ──
const fs = require('fs');
const path = require('path');
module.exports = { greet, main };

import path from 'node:path';
import { fileURLToPath } from 'node:url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// ── Path Methods ──
path.join('/foo', 'bar', 'baz/asdf', 'quux'); // '/foo/bar/baz/asdf/quux'
path.resolve('foo/bar', '/tmp/file/', '..', 'a'); // '/tmp/a'
path.normalize('/foo/bar//baz/asdf/quux/..'); // '/foo/bar/baz/asdf'
path.dirname('/foo/bar/baz.txt'); // '/foo/bar'
path.basename('/foo/bar/baz.txt'); // 'baz.txt'
path.basename('/foo/bar/baz.txt', '.txt'); // 'baz'
path.extname('/foo/bar/baz.txt'); // '.txt'
path.parse('/home/user/dir/file.txt');
// { root: '/', dir: '/home/user/dir', base: 'file.txt', ext: '.txt', name: 'file' }
path.format({ root: '/', dir: '/home', base: 'file.txt' }); // '/home/file.txt'
path.relative('/data/orandea/test/aaa', '/data/orandea/impl/bbb'); // '../../impl/bbb'
path.isAbsolute('/foo/bar'); // true
path.isAbsolute('./quux'); // false

import url from 'node:url';
const myURL = new URL('https://user:pass@example.com:8080/p/a/t/h?query=string#hash');
myURL.href; // 'https://user:pass@example.com:8080/p/a/t/h?query=string#hash'
myURL.origin; // 'https://example.com:8080'
myURL.protocol; // 'https:'
myURL.username; // 'user'
myURL.password; // 'pass'
myURL.hostname; // 'example.com'
myURL.port; // '8080'
myURL.pathname; // '/p/a/t/h'
myURL.search; // '?query=string'
myURL.hash; // '#hash'
myURL.searchParams.get('query'); // 'string'
myURL.searchParams.append('page', '1');
myURL.searchParams.toString(); // 'query=string&page=1'

import { Buffer } from 'node:buffer';
// ── Creating Buffers ──
const buf1 = Buffer.from('hello'); // <Buffer 68 65 6c 6c 6f>
const buf2 = Buffer.alloc(10); // 10 zero-filled bytes
const buf3 = Buffer.allocUnsafe(10); // 10 uninitialized bytes (faster)
const buf4 = Buffer.from([1, 2, 3, 4]); // from byte array
const buf5 = Buffer.from('7468697320697320612074', 'hex'); // from hex
// ── Buffer Operations ──
buf1.toString(); // 'hello'
buf1.toString('hex'); // '68656c6c6f'
buf1.toString('base64'); // 'aGVsbG8='
buf1.length; // 5
buf1[0]; // 104 (UTF-8 code for 'h')
Buffer.concat([buf1, Buffer.from(' world')]).toString(); // 'hello world'
Buffer.isBuffer(buf1); // true
Buffer.byteLength('hello'); // 5
Buffer.from('hello').equals(Buffer.from('hello')); // true

| Module | Purpose | Key Features |
|---|---|---|
| fs | File system | readFile, writeFile, mkdir, watch |
| path | Path manipulation | join, resolve, dirname, basename |
| http | HTTP server/client | createServer, request, response |
| https | HTTPS server/client | TLS/SSL HTTP |
| url | URL parsing | URL class, searchParams |
| events | Event emitter | EventEmitter, on, emit, once |
| stream | Stream handling | Readable, Writable, Transform, pipeline |
| buffer | Binary data | Buffer, alloc, from, concat |
| crypto | Cryptography | randomBytes, hash, encrypt, sign |
| os | OS info | hostname, platform, cpus, freemem |
| util | Utilities | promisify, inspect, types |
| child_process | Child processes | spawn, exec, fork |
| cluster | Process clustering | fork workers, share port |
| worker_threads | Worker threads | postMessage, receiveMessageOnPort |
| Object | Description |
|---|---|
| process | Current process (env, argv, exit, cwd) |
| globalThis | Global scope object |
| __dirname | Directory of current module (CJS) |
| __filename | Filename of current module (CJS) |
| setTimeout | Timer (non-blocking) |
| setInterval | Repeated timer |
| setImmediate | Execute after I/O callbacks |
| queueMicrotask | Queue a microtask |
| AbortController | Cancel async operations |
| URL | URL constructor (global in Node 10+) |
| structuredClone | Deep clone objects |
| TextEncoder/Decoder | Encode/decode text |
Add "type": "module" to your package.json. Import core modules with the node: prefix (import fs from 'node:fs') to distinguish them from npm packages and for future compatibility.

import fs from 'node:fs/promises';
import path from 'node:path';
// ── Reading Files ──
const data = await fs.readFile('file.txt', 'utf-8');
const buffer = await fs.readFile('image.png'); // Buffer for binary
// ── Writing Files ──
await fs.writeFile('file.txt', 'Hello World', 'utf-8');
await fs.appendFile('file.txt', '\nAppended line');
// ── Directories ──
await fs.mkdir('dir/subdir', { recursive: true });
const files = await fs.readdir('dir');
const filesDetailed = await fs.readdir('dir', { withFileTypes: true });
// [Dir, File, Dir, ...] — filter with entry.isDirectory()
await fs.rmdir('dir', { recursive: true }); // removes empty dirs; recursive rmdir is deprecated — prefer fs.rm below
await fs.rm('dir', { recursive: true, force: true }); // remove non-empty
// ── File Info ──
const stats = await fs.stat('file.txt');
stats.isFile(); // true
stats.isDirectory(); // false
stats.size; // 1234 bytes
stats.mtime; // Modified timestamp
stats.birthtime; // Created timestamp
// ── Copy & Rename ──
await fs.copyFile('source.txt', 'dest.txt');
await fs.rename('old.txt', 'new.txt');

import { createReadStream, createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { createGzip } from 'node:zlib';
// ── Readable Stream (for large files) ──
const readStream = createReadStream('large-file.txt', {
highWaterMark: 64 * 1024, // 64KB chunks
encoding: 'utf-8',
});
readStream.on('data', (chunk) => {
console.log('Chunk:', chunk.length);
});
// ── Writable Stream ──
const writeStream = createWriteStream('output.txt');
// ── Pipe Streams ──
await pipeline(
createReadStream('input.txt'),
createGzip(),
createWriteStream('output.txt.gz'),
);
// ── Process line by line ──
import { readlines } from 'node:fs';
for await (const line of readlines('file.txt')) {
console.log(line);
}import { watch } from 'node:fs';
// ── Watch for File Changes ──
// FIX: the log line was garbled ("$(unknown)") — interpolate `filename`,
// the changed path reported relative to the watched directory.
const watcher = watch('./src', { recursive: true }, (eventType, filename) => {
  console.log(`${eventType}: ${filename}`);
  // eventType: 'change' or 'rename'
});
// Stop watching
watcher.close();
// ── fs.promises.watch (async iterator) ──
import { watch as asyncWatch } from 'node:fs/promises';
for await (const { eventType, filename } of asyncWatch('./src')) {
console.log(`${eventType}: ${filename}`);
}

| Operation | Callback (legacy) | Promise (recommended) |
|---|---|---|
| Read file | fs.readFile(path, cb) | await fs.readFile(path) |
| Write file | fs.writeFile(path, data, cb) | await fs.writeFile(path, data) |
| Append | fs.appendFile(path, data, cb) | await fs.appendFile(path, data) |
| Read dir | fs.readdir(path, cb) | await fs.readdir(path) |
| Stat | fs.stat(path, cb) | await fs.stat(path) |
| Mkdir | fs.mkdir(path, cb) | await fs.mkdir(path) |
| Remove | fs.unlink(path, cb) | await fs.unlink(path) |
| Rename | fs.rename(old, new, cb) | await fs.rename(old, new) |
| Exists | fs.existsSync(path) | try { await fs.stat() } catch |
import fs from 'node:fs/promises';
import path from 'node:path';
// Recursively get all files
// Depth-first walk of `dir`; every non-directory entry's joined path is
// pushed into the shared `files` accumulator, which is also returned.
async function getAllFiles(dir, files = []) {
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const entryPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      await getAllFiles(entryPath, files);
      continue;
    }
    files.push(entryPath);
  }
  return files;
}
// Ensure directory exists
async function ensureDir(dir) {
await fs.mkdir(dir, { recursive: true });
}

Use fs/promises instead of the callback-based fs module. The promise-based API is cleaner with async/await and avoids callback hell. For very large files, use streams instead of reading entire files into memory.

import http from 'node:http';
// Minimal router: GET /, GET /api/users, POST /api/users, else 404.
const server = http.createServer((req, res) => {
  console.log(`${req.method} ${req.url}`);
  // Route handling
  if (req.method === 'GET' && req.url === '/') {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('Hello World');
  } else if (req.method === 'GET' && req.url === '/api/users') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify([{ id: 1, name: 'Alice' }]));
  } else if (req.method === 'POST' && req.url === '/api/users') {
    let body = '';
    req.on('data', (chunk) => { body += chunk; });
    req.on('end', () => {
      // FIX: JSON.parse on an untrusted body was unguarded — a malformed
      // payload threw synchronously and crashed the whole server.
      let data;
      try {
        data = JSON.parse(body);
      } catch {
        res.writeHead(400, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: 'Invalid JSON body' }));
        return;
      }
      res.writeHead(201, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ id: 2, ...data }));
    });
  } else {
    res.writeHead(404, { 'Content-Type': 'text/plain' });
    res.end('Not Found');
  }
});
server.listen(3000, () => {
console.log('Server running at http://localhost:3000');
});

import http from 'node:http';
// ── HTTP Request ──
const postData = JSON.stringify({ title: 'Hello', body: 'World' });
const options = {
hostname: 'jsonplaceholder.typicode.com',
port: 443,
path: '/posts',
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Content-Length': Buffer.byteLength(postData),
},
};
const req = http.request(options, (res) => {
let data = '';
res.on('data', (chunk) => { data += chunk; });
res.on('end', () => {
console.log('Status:', res.statusCode);
console.log('Body:', data);
});
});
req.on('error', (err) => console.error('Error:', err));
req.write(postData);
req.end();
// ── Fetch API (built-in, Node 18+) ──
const response = await fetch('https://api.example.com/data', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ key: 'value' }),
});
const json = await response.json();
console.log(json);

import https from 'node:https';
import fs from 'node:fs';
// ── HTTPS Server ──
const options = {
key: fs.readFileSync('./certs/key.pem'),
cert: fs.readFileSync('./certs/cert.pem'),
};
const server = https.createServer(options, (req, res) => {
res.writeHead(200, { 'Content-Type': 'text/plain' });
res.end('Secure Hello World');
});
server.listen(443, () => {
console.log('HTTPS server running on port 443');
});

| Property | Description |
|---|---|
| req.method | HTTP method (GET, POST, etc.) |
| req.url | Full URL path + query string |
| req.headers | Incoming request headers |
| req.httpVersion | HTTP version (1.1, 2.0) |
| req.socket.remoteAddress | Client IP address |
| res.writeHead(status, headers) | Set status & headers |
| res.setHeader(name, value) | Set individual header |
| res.getHeader(name) | Get header value |
| res.statusCode | Response status code |
| res.end(data) | Send response & close connection |
| Code | Meaning | Use Case |
|---|---|---|
| 200 | OK | Successful request |
| 201 | Created | Resource created (POST) |
| 204 | No Content | Successful, no body |
| 301 | Moved Permanently | Permanent redirect |
| 302 | Found | Temporary redirect |
| 304 | Not Modified | Cache hit |
| 400 | Bad Request | Invalid input |
| 401 | Unauthorized | Not authenticated |
| 403 | Forbidden | Not authorized |
| 404 | Not Found | Resource missing |
| 500 | Internal Error | Server crash |
Use the fetch() API in Node.js 18+ instead of third-party libraries like axios or node-fetch. It's globally available, supports streaming, and is based on the Web API standard.

import { EventEmitter } from 'node:events';
// ── Custom EventEmitter ──
// Emits 'start' before running a task, then 'complete' on success or
// 'error' on failure; the actual work lives in execute().
class TaskManager extends EventEmitter {
  async runTask(taskId) {
    this.emit('start', { taskId, timestamp: Date.now() });
    try {
      this.emit('complete', { taskId, result: await this.execute(taskId) });
    } catch (error) {
      this.emit('error', { taskId, error });
    }
  }
  async execute(taskId) {
    // simulate async work
    return { status: 'done' };
  }
}
const manager = new TaskManager();
manager.on('start', (data) => console.log(`Task ${data.taskId} started`));
manager.on('complete', (data) => console.log(`Task ${data.taskId} done`));
manager.on('error', (data) => console.error(`Task ${data.taskId} failed:`, data.error));
manager.once('start', () => console.log('First task started!')); // fires once only
// Remove listener
const handler = (data) => console.log(data);
manager.on('complete', handler);
manager.off('complete', handler);
// Get listener count
console.log(manager.listenerCount('complete'));
// List all events
console.log(manager.eventNames());
manager.runTask('task-001');

import { Readable, Writable, Transform } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { createReadStream, createWriteStream } from 'node:fs';
import { createGzip, createGunzip } from 'node:zlib';
import { pipeline as streamPipeline } from 'node:stream';
// ── Readable Stream (custom) ──
// Pushes the numbers 1..max, one per line, then signals end-of-stream.
class NumberStream extends Readable {
  constructor(max = 10) {
    super();
    this.current = 1;
    this.max = max;
  }
  _read() {
    if (this.current <= this.max) {
      this.push(`${this.current}\n`);
      this.current += 1;
    } else {
      this.push(null); // signal end
    }
  }
}
// ── Transform Stream ──
// Uppercases every chunk that flows through the stream.
class UpperCaseTransform extends Transform {
  _transform(chunk, encoding, callback) {
    const upper = chunk.toString().toUpperCase();
    callback(null, upper);
  }
}
// ── Writable Stream (custom) ──
// Prints each incoming chunk, trimmed, with a "LOG:" prefix.
class LogWriter extends Writable {
  _write(chunk, encoding, callback) {
    const line = chunk.toString().trim();
    console.log('LOG:', line);
    callback();
  }
}
// ── Pipeline (error handling + cleanup) ──
await pipeline(
new NumberStream(5),
new UpperCaseTransform(),
new LogWriter(),
);
// ── File Stream Pipeline ──
await pipeline(
createReadStream('access.log'),
createGzip(),
createWriteStream('access.log.gz'),
);
// ── Process CSV line by line ──
import { createInterface } from 'node:readline';
const rl = createInterface({
input: createReadStream('data.csv'),
crlfDelay: Infinity,
});
for await (const line of rl) {
const [name, age, city] = line.split(',');
console.log(`${name} is ${age} years old`);
}

| Type | Class | Use Case |
|---|---|---|
| Readable | Readable | Read data (files, HTTP request) |
| Writable | Writable | Write data (files, HTTP response) |
| Duplex | Duplex | Both read & write (TCP socket) |
| Transform | Transform | Modify data (compress, encrypt) |
| Event | Stream Type | Fired When |
|---|---|---|
| data | Readable | Chunk available to read |
| end | Readable | No more data |
| error | All | Error occurred |
| close | All | Stream closed |
| finish | Writable | All data flushed |
| drain | Writable | Buffer empty, write again |
| pipe | Readable | Piped to writable |
| unpipe | Readable | Unpiped from writable |
Use pipeline() instead of .pipe(). Pipeline properly handles errors, cleans up resources on failure, and forwards errors through the chain. The .pipe() method does not forward errors between streams.

// ── Promises ──
const promise = new Promise((resolve, reject) => {
setTimeout(() => resolve('Done!'), 1000);
});
const result = await promise; // 'Done!'
// ── Promise combinators ──
const [r1, r2, r3] = await Promise.all([p1, p2, p3]); // Wait for ALL
const first = await Promise.any([p1, p2, p3]); // First to RESOLVE
const allSettled = await Promise.allSettled([p1, p2]); // Wait for ALL (no reject)
const raced = await Promise.race([p1, p2]); // First to settle
// ── allSettled result format ──
// [
// { status: 'fulfilled', value: 42 },
// { status: 'rejected', reason: Error }
// ]
// ── Async/Await Error Handling ──
try {
const data = await fetchData();
} catch (error) {
console.error('Failed:', error.message);
} finally {
console.log('Always runs');
}

// ── Timeout wrapper ──
// Race `promise` against a rejection after `ms` milliseconds.
// FIX: the original never cleared its timer, so even after `promise`
// settled the pending setTimeout kept the event loop alive for `ms`.
function withTimeout(promise, ms) {
  let timer;
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error('Timeout after ' + ms + 'ms')), ms);
  });
  return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
}
// Usage
const data = await withTimeout(fetch('/api/data'), 5000);
// ── Retry with exponential backoff ──
// Calls `fn` up to `retries + 1` times; waits delay * 2^attempt between
// failures and rethrows the last error once attempts are exhausted.
async function retry(fn, retries = 3, delay = 1000) {
  let attempt = 0;
  while (true) {
    try {
      return await fn();
    } catch (error) {
      if (attempt === retries) throw error;
      const wait = delay * 2 ** attempt;
      console.log(`Retry ${attempt + 1}/${retries} in ${wait}ms`);
      await new Promise((resolve) => setTimeout(resolve, wait));
      attempt += 1;
    }
  }
}
// Usage
const data = await retry(() => fetch('/api/data').then(r => r.json()));
// ── Async queue (concurrency limit) ──
// Runs `tasks` (array of zero-arg async functions) with at most
// `concurrency` in flight at once; results keep the original order.
async function asyncQueue(tasks, concurrency = 3) {
  const results = [];
  let index = 0;
  // Each worker repeatedly claims the next unclaimed task index.
  async function worker() {
    while (index < tasks.length) {
      const currentIndex = index++;
      results[currentIndex] = await tasks[currentIndex]();
    }
  }
  // FIX: clamp to at least one worker — Math.min alone let
  // concurrency <= 0 spawn zero workers and silently return [].
  const workerCount = Math.max(1, Math.min(concurrency, tasks.length));
  const workers = Array.from({ length: workerCount }, () => worker());
  await Promise.all(workers);
  return results;
}

import { exec, spawn, fork } from 'node:child_process';
import { promisify } from 'node:util';
const execAsync = promisify(exec);
// ── exec: run command, get output ──
const { stdout, stderr } = await execAsync('ls -la');
console.log(stdout);
// ── spawn: streaming output (for long processes) ──
const child = spawn('find', ['.', '-name', '*.js']);
child.stdout.on('data', (data) => process.stdout.write(data));
child.stderr.on('data', (data) => process.stderr.write(data));
child.on('close', (code) => console.log('Exit code:', code));
// ── fork: run Node.js module in child process ──
const worker = fork('./worker.js');
worker.send({ task: 'compute', data: [1, 2, 3] });
worker.on('message', (result) => console.log('Result:', result));
worker.on('exit', (code) => console.log('Worker exited:', code));
// ── Worker Threads (CPU-intensive tasks) ──
import { Worker } from 'node:worker_threads';
function runInWorker(filename, data) {
return new Promise((resolve, reject) => {
const worker = new Worker(filename, { workerData: data });
worker.on('message', resolve);
worker.on('error', reject);
worker.on('exit', (code) => {
if (code !== 0) reject(new Error('Worker stopped with code ' + code));
});
});
}
// Usage: const result = await runInWorker('./heavy-task.js', { limit: 1000000 });

| Method | Short-circuit? | On Reject | Use Case |
|---|---|---|---|
| Promise.all() | First reject | Rejects immediately | All must succeed |
| Promise.allSettled() | Never | Collects all | Check each result |
| Promise.any() | First resolve | Ignores rejects | First success wins |
| Promise.race() | First settle | Either resolve/reject | Timeout pattern |
| Method | Best For | Output |
|---|---|---|
| exec(cmd) | Short commands | Buffer (stdout/stderr) |
| execFile(file, args) | File execution | Buffer (safer than exec) |
| spawn(cmd, args) | Long/streaming | Stream (real-time) |
| fork(module) | Node.js workers | IPC (message passing) |
Use worker_threads for CPU-bound tasks and child_process for process isolation. Node.js is single-threaded, so CPU-intensive operations block the event loop. Worker threads run in separate threads with their own V8 instance.

{
"name": "my-app",
"version": "1.0.0",
"type": "module",
"description": "A Node.js application",
"main": "src/index.js",
"bin": {
"my-cli": "./bin/cli.js"
},
"scripts": {
"dev": "node --watch src/index.js",
"start": "node src/index.js",
"build": "tsc",
"test": "vitest",
"lint": "eslint src/",
"prepare": "husky"
},
"engines": {
"node": ">=20.0.0"
},
"dependencies": {
"express": "^4.21.0"
},
"devDependencies": {
"typescript": "^5.6.0",
"eslint": "^9.0.0"
}
}

# ── npm (Node Package Manager) ──
npm init -y # Create package.json
npm install express # Add dependency
npm install -D typescript # Add dev dependency
npm install -g pm2 # Global install
npm uninstall express # Remove dependency
npm update # Update all packages
npm outdated # Check for updates
npm run dev # Run script
npm run build -- --production # Pass args to script
npm ls # List installed packages
npm ls --depth=0 # Top-level only
npm root # Show node_modules path
npm pack # Create tarball
npm publish # Publish to registry
# ── npx: run packages without installing ──
npx create-next-app@latest my-app
npx prisma generate
npx tsx src/index.ts
# ── npm scripts lifecycle ──
# preinstall → install → postinstall
# prestart → start → poststart
# pretest → test → posttest

# ── pnpm (fast, disk-efficient) ──
pnpm install # Install all deps
pnpm add express # Add dependency
pnpm add -D typescript # Add dev dependency
pnpm remove express # Remove dependency
pnpm add lodash@4.17.21 # Install specific version
pnpm add "express@^4.18.0" # Install with range
pnpm run dev # Run script
pnpm dlx create-next-app # Like npx but faster
# ── Workspace (monorepo) ──
# pnpm-workspace.yaml
# packages:
# - 'apps/*'
# - 'packages/*'
pnpm --filter @app/web dev # Run in specific package
pnpm --filter @app/web build # Build specific package
pnpm -r run build # Run in all packages recursively
pnpm -r exec "tsc --noEmit"     # Execute command in all packages

| Range | Meaning | Example |
|---|---|---|
| ^1.2.3 | >=1.2.3 <2.0.0 | Minor updates allowed |
| ~1.2.3 | >=1.2.3 <1.3.0 | Patch updates only |
| 1.2.x | >=1.2.0 <1.3.0 | Patch range |
| >=1.0.0 | 1.0.0 or higher | Any newer version |
| 1.2.3 | Exact version | Only 1.2.3 |
| * | Any version | No restriction |
| latest | Latest published | Registry default |
| Feature | npm | pnpm | yarn |
|---|---|---|---|
| Install speed | Normal | Fast | Fast |
| Disk usage | High | Low (symlinks) | Medium |
| Lock file | package-lock.json | pnpm-lock.yaml | yarn.lock |
| Workspaces | Yes | Yes (native) | Yes (native) |
| Monorepo | Basic | Excellent | Good |
| Caching | Global | Content-addressable | Global |
| Registry | npmjs.com | npmjs.com | npmjs.com |
Prefer pnpm for new projects. It uses content-addressable storage with symlinks, saving 50-70% disk space in monorepos. Install times are significantly faster due to its efficient package resolution and global store.

import { performance, PerformanceObserver } from 'node:perf_hooks';
// ── Measuring Execution Time ──
const start = performance.now();
await expensiveOperation();
const end = performance.now();
console.log(`Took ${(end - start).toFixed(2)}ms`);
// ── Performance Observer ──
const obs = new PerformanceObserver((list) => {
for (const entry of list.getEntries()) {
console.log(`${entry.name}: ${entry.duration}ms`);
}
});
obs.observe({ entryTypes: ['function', 'measure'] });
// ── process.memoryUsage() ──
const memory = process.memoryUsage();
console.log({
rss: Math.round(memory.rss / 1024 / 1024) + 'MB', // Total memory
heapTotal: Math.round(memory.heapTotal / 1024 / 1024) + 'MB', // Heap allocated
heapUsed: Math.round(memory.heapUsed / 1024 / 1024) + 'MB', // Heap used
external: Math.round(memory.external / 1024 / 1024) + 'MB', // C++ objects
});
// ── Cluster (multi-process) ──
import cluster from 'node:cluster';
import os from 'node:os';
if (cluster.isPrimary) {
const cpus = os.cpus().length;
console.log(`Primary ${process.pid} is running`);
for (let i = 0; i < cpus; i++) {
cluster.fork();
}
cluster.on('exit', (worker) => {
console.log(`Worker ${worker.process.pid} died. Restarting...`);
cluster.fork(); // Auto-restart
});
} else {
// Worker: start HTTP server
import('./server.js');
}

// ── Node.js Inspector (built-in debugger) ──
// Start with: node --inspect server.js
// Open: chrome://inspect in Chrome
// ── Debug with breakpoints in code ──
debugger; // Pauses execution when inspector is attached
// ── Worker Inspector ──
// node --inspect=9229 --inspect-brk server.js
// --inspect-brk: break on first line
// ── Environment Variables for Debugging ──
NODE_DEBUG=http // Debug HTTP module
NODE_DEBUG=net,fs // Debug multiple modules
NODE_OPTIONS="--inspect" // Enable inspector via env
// ── process object utilities ──
process.env.NODE_ENV; // 'production' | 'development'
process.argv; // [node, script.js, ...args]
process.pid; // Process ID
process.ppid; // Parent process ID
process.cwd(); // Current working directory
process.chdir('/tmp'); // Change directory
process.exit(0); // Exit with code 0
process.nextTick(() => {}); // Run callback before I/O
// ── Uncaught Exception Handler ──
process.on('uncaughtException', (err) => {
console.error('Uncaught:', err);
// Log and gracefully shutdown
process.exit(1);
});
process.on('unhandledRejection', (reason, promise) => {
console.error('Unhandled rejection:', reason);
});
// ── Graceful Shutdown ──
// Close the HTTP server first, then the DB, then exit — the nested
// callbacks enforce that order so nothing is torn down mid-request.
// NOTE(review): `server` and `db` are assumed to be defined elsewhere
// in the application — confirm against the surrounding module.
const shutdown = (signal) => {
console.log(`${signal} received. Shutting down gracefully...`);
server.close(() => {
console.log('HTTP server closed');
db.close(() => {
console.log('DB connection closed');
process.exit(0); // exit only after both close callbacks have fired
});
});
};
process.on('SIGTERM', () => shutdown('SIGTERM'));
process.on('SIGINT', () => shutdown('SIGINT'));

| Tip | Impact | Implementation |
|---|---|---|
| Use clustering | 4x throughput | cluster.fork() per CPU |
| Stream large data | Lower memory | pipeline() instead of read file |
| Avoid sync I/O | Non-blocking | fs.promises instead of fsSync |
| Cache DB queries | Fewer queries | Map/LRU cache in memory |
| Use worker_threads | CPU parallelism | Offload CPU-bound tasks |
| Enable compression | Smaller responses | compression middleware |
| Pool connections | Reduce latency | DB/HTTP connection pooling |
# ── Useful Node.js CLI Flags ──
node --watch src/index.js # Auto-restart on file change
node --inspect server.js # Enable inspector
node --enable-source-maps # Source maps for stack traces
node --max-old-space-size=4096 # Increase heap to 4GB
node --trace-warnings # Show warning stack traces
node --trace-event-categories node,file # Tracing
node --abort-on-uncaught-exception # Crash on unhandled
node --dns-result-order=ipv4first # Prefer IPv4
node --experimental-detect-module # Auto-detect ESM
node --test                    # Run test runner

Register process.on('unhandledRejection', handler) and process.on('uncaughtException', handler) in production. Unhandled async errors can cause silent failures and memory leaks.

The event loop is the mechanism that allows Node.js to perform non-blocking I/O operations despite JavaScript being single-threaded. It continuously checks the call stack and processes callbacks from the event queue. The phases are: timers → pending callbacks → idle/prepare → poll → check → close callbacks. Microtasks (Promises, queueMicrotask) are drained between callbacks and after each phase, before moving to the next.
| Method | When it Executes | Phase |
|---|---|---|
| process.nextTick() | Immediately, after current operation | Before event loop phases |
| setImmediate() | On next iteration of event loop | Check phase |
| Promise.then() | Microtask queue | After current operation, once the nextTick queue has drained |
| setTimeout(fn, 0) | Next timer phase | Timers phase |
Node.js uses an event-driven, non-blocking I/O model. While JavaScript execution is single-threaded, I/O operations (file reads, network requests) are delegated to the OS/libuv thread pool. When I/O completes, the callback is placed in the event queue. The event loop picks up callbacks and executes them. For CPU-intensive tasks, use worker_threads (separate V8 instances) or child_process(separate processes) to avoid blocking the event loop.
| Feature | spawn | fork |
|---|---|---|
| Purpose | Any OS command | Node.js module only |
| Communication | Streams (stdout/stderr) | IPC (send/on message) |
| Return | ChildProcess with streams | ChildProcess with IPC |
| V8 instance | New process | New V8 instance |
| Use case | Running shell commands | Parallel processing in Node |
Streams process data piece by piece (chunks) without loading everything into memory. Four types: Readable(reading data), Writable (writing data), Duplex (both, like TCP sockets),Transform (modify data between read/write). Streams are essential for handling large files, processing real-time data, and building efficient data pipelines. Use pipeline() to connect streams with proper error handling and resource cleanup.
| Feature | cluster | worker_threads |
|---|---|---|
| Shares port | Yes (listen on same port) | No (each needs own port) |
| Memory | Separate process | Shared memory possible |
| Communication | IPC (send/message) | postMessage / SharedArrayBuffer |
| Isolation | Process-level | Thread-level |
| Overhead | Higher (full process) | Lower (threads) |
| Best for | HTTP server scaling | CPU-bound computation |
// Best practice for production:
process.on('uncaughtException', (err) => {
logger.error('Uncaught exception:', err);
// Perform cleanup (close DB, flush logs)
gracefulShutdown(() => process.exit(1));
});
process.on('unhandledRejection', (reason, promise) => {
logger.error('Unhandled rejection:', reason);
// Don't crash, but log and alert
// In Node 15+, this terminates the process by default
});