Child processes allow a Node.js application to spawn separate operating system processes — running shell commands, executing other Node.js scripts, launching native binaries, and parallelising work across processes with full OS-level isolation. Node.js provides four methods: exec (shell string, buffered output), execFile (binary directly, more secure), spawn (streaming I/O, large output), and fork (Node.js child with built-in IPC). Understanding which to use and how to handle their outputs safely is essential for scripts, build tools, CLI applications, and any MEAN Stack utility that needs to call external programs.
Child Process Methods Comparison

| Method | Shell? | Output | IPC? | Use For |
|---|---|---|---|---|
| exec(cmd) | Yes (sh/cmd) | Buffered string | No | Simple shell commands with small output |
| execFile(bin, args) | No | Buffered string | No | Run binary directly — no shell injection risk |
| spawn(cmd, args) | No | Streaming | No | Long-running processes, large output, stdin piping |
| fork(script) | No | Streaming | Yes | Spawning Node.js child processes with message passing |
exec() runs through a shell (/bin/sh on Unix) which enables shell metacharacters — pipes, redirects, wildcards. This is both its strength and its weakness. Never pass user input to exec() without sanitisation — it is a shell injection vulnerability. If user-controlled data appears in the command string, use execFile() or spawn() which do not use a shell — arguments are passed as an array directly to execve, making injection impossible.

Use util.promisify(exec) or the modern execAsync equivalent for clean async/await usage. The callback-based API is error-prone because both stderr output and non-zero exit codes are "errors" but may mean different things (git sometimes writes informational messages to stderr). Check both the error object and stderr separately: the error's .code property is the exit code, and stderr may contain non-fatal warnings even when the command succeeded.

Raise the maxBuffer limit when using exec() or execFile() for commands that might produce large output. The default 1MB buffer silently kills processes that exceed it with Error: maxBuffer exceeded. For large outputs (listing thousands of files, generating large reports), use spawn() which streams output rather than buffering it, and pipe the output stream directly to a file or response rather than accumulating it in memory.

Complete Child Process Examples
const { exec, execFile, spawn, fork } = require('child_process');
const { promisify } = require('util');
const path = require('path');
const execAsync = promisify(exec);
// ── exec — simple shell command ───────────────────────────────────────────
/**
 * Fetch the last 10 commits of a repository as { sha, message } objects.
 *
 * Uses the promisified exec with a 512KB buffer. git sometimes writes
 * informational notices to stderr even on success, so stderr is logged
 * as a warning rather than treated as a failure.
 *
 * @param {string} repoPath - directory of the git repository (cwd for git)
 * @returns {Promise<Array<{sha: string, message: string}>>}
 * @throws {Error} when git exits non-zero or fails to start
 */
async function getGitLog(repoPath) {
  const options = { cwd: repoPath, maxBuffer: 512 * 1024 };
  try {
    const { stdout, stderr } = await execAsync('git log --oneline -10', options);
    if (stderr) console.warn('git stderr:', stderr);
    const lines = stdout.trim().split('\n');
    return lines.map((line) => {
      const parts = line.split(' ');
      // First token is the abbreviated sha; the rest is the commit subject.
      return { sha: parts[0], message: parts.slice(1).join(' ') };
    });
  } catch (err) {
    // err.code = exit code, err.message contains both stdout/stderr context
    throw new Error(`git log failed (exit ${err.code}): ${err.message}`);
  }
}
// ── execFile — binary without shell (safe for user input) ─────────────────
/**
 * Convert a document to PDF with headless LibreOffice.
 *
 * User-supplied options are passed as discrete argv entries; execFile
 * bypasses the shell entirely, so none of them can be interpreted as
 * shell metacharacters (no injection possible).
 *
 * @param {string} inputPath - source document path
 * @param {string} outputDir - directory that receives the converted PDF
 * @param {Array} userProvidedOptions - extra CLI flags, stringified per item
 * @returns {Promise<{stdout: string, stderr: string}>}
 */
async function convertPDF(inputPath, outputDir, userProvidedOptions = []) {
  // SAFE: userProvidedOptions are separate array items, never concatenated into shell string
  const args = [
    '--headless',
    '--convert-to',
    'pdf',
    inputPath,
    '--output-dir',
    outputDir,
    ...userProvidedOptions.map((opt) => opt.toString()),
  ];
  const options = { timeout: 30000, maxBuffer: 1024 * 1024 };
  return new Promise((resolve, reject) => {
    execFile('/usr/bin/libreoffice', args, options, (err, stdout, stderr) => {
      if (err) {
        reject(err);
        return;
      }
      resolve({ stdout, stderr });
    });
  });
}
// ── spawn — streaming output for large data ────────────────────────────────
/**
 * Stream a gzipped mongodump archive of one collection into outputStream.
 *
 * spawn() is used so data flows straight from the child to the output
 * stream with no in-memory buffering and no maxBuffer ceiling.
 *
 * @param {string} database - database name for --db
 * @param {string} collection - collection name for --collection
 * @param {import('stream').Writable} outputStream - receives archive bytes
 * @returns {Promise<void>} resolves on exit code 0, rejects otherwise
 */
function streamMongoExport(database, collection, outputStream) {
  const args = ['--db', database, '--collection', collection, '--archive', '--gzip'];
  return new Promise((resolve, reject) => {
    // stdin unused; stdout carries the archive; stderr collected for errors
    const child = spawn('mongodump', args, { stdio: ['ignore', 'pipe', 'pipe'] });
    const stderrChunks = [];
    child.stdout.pipe(outputStream); // stream directly — no buffer limit
    child.stderr.on('data', (chunk) => stderrChunks.push(chunk));
    child.on('error', reject); // e.g. ENOENT — mongodump not installed
    child.on('close', (code) => {
      if (code === 0) {
        resolve();
      } else {
        reject(new Error(`mongodump failed: ${stderrChunks.join('')}`));
      }
    });
  });
}
// Express route: stream MongoDB export directly to HTTP response.
// FIX: the original awaited streamMongoExport with no try/catch — a failed
// dump produced an unhandled promise rejection and left the client hanging.
app.get('/api/v1/admin/export/:collection', async (req, res) => {
  res.setHeader('Content-Type', 'application/gzip');
  res.setHeader('Content-Disposition', `attachment; filename="${req.params.collection}.dump.gz"`);
  try {
    await streamMongoExport('taskmanager', req.params.collection, res);
  } catch (err) {
    console.error('export failed:', err);
    if (res.headersSent) {
      // Bytes already went out — the only honest option is to cut the socket
      // so the client sees a truncated download instead of a corrupt "success".
      res.destroy(err);
    } else {
      res.status(500).json({ error: 'Export failed' });
    }
  }
});
// ── fork — Node.js IPC child ─────────────────────────────────────────────
// tasks/worker.js — child script
// Only run the worker loop when this file is executed directly (forked),
// not when it is require()d by something else.
if (require.main === module) {
  // Handles BULK_IMPORT requests from the parent over the fork() IPC channel.
  process.on('message', async ({ type, payload }) => {
    if (type === 'BULK_IMPORT') {
      const results = { success: 0, failed: 0, errors: [] };
      let processed = 0;
      for (const item of payload.items) {
        try {
          await Task.create(item);
          results.success++;
        } catch (err) {
          results.failed++;
          results.errors.push({ item: item.title, error: err.message });
        }
        processed++;
        // Report progress every 100 processed items.
        // FIX: the original tested `results.success % 100 === 0`, which fired
        // at success=0 (first item failing) and re-fired after every failure
        // while success sat on a multiple of 100.
        if (processed % 100 === 0) {
          process.send({ type: 'PROGRESS', progress: results.success });
        }
      }
      process.send({ type: 'COMPLETE', results });
    }
  });
}
// Parent: fork the worker for bulk import
/**
 * Fork tasks/worker.js and run a bulk import over the IPC channel.
 *
 * FIX: the original resolved on COMPLETE, then called child.kill(); the
 * resulting 'exit' event fires with code === null, and `null !== 0` made the
 * handler reject an already-settled promise. We now settle exactly once and
 * treat the signal-kill exit (code null) as expected.
 *
 * @param {Array<object>} items - documents for the worker to insert
 * @returns {Promise<{success: number, failed: number, errors: Array}>}
 */
function runBulkImport(items) {
  return new Promise((resolve, reject) => {
    const child = fork(path.join(__dirname, 'tasks/worker.js'), [], {
      silent: true, // child stdout/stderr become pipes instead of sharing ours
    });
    let settled = false;
    const settle = (fn, value) => {
      if (!settled) {
        settled = true;
        fn(value);
      }
    };
    child.on('message', (msg) => {
      if (msg.type === 'PROGRESS') {
        console.log(`Import progress: ${msg.progress} items`);
      }
      if (msg.type === 'COMPLETE') {
        settle(resolve, msg.results);
        child.kill(); // safe: exit handler below ignores the signal-kill exit
      }
    });
    child.on('error', (err) => settle(reject, err));
    child.on('exit', (code) => {
      // code is null when the child died from our kill() signal — expected.
      if (code !== 0 && code !== null) {
        settle(reject, new Error(`Worker exited ${code}`));
      }
    });
    child.send({ type: 'BULK_IMPORT', payload: { items } });
  });
}
// ── Shell injection prevention comparison ────────────────────────────────
// ❌ DANGEROUS — user input in exec string:
const userInput = '; rm -rf /'; // malicious input
// FIX: the dangerous call is shown commented out — the original example
// actually EXECUTED `ls ; rm -rf /` whenever this file was loaded.
// exec(`ls ${userInput}`); // would execute: ls ; rm -rf / ← shell injection!
// ✅ SAFE — user input as args array:
spawn('ls', [userInput]); // no shell — 'ls' is called with ['; rm -rf /'] as literal arg
How It Works
Step 1 — exec Runs Through a Shell — spawn Does Not
exec('ls -la | grep .js') works because the shell interprets |. spawn('ls', ['-la', '|', 'grep', '.js']) fails because spawn calls execve directly — the | is passed as a literal argument to ls, not interpreted as a pipe. This is the fundamental difference: shell features (pipes, redirects, glob expansion) require exec; safe argument passing requires spawn or execFile.
Step 2 — spawn stdio Controls Data Flow
The stdio option specifies how each file descriptor (stdin=0, stdout=1, stderr=2) is handled: 'pipe' creates a stream accessible as child.stdin/child.stdout/child.stderr; 'inherit' uses the parent’s file descriptors (child output goes to terminal); 'ignore' redirects to /dev/null. Piping stdout to a response stream (child.stdout.pipe(res)) creates a direct path from child process output to the HTTP client with no memory buffering.
Step 3 — fork Adds Structured IPC over spawn
fork() specialises spawn() for Node.js scripts by opening an additional IPC channel alongside the standard file descriptors. This IPC channel supports process.send() and process.on('message') on both ends, enabling structured JavaScript object passing between parent and child. Objects are serialised via JSON internally — circular references will throw. This IPC channel is more convenient than parsing stdout/stderr for communication.
Step 4 — Exit Code vs Error Object
The error passed to exec/execFile callbacks means the process either failed to start, timed out, or the buffer was exceeded — not necessarily that the command exited with a non-zero code. Non-zero exit codes are reported as err.code. Check both: if err is non-null, check err.code — if it is a number, the process ran but exited with that code; if it is a string (like 'ENOENT'), the process did not run at all.
Step 5 — timeout Option Prevents Runaway Processes
A child process without a timeout can run indefinitely — consuming CPU and memory, and blocking waiting promises. Set timeout: milliseconds to automatically kill the process if it has not exited by then. The killed process receives SIGTERM (on Unix) and the callback receives an error with killed: true. Always set timeouts for external processes in server applications.
Quick Reference
| Task | Code |
|---|---|
| Simple command | const { stdout } = await execAsync('git log') |
| Safe binary call | execFile('/bin/convert', [userFile, '-resize', '200x200', output]) |
| Stream large output | spawn('mongodump', args, { stdio: ['ignore','pipe','pipe'] }) |
| Node.js IPC child | const child = fork('./worker.js'); child.send(msg); child.on('message', ...) |
| Pipe child stdout to HTTP | child.stdout.pipe(res) |
| Set timeout | exec(cmd, { timeout: 5000 }) |
| Set working directory | exec(cmd, { cwd: '/path/to/repo' }) |
| Prevent shell injection | Use spawn()/execFile() with args array |