Node.js provides powerful and flexible APIs for interacting with the file system. Whether you're building a logging utility, a data storage layer, or simply manipulating configuration files, reading from and writing to files is a critical operation in many applications. The core module that facilitates file system operations in Node.js is the fs module.
In this document, we will explore how to read and write files using both synchronous and asynchronous methods, how to work with streams for large files, and understand the differences and best practices. We will also look into real-world use cases, error handling, encoding, working with JSON, and modern promise-based APIs.
Node.js ships with a built-in module called fs (short for file system) that provides both synchronous and asynchronous functions for interacting with the file system. It allows you to read, write, rename, delete, and manipulate files and directories.
const fs = require('fs'); // CommonJS import — the classic Node.js module style
// Modern ES module syntax (Node 14+, with "type": "module" set in package.json)
// import fs from 'fs';
The fs.readFile() method is used to asynchronously read the entire contents of a file.
const fs = require('fs');

// Asynchronously read example.txt as UTF-8; the callback receives (err, data).
fs.readFile('example.txt', 'utf8', (err, contents) => {
  if (err) {
    console.error('Error reading file:', err);
  } else {
    console.log('File content:', contents);
  }
});
The fs.readFileSync() method reads a file synchronously, blocking execution until complete.
const fs = require('fs');

// Synchronous read: blocks the event loop until the whole file is loaded.
try {
  const contents = fs.readFileSync('example.txt', 'utf8');
  console.log('File content:', contents);
} catch (readErr) {
  console.error('Error reading file:', readErr);
}
fs.writeFile() writes data to a file asynchronously. If the file doesn't exist, it's created.
const fs = require('fs');

// Asynchronously write (creating if absent) output.txt with the given text.
const message = 'Hello, Node.js!';
fs.writeFile('output.txt', message, 'utf8', (err) => {
  if (err) {
    console.error('Error writing to file:', err);
  } else {
    console.log('File written successfully');
  }
});
fs.writeFileSync() writes data synchronously.
const fs = require('fs');

// Blocking write: execution pauses until output.txt is flushed to disk.
const text = 'Synchronous write';
try {
  fs.writeFileSync('output.txt', text, 'utf8');
  console.log('Synchronous write successful');
} catch (writeErr) {
  console.error('Write error:', writeErr);
}
To append content without overwriting the file, use fs.appendFile().
const fs = require('fs');

// Append a line to log.txt, creating the file if it does not exist.
// Fix: `throw err` inside an async callback cannot be caught by any
// surrounding try/catch — it becomes an uncaught exception and crashes
// the process. Report the error instead.
fs.appendFile('log.txt', 'New log entry\n', (err) => {
  if (err) {
    console.error('Failed to append to log:', err);
    return;
  }
  console.log('Log updated.');
});
const fs = require('fs');

// Read data.json and parse its contents as JSON.
fs.readFile('data.json', 'utf8', (err, data) => {
  if (err) {
    // Report instead of `throw err`: a throw inside an async callback
    // is an uncaught exception and crashes the process.
    console.error('Error reading file:', err);
    return;
  }
  try {
    // JSON.parse throws a SyntaxError on malformed input, so guard it.
    const obj = JSON.parse(data);
    console.log(obj);
  } catch (parseErr) {
    console.error('Invalid JSON in data.json:', parseErr);
  }
});
const fs = require('fs');

// Object to persist as pretty-printed JSON (2-space indentation).
const person = {
  name: 'Alice',
  age: 30,
};

fs.writeFile('person.json', JSON.stringify(person, null, 2), (err) => {
  if (err) {
    // Report instead of `throw err` — throwing inside an async callback
    // cannot be caught and crashes the process.
    console.error('Error writing JSON file:', err);
    return;
  }
  console.log('JSON file saved.');
});
Modern Node.js supports promise-based file system operations via the fs/promises module.
const { writeFile, readFile } = require('fs/promises');

// Demonstrates the promise-based fs API using async/await:
// write hello.txt, read it back, and print the result.
async function writeReadFile() {
  try {
    await writeFile('hello.txt', 'Hello with Promises!', 'utf8');
    const content = await readFile('hello.txt', 'utf8');
    console.log('Content:', content);
  } catch (err) {
    console.error(err);
  }
}

writeReadFile();
When dealing with very large files, loading the entire file into memory is inefficient. Instead, Node.js streams can read or write in chunks.
const fs = require('fs');

// Stream bigfile.txt in chunks instead of loading it all into memory.
fs.createReadStream('bigfile.txt', 'utf8')
  .on('data', (chunk) => {
    console.log('Chunk:', chunk);
  })
  .on('end', () => {
    console.log('Finished reading file.');
  });
const fs = require('fs');

// Write output.txt incrementally through a writable stream.
const writeStream = fs.createWriteStream('output.txt');
['Line 1\n', 'Line 2\n'].forEach((line) => writeStream.write(line));
writeStream.end('End of file\n'); // final chunk, then close the stream
You can directly pipe read streams to write streams.
const fs = require('fs');

// Copy input.txt to copy.txt by piping a read stream into a write stream.
fs.createReadStream('input.txt').pipe(fs.createWriteStream('copy.txt'));
const fs = require('fs');

// Synchronously test whether check.txt is present.
// NOTE(review): for code that acts on the file afterwards, prefer
// attempting the operation and handling the error (avoids a TOCTOU race).
const exists = fs.existsSync('check.txt');
console.log(exists ? 'File exists' : 'File not found');
const fs = require('fs');

// Asynchronously delete delete_me.txt, reporting the outcome.
fs.unlink('delete_me.txt', (err) => {
  if (err) {
    console.error('Failed to delete file:', err);
    return;
  }
  console.log('File deleted');
});
const fs = require('fs');

// Rename oldname.txt to newname.txt.
// Fix: report failures rather than `throw err`, which would be an
// uncaught exception inside the async callback and crash the process.
fs.rename('oldname.txt', 'newname.txt', (err) => {
  if (err) {
    console.error('Failed to rename file:', err);
    return;
  }
  console.log('File renamed');
});
const fs = require('fs');

// Change permissions of example.txt to rwxrwxrwx.
// SECURITY NOTE: 0o777 grants read/write/execute to everyone — fine for
// a demo, but prefer a least-privilege mode (e.g. 0o644) in real code.
fs.chmod('example.txt', 0o777, (err) => {
  if (err) {
    // Fix: report instead of `throw err`; throwing inside an async
    // callback is an uncaught exception that crashes the process.
    console.error('Failed to change permissions:', err);
    return;
  }
  console.log('Permissions changed');
});
Always check for errors when performing file system operations, especially asynchronous ones.
// Distinguish "file missing" (ENOENT) from any other I/O failure.
fs.readFile('nonexistent.txt', 'utf8', (err, data) => {
  if (!err) {
    console.log(data);
    return;
  }
  if (err.code === 'ENOENT') {
    console.error('File does not exist');
  } else {
    console.error('An error occurred:', err);
  }
});
const fs = require('fs');

/**
 * Append a timestamped message to app.log (fire-and-forget logger).
 * @param {string} message - Text to record.
 */
function log(message) {
  const timestamp = new Date().toISOString();
  fs.appendFile('app.log', `${timestamp} - ${message}\n`, (err) => {
    // Fix: include the error object so the failure reason isn't swallowed.
    if (err) console.error('Failed to write to log', err);
  });
}

log('Application started');
const fs = require('fs');

// Load and parse the application configuration from config.json.
fs.readFile('config.json', 'utf8', (err, data) => {
  if (err) {
    console.error('Failed to load config');
    return;
  }
  try {
    // Fix: guard JSON.parse — a malformed config.json would otherwise
    // throw an uncaught SyntaxError inside this callback.
    const config = JSON.parse(data);
    console.log('App configuration loaded:', config);
  } catch (parseErr) {
    console.error('Failed to parse config:', parseErr);
  }
});
Node.js provides a comprehensive and flexible set of tools to read, write, and manage files efficiently. Understanding the differences between synchronous and asynchronous methods, using streams for large files, leveraging promise-based APIs, and ensuring proper error handling are essential skills for any Node.js developer.
Whether you are developing command-line utilities, backend services, or real-time applications, mastering the fs module and its capabilities can significantly improve your productivity and code quality. Remember to always choose non-blocking methods for better performance and responsiveness, and embrace modern patterns like async/await to write cleaner, more maintainable code.
A function passed as an argument and executed later.
Runs multiple instances to utilize multi-core systems.
Reusable blocks of code, exported and imported using require() or import.
nextTick() executes before setImmediate() in the event loop.
Starts a server and listens on specified port.
Node Package Manager — installs, manages, and shares JavaScript packages.
A minimal and flexible web application framework for Node.js.
A stream handles reading or writing data continuously.
It processes asynchronous callbacks and non-blocking I/O operations efficiently.
Node.js is a JavaScript runtime built on Chrome's V8 engine for server-side scripting.
An object representing the eventual completion or failure of an asynchronous operation.
require is CommonJS; import is ES6 syntax (requires transpilation or newer versions).
Use module.exports or exports.functionName.
Variables stored outside the code for configuration, accessed using process.env.
MongoDB, often used with Mongoose for schema management.
Describes project details and manages dependencies and scripts.
Synchronous blocks execution; asynchronous runs in background without blocking.
Allows or restricts resources shared between different origins.
Use try-catch, error events, or middleware for error handling.
Provides file system-related operations like read, write, delete.
Using event-driven architecture and non-blocking I/O.
Functions in Express that execute during request-response cycle.
A set of routes or endpoints to interact with server logic or databases.
Yes, it's single-threaded but handles concurrency using the event loop and asynchronous callbacks.
Middleware to parse incoming request bodies, like JSON or form data.
Copyright © 2024 letsupdateskills. All rights reserved.