Node.js File System: Complete Guide to the fs Module

Node.js File System: Complete Guide to the fs Module

Key Takeaways

Master the Node.js fs API: promises, streams, directory walks, file watchers, safe writes, and pitfalls—aligned with production backend practice.

Introduction

What is the fs module?

fs is Node’s built-in API for files and directories.

Capabilities:

  • Read/write files
  • Create/remove directories
  • stat metadata
  • Watch for changes
  • Stream large data

Three styles: synchronous (*Sync), callback, and Promise (fs.promises) — prefer Promises with async/await for server code.


1. Reading files

Synchronous (blocking)

// Load the classic callback/sync fs API.
const fs = require('fs');

// readFileSync blocks the event loop until the whole file is in memory.
try {
    console.log(fs.readFileSync('file.txt', 'utf8'));
} catch (err) {
    // Print only the message, e.g. "ENOENT: no such file or directory ...".
    console.error(err.message);
}

Avoid on request handlers—it blocks the event loop.

Callback

// Non-blocking read; Node invokes the callback with (err, data).
fs.readFile('file.txt', 'utf8', (err, data) => {
    if (err) {
        console.error(err.message);
        return;
    }
    console.log(data);
});
Promise (async/await) — preferred

const fs = require('fs').promises;

// Read file.txt and print it, mapping the two most common fs error
// codes to friendly messages; anything else falls through to a
// generic message.
async function readFile() {
    try {
        const contents = await fs.readFile('file.txt', 'utf8');
        console.log(contents);
    } catch (err) {
        switch (err.code) {
            case 'ENOENT':
                console.error('Missing file');
                break;
            case 'EACCES':
                console.error('Permission denied');
                break;
            default:
                console.error(err.message);
        }
    }
}

readFile();

Encoding and buffers

// Passing an encoding makes readFile resolve to a decoded string.
const text = await fs.readFile('text.txt', 'utf8');
// Without an encoding it resolves to a raw Buffer.
const buf = await fs.readFile('image.png'); // Buffer
// A Buffer can be decoded later with any encoding...
const asString = buf.toString('utf8');
// ...including base64 (useful for data URLs or JSON transport).
const base64 = buf.toString('base64');

2. Writing files

// Promise-based fs API.
const fs = require('fs').promises;

// writeFile replaces the file's contents (creating the file if missing)...
await fs.writeFile('out.txt', 'Hello, Node.js!', 'utf8');
// ...while appendFile adds to the end without truncating.
await fs.appendFile('out.txt', '\nMore', 'utf8');

JSON helpers

// Read and parse a JSON file.
// Resolves to null when the file does not exist; any other fs failure
// (and JSON syntax errors) propagates to the caller.
async function readJSON(file) {
    let raw;
    try {
        raw = await fs.readFile(file, 'utf8');
    } catch (err) {
        if (err.code === 'ENOENT') return null;
        throw err;
    }
    return JSON.parse(raw);
}

// Serialize obj as pretty-printed (2-space indent) JSON and write it to file.
async function writeJSON(file, obj) {
    const json = JSON.stringify(obj, null, 2);
    await fs.writeFile(file, json, 'utf8');
}

3. File metadata and operations

// stat returns metadata without reading the file's contents.
const stats = await fs.stat('file.txt');
console.log(stats.size, stats.isFile(), stats.isDirectory());

// Copy, rename/move, delete, and change permissions.
await fs.copyFile('a.txt', 'b.txt');
await fs.rename('b.txt', 'c.txt');
await fs.unlink('c.txt');
// 0o755 = rwxr-xr-x.
await fs.chmod('file.txt', 0o755); // Unix

Existence check

// Resolve to true when the path is accessible to this process, false otherwise.
// Note: check-then-act is inherently racy; prefer attempting the real
// operation and handling its error where possible.
async function exists(file) {
    return fs.access(file).then(
        () => true,
        () => false
    );
}

4. Directories

// recursive: true creates intermediate directories and does not error
// if the path already exists.
await fs.mkdir('nested/path', { recursive: true });

// Names only vs. Dirent objects (isFile()/isDirectory() without extra stat calls).
const names = await fs.readdir('.');
const entries = await fs.readdir('.', { withFileTypes: true });

// rm -rf equivalent: recursive delete, no error if the path is missing.
await fs.rm('dir', { recursive: true, force: true });

Recursive directory walk and findJSFiles patterns follow the idioms below.


5. Streams

See async & streams post for createReadStream, pipe, zlib, and Transform—the same patterns apply here.


6. Watching files

fs.watch

// Emits 'rename' and 'change' events; filename may be null on some platforms.
// NOTE(review): { recursive: true } support varies by platform and Node
// version — confirm against the Node release you target.
const watcher = fs.watch('dir', { recursive: true }, (event, filename) => {
    console.log(event, filename);
});

chokidar

Install with `npm install chokidar`, then:
// chokidar (third-party) smooths over fs.watch platform quirks and supports globs.
const chokidar = require('chokidar');
chokidar.watch('src/**/*.js').on('change', (path) => console.log('changed', path));

7. Practical examples

JSON read/write (full flow)

const fs = require('fs').promises;

// Load filename and parse it as JSON.
// Returns null when the file does not exist; rethrows any other error
// (including JSON syntax errors).
async function readJSON(filename) {
    try {
        const text = await fs.readFile(filename, 'utf8');
        return JSON.parse(text);
    } catch (err) {
        if (err.code !== 'ENOENT') throw err;
        return null;
    }
}

// Persist data to filename as human-readable JSON (2-space indent).
async function writeJSON(filename, data) {
    const payload = JSON.stringify(data, null, 2);
    await fs.writeFile(filename, payload, 'utf8');
}

// Demo flow: create a user list, persist it, reload it from disk,
// mutate one record, and persist the change.
async function main() {
    const users = [
        { id: 1, name: 'Alice', age: 25 },
        { id: 2, name: 'Bob', age: 30 }
    ];

    await writeJSON('users.json', users);

    // Round-trip through disk, then bump Alice's age and save again.
    const loaded = await readJSON('users.json');
    loaded[0].age = 26;
    await writeJSON('users.json', loaded);
}

main().catch(console.error);

Recursive directory walk

const fs = require('fs').promises;
const path = require('path');

// Depth-first traversal of directory: recurse into subdirectories and
// await onFile(fullPath) for every non-directory entry, in readdir order.
async function walk(directory, onFile) {
    const entries = await fs.readdir(directory, { withFileTypes: true });
    for (const entry of entries) {
        const fullPath = path.join(directory, entry.name);
        if (entry.isDirectory()) {
            await walk(fullPath, onFile);
        } else {
            await onFile(fullPath);
        }
    }
}

// Collect every file with a .js extension under root (recursively),
// in traversal order.
async function findJsFiles(root) {
    const matches = [];
    await walk(root, async (file) => {
        if (path.extname(file) === '.js') matches.push(file);
    });
    return matches;
}

Backup helper (simplified)

// Copies every regular file in sourceDir into a freshly created,
// timestamped subdirectory of backupDir. One level deep only —
// subdirectories inside sourceDir are skipped.
class BackupManager {
    constructor(sourceDir, backupDir) {
        this.sourceDir = sourceDir;
        this.backupDir = backupDir;
    }

    // Performs one backup run and returns the destination directory path.
    async backup() {
        const fs = require('fs').promises;
        const path = require('path');

        await fs.mkdir(this.backupDir, { recursive: true });

        // ISO timestamps contain ':' which is not allowed in Windows paths.
        const stamp = new Date().toISOString().replace(/:/g, '-');
        const destDir = path.join(this.backupDir, `backup-${stamp}`);
        await fs.mkdir(destDir);

        for (const name of await fs.readdir(this.sourceDir)) {
            const source = path.join(this.sourceDir, name);
            const info = await fs.stat(source);
            if (info.isFile()) {
                await fs.copyFile(source, path.join(destDir, name));
            }
        }
        return destDir;
    }
}

CSV via readline (sketch)

const fs = require('fs');
const readline = require('readline');

// Parse a simple CSV file into an array of row objects keyed by the
// header row. Blank lines (including a trailing newline or stray empty
// lines) are skipped instead of producing all-empty rows.
//
// Limitation: fields are split on raw commas — quoted fields that
// contain commas are NOT handled. Use a real CSV parser for untrusted
// or complex data.
async function parseCsv(filename) {
    const stream = fs.createReadStream(filename);
    // crlfDelay: Infinity treats \r\n as a single line break.
    const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });

    const rows = [];
    let headers = null;
    for await (const line of rl) {
        if (line.trim() === '') continue; // skip blank lines
        if (headers === null) {
            // First non-blank line is the header row.
            headers = line.split(',');
            continue;
        }
        const cols = line.split(',');
        const row = {};
        headers.forEach((h, i) => {
            // Missing trailing columns become empty strings.
            row[h.trim()] = (cols[i] ?? '').trim();
        });
        rows.push(row);
    }
    return rows;
}

8. Error codes

Handle the common error codes explicitly: ENOENT (no such file or directory), EACCES (permission denied), EISDIR (expected a file but got a directory), ENOTDIR (a path component is not a directory), and EEXIST (target already exists).

Safe write pattern (write temp then rename)

Atomic-ish replace: write the new contents to a temporary file (e.g. file.tmp) and then fs.rename it onto the final path, so readers never observe a partially written file.


9. Performance

  • Tune highWaterMark on streams
  • Prefer Promise.all for independent reads
  • Stream line counts instead of loading multi-GB files fully

10. Common pitfalls

  • Use path.join(__dirname, 'file.txt') instead of fragile relative paths
  • Pass 'utf8' when you expect a string
  • Avoid readFileSync in Express handlers
  • Close streams or use stream.pipeline / util.promisify(pipeline)

Summary

| Style       | Server use | CLI / boot |
| ----------- | ---------- | ---------- |
| Sync        | Avoid      | OK         |
| Callback    | Legacy     | OK         |
| fs.promises | Preferred  | Preferred  |

Next steps

  • Database integration
  • Testing

Resources