Node.js 文件系统进阶
章节介绍
在掌握了文件系统基础操作后,我们需要深入学习更高级的文件系统特性。本教程将详细介绍文件流、异步操作模式、路径处理等高级主题,帮助您处理大文件、优化性能、提高代码可维护性。
核心知识点
同步与异步操作对比
理解同步和异步操作的区别对于编写高效的 Node.js 应用至关重要。
同步 vs 异步操作:
同步操作:
┌─────────────┐
│ 主线程 │ → 读取文件 → 等待完成 → 继续执行
└─────────────┘
异步操作:
┌─────────────┐
│ 主线程 │ → 发起读取请求 → 继续执行其他任务
│ │ ↓
│ │ 读取完成回调 → 处理结果
└─────────────┘

同步操作
const fs = require('fs');
const path = require('path');
// Synchronous read demo: readFileSync blocks the event loop until the file
// is fully read, so the final log line only runs after the read finishes.
function readFileSyncExample() {
  console.log('开始读取文件...');
  try {
    const text = fs.readFileSync(path.join(__dirname, 'test.txt'), 'utf8');
    console.log('文件内容:', text);
    console.log('文件读取完成');
  } catch (err) {
    console.error('读取文件失败:', err.message);
  }
  console.log('继续执行其他操作');
}
readFileSyncExample();
// Synchronous write demo: writeFileSync returns only after the data is written.
function writeFileSyncExample() {
  console.log('开始写入文件...');
  try {
    fs.writeFileSync(path.join(__dirname, 'output.txt'), 'Hello, Node.js!', 'utf8');
    console.log('文件写入完成');
  } catch (err) {
    console.error('写入文件失败:', err.message);
  }
  console.log('继续执行其他操作');
}
writeFileSyncExample();

异步操作
const fs = require('fs');
const path = require('path');
// Asynchronous read demo (callback style): the callback fires after I/O
// completes, while the last log line runs immediately.
function readFileCallbackExample() {
  console.log('开始读取文件...');
  const target = path.join(__dirname, 'test.txt');
  fs.readFile(target, 'utf8', (err, text) => {
    if (err) {
      console.error('读取文件失败:', err.message);
      return;
    }
    console.log('文件内容:', text);
    console.log('文件读取完成');
  });
  console.log('继续执行其他操作(不等待文件读取完成)');
}
readFileCallbackExample();
// Asynchronous write demo (callback style): fs.writeFile does not block.
function writeFileCallbackExample() {
  console.log('开始写入文件...');
  const target = path.join(__dirname, 'output.txt');
  fs.writeFile(target, 'Hello, Node.js!', 'utf8', (err) => {
    if (err) {
      console.error('写入文件失败:', err.message);
      return;
    }
    console.log('文件写入完成');
  });
  console.log('继续执行其他操作(不等待文件写入完成)');
}
writeFileCallbackExample();

Promise 方式
const fs = require('fs').promises;
const path = require('path');
// Promise-based read: await suspends this async function without blocking
// the event loop; errors surface through try/catch.
async function readFilePromiseExample() {
  console.log('开始读取文件...');
  try {
    const text = await fs.readFile(path.join(__dirname, 'test.txt'), 'utf8');
    console.log('文件内容:', text);
    console.log('文件读取完成');
  } catch (err) {
    console.error('读取文件失败:', err.message);
  }
  console.log('继续执行其他操作');
}
readFilePromiseExample();
// Promise-based write using async/await with try/catch error handling.
async function writeFilePromiseExample() {
  console.log('开始写入文件...');
  try {
    await fs.writeFile(path.join(__dirname, 'output.txt'), 'Hello, Node.js!', 'utf8');
    console.log('文件写入完成');
  } catch (err) {
    console.error('写入文件失败:', err.message);
  }
  console.log('继续执行其他操作');
}
writeFilePromiseExample();

文件流(Stream)
文件流是处理大文件的有效方式,可以避免内存溢出问题。
文件流工作原理:
┌─────────────┐
│ 源文件 │
└──────┬──────┘
│
↓
┌─────────────┐
│ 可读流 │ → 分块读取数据
└──────┬──────┘
│
↓
┌─────────────┐
│ 处理管道 │ → 转换/处理数据
└──────┬──────┘
│
↓
┌─────────────┐
│ 可写流 │ → 分块写入数据
└──────┬──────┘
│
↓
┌─────────────┐
│ 目标文件 │
└─────────────┘

可读流
const fs = require('fs');
const path = require('path');
// Reads a file in chunks via a readable stream and reports per-chunk progress.
function createReadStreamExample() {
  const source = path.join(__dirname, 'large-file.txt');
  const stream = fs.createReadStream(source, { encoding: 'utf8' });
  let chunks = 0;
  stream
    .on('data', (piece) => {
      // One 'data' event per internal buffer (default 64 KiB for files).
      chunks += 1;
      console.log(`读取第 ${chunks} 块数据,长度:${piece.length}`);
    })
    .on('end', () => {
      console.log('文件读取完成,共读取', chunks, '块数据');
    })
    .on('error', (err) => {
      console.error('读取文件失败:', err.message);
    });
}
createReadStreamExample();
// Accumulates an entire file from stream chunks (only sensible for small files —
// for large files, process chunks individually instead of concatenating).
function readStreamExample() {
  const stream = fs.createReadStream(path.join(__dirname, 'test.txt'), { encoding: 'utf8' });
  const pieces = [];
  stream.on('data', (piece) => pieces.push(piece));
  stream.on('end', () => {
    console.log('文件内容:', pieces.join(''));
  });
  stream.on('error', (err) => {
    console.error('读取文件失败:', err.message);
  });
}
readStreamExample();

可写流
const fs = require('fs');
const path = require('path');
// Writes a few lines through a writable stream, then signals end-of-input
// with end(); 'finish' fires once all buffered data has been flushed.
function createWriteStreamExample() {
  const out = fs.createWriteStream(path.join(__dirname, 'output.txt'), { encoding: 'utf8' });
  ['第一行数据', '第二行数据', '第三行数据'].forEach((line) => {
    out.write(`${line}\n`);
  });
  out.end(); // no more writes
  out.on('finish', () => {
    console.log('文件写入完成');
  });
  out.on('error', (err) => {
    console.error('写入文件失败:', err.message);
  });
}
createWriteStreamExample();
// Streams an array of lines to disk, logging progress for each write.
function writeStreamExample() {
  const out = fs.createWriteStream(path.join(__dirname, 'output.txt'), { encoding: 'utf8' });
  const rows = ['第一行数据', '第二行数据', '第三行数据'];
  for (let i = 0; i < rows.length; i += 1) {
    out.write(`${rows[i]}\n`);
    console.log(`写入第 ${i + 1} 行`);
  }
  out.end();
  out.on('finish', () => {
    console.log('所有数据写入完成');
  });
  out.on('error', (err) => {
    console.error('写入文件失败:', err.message);
  });
}
writeStreamExample();

管道操作
const fs = require('fs');
const path = require('path');
// Copies a file by piping a readable stream into a writable stream.
// pipe() handles back-pressure automatically but does NOT forward 'error'
// events, so each stream gets its own handler.
function pipeExample() {
  const src = fs.createReadStream(path.join(__dirname, 'source.txt'));
  const dst = fs.createWriteStream(path.join(__dirname, 'destination.txt'));
  src.pipe(dst);
  dst.on('finish', () => {
    console.log('文件复制完成');
  });
  src.on('error', (err) => {
    console.error('读取文件失败:', err.message);
  });
  dst.on('error', (err) => {
    console.error('写入文件失败:', err.message);
  });
}
pipeExample();
// Numbers every non-blank line of large-file.txt into processed-file.txt.
// Fixes over the original:
// 1. Lines split across chunk boundaries were counted as two separate lines —
//    the incomplete tail of each chunk is now buffered until the next chunk.
// 2. The read stream had no 'error' handler, so a missing source file crashed
//    the process with an unhandled 'error' event.
function processLargeFile() {
  const sourcePath = path.join(__dirname, 'large-file.txt');
  const destPath = path.join(__dirname, 'processed-file.txt');
  const readStream = fs.createReadStream(sourcePath, { encoding: 'utf8' });
  const writeStream = fs.createWriteStream(destPath, { encoding: 'utf8' });
  let lineCount = 0;
  let buffer = ''; // incomplete trailing line carried between chunks
  readStream.on('data', (chunk) => {
    buffer += chunk;
    const lines = buffer.split('\n');
    buffer = lines.pop(); // keep the unfinished last line
    lines.forEach((line) => {
      if (line.trim()) {
        lineCount++;
        writeStream.write(`${lineCount}: ${line}\n`);
      }
    });
  });
  readStream.on('end', () => {
    // Flush a final line that had no trailing newline.
    if (buffer.trim()) {
      lineCount++;
      writeStream.write(`${lineCount}: ${buffer}\n`);
    }
    writeStream.end();
    console.log(`处理完成,共 ${lineCount} 行`);
  });
  readStream.on('error', (error) => {
    console.error('读取文件失败:', error.message);
    writeStream.destroy();
  });
  writeStream.on('finish', () => {
    console.log('文件处理完成');
  });
  writeStream.on('error', (error) => {
    console.error('写入文件失败:', error.message);
  });
}
processLargeFile();

转换流
转换流用于在读取和写入之间转换数据。
const { Transform } = require('stream');
const fs = require('fs');
const path = require('path');
// Builds a Transform stream that upper-cases every chunk passing through it.
function createTransformStream() {
  return new Transform({
    transform(chunk, _encoding, done) {
      // done(null, data) both pushes the data and signals completion.
      done(null, chunk.toString().toUpperCase());
    }
  });
}
// Pipes source.txt through the upper-casing transform into uppercase.txt.
// Fix: pipe() does not forward 'error' events, so the original crashed with an
// unhandled 'error' when source.txt was missing; every stream now has a handler.
function useTransformStream() {
  const src = fs.createReadStream(path.join(__dirname, 'source.txt'), { encoding: 'utf8' });
  const dst = fs.createWriteStream(path.join(__dirname, 'uppercase.txt'), { encoding: 'utf8' });
  const upper = createTransformStream();
  src.pipe(upper).pipe(dst);
  dst.on('finish', () => {
    console.log('文件转换完成');
  });
  src.on('error', (error) => console.error('读取文件失败:', error.message));
  upper.on('error', (error) => console.error('转换失败:', error.message));
  dst.on('error', (error) => console.error('写入文件失败:', error.message));
}
useTransformStream();
// Wraps a per-line processor in a Transform stream that re-assembles lines
// spanning chunk boundaries: the unfinished tail of each chunk is buffered
// until the next chunk (or flush) completes it.
function createLineTransformStream(processor) {
  let pending = ''; // tail of the previous chunk that did not end with '\n'
  const emitLine = (stream, line) => {
    // Blank/whitespace-only lines are dropped, matching the processing contract.
    if (line.trim()) {
      stream.push(processor(line) + '\n');
    }
  };
  return new Transform({
    transform(chunk, _encoding, done) {
      const pieces = (pending + chunk.toString()).split('\n');
      pending = pieces.pop(); // keep the (possibly empty) unfinished line
      for (const line of pieces) {
        emitLine(this, line);
      }
      done();
    },
    flush(done) {
      // Emit whatever is left once the input ends.
      emitLine(this, pending);
      done();
    }
  });
}
// Runs source.txt through a line-tagging transform into processed.txt.
// Fix: pipe() does not forward 'error' events, so the original crashed with an
// unhandled 'error' when source.txt was missing; every stream now has a handler.
function useLineTransformStream() {
  const src = fs.createReadStream(path.join(__dirname, 'source.txt'), { encoding: 'utf8' });
  const dst = fs.createWriteStream(path.join(__dirname, 'processed.txt'), { encoding: 'utf8' });
  const tagger = createLineTransformStream((line) => {
    return `[处理] ${line}`;
  });
  src.pipe(tagger).pipe(dst);
  dst.on('finish', () => {
    console.log('行处理完成');
  });
  src.on('error', (error) => console.error('读取文件失败:', error.message));
  tagger.on('error', (error) => console.error('处理失败:', error.message));
  dst.on('error', (error) => console.error('写入文件失败:', error.message));
}
useLineTransformStream();

高级路径处理
const path = require('path');
const fs = require('fs');
// Collapses '.', '..' and duplicate separators — pure string work, no filesystem access.
function normalizePath(inputPath) {
  const result = path.normalize(inputPath);
  console.log('规范化路径:', result);
  return result;
}
normalizePath('./folder/../folder/./file.txt');
normalizePath('/path/to/../to/./file.txt');
// Splits a path string into its { root, dir, base, name, ext } components.
function parsePath(inputPath) {
  const parts = path.parse(inputPath);
  console.log('路径解析:', parts);
  return parts;
}
parsePath('/path/to/file.txt');
parsePath('C:\\Users\\user\\file.txt');
// Computes the relative path needed to get from `from` to `to`.
function calculateRelativePath(from, to) {
  const rel = path.relative(from, to);
  console.log('相对路径:', rel);
  return rel;
}
calculateRelativePath('/path/to/file1.txt', '/path/to/file2.txt');
calculateRelativePath('/path/to/subdir/file.txt', '/path/to/file.txt');
// Resolves segments right-to-left into an absolute path (anchored at the
// current working directory when no segment is absolute).
function resolvePath(...segments) {
  const absolute = path.resolve(...segments);
  console.log('绝对路径:', absolute);
  return absolute;
}
resolvePath('folder', 'file.txt');
resolvePath('/path/to', '../folder', 'file.txt');
// Joins and normalizes segments with the platform separator.
function joinPath(...segments) {
  const combined = path.join(...segments);
  console.log('拼接路径:', combined);
  return combined;
}
joinPath('folder', 'subfolder', 'file.txt');
joinPath('/path/to', '../folder', 'file.txt');
// Breaks a path into extension, bare file name (without extension), and directory.
function formatPath(inputPath) {
  const ext = path.extname(inputPath);
  const info = {
    ext,
    name: path.basename(inputPath, ext),
    dir: path.dirname(inputPath),
  };
  console.log('扩展名:', info.ext);
  console.log('文件名:', info.name);
  console.log('目录:', info.dir);
  return info;
}
formatPath('/path/to/file.txt');
formatPath('C:\\Users\\user\\document.pdf');
// Checks that inputPath, resolved against basePath, cannot escape basePath.
// Fix: the original prefix test (`normalized.startsWith(resolve(basePath))`)
// wrongly accepted sibling directories such as '/safe/directory-evil' for base
// '/safe/directory'. Comparing via path.relative avoids that partial-segment
// false positive.
function isPathSafe(inputPath, basePath) {
  const base = path.resolve(basePath);
  const target = path.resolve(base, inputPath);
  const rel = path.relative(base, target);
  // Inside base iff the relative path neither climbs out ('..') nor is absolute.
  const isSafe = rel === '' || (!rel.startsWith('..') && !path.isAbsolute(rel));
  console.log('路径安全:', isSafe);
  return isSafe;
}
isPathSafe('../etc/passwd', '/safe/directory');
isPathSafe('subfolder/file.txt', '/safe/directory');

实用案例分析
案例 1:大文件处理工具
创建一个能够高效处理大文件的工具。
// large-file-processor.js
const fs = require('fs');
const path = require('path');
const { Transform } = require('stream');
// Utility class for processing large files with streams, so memory use stays
// bounded by the chunk size rather than the file size.
// NOTE(review): the 'error' handlers below re-throw from inside event
// callbacks, which surfaces as an uncaught exception rather than an error the
// caller can catch — kept unchanged here to preserve behavior, but callers
// should be aware they cannot try/catch these failures.
class LargeFileProcessor {
// options.chunkSize: read highWaterMark in bytes (default 64 KiB);
// options.encoding: text encoding used for reads and writes (default utf8).
constructor(options = {}) {
this.chunkSize = options.chunkSize || 64 * 1024; // 64KB
this.encoding = options.encoding || 'utf8';
}
// Reads filePath chunk by chunk; onData(chunk, chunkCount, totalBytes) fires
// for each chunk and onComplete(chunkCount, totalBytes) once the stream ends.
readLargeFile(filePath, onData, onComplete) {
const readStream = fs.createReadStream(filePath, {
encoding: this.encoding,
highWaterMark: this.chunkSize
});
let chunkCount = 0;
let totalBytes = 0;
readStream.on('data', (chunk) => {
chunkCount++;
totalBytes += chunk.length;
onData(chunk, chunkCount, totalBytes);
});
readStream.on('end', () => {
onComplete(chunkCount, totalBytes);
});
readStream.on('error', (error) => {
console.error('读取文件失败:', error.message);
throw error;
});
}
// Writes chunks produced by dataGenerator() until it returns a falsy value,
// honoring back-pressure: when write() returns false, waits for 'drain'
// before writing the next chunk.
writeLargeFile(filePath, dataGenerator, onComplete) {
const writeStream = fs.createWriteStream(filePath, {
encoding: this.encoding
});
let chunkCount = 0;
let totalBytes = 0;
function writeNext() {
const chunk = dataGenerator();
if (!chunk) {
writeStream.end();
return;
}
const canContinue = writeStream.write(chunk);
chunkCount++;
totalBytes += chunk.length;
if (canContinue) {
writeNext();
} else {
// Internal buffer is full — resume only after it drains.
writeStream.once('drain', writeNext);
}
}
writeStream.on('finish', () => {
onComplete(chunkCount, totalBytes);
});
writeStream.on('error', (error) => {
console.error('写入文件失败:', error.message);
throw error;
});
writeNext();
}
// Copies sourcePath to destPath via pipe(), reporting percentage progress
// through onProgress(progress, copiedBytes, totalBytes).
copyLargeFile(sourcePath, destPath, onProgress, onComplete) {
const readStream = fs.createReadStream(sourcePath);
const writeStream = fs.createWriteStream(destPath);
let copiedBytes = 0;
const totalBytes = fs.statSync(sourcePath).size;
readStream.on('data', (chunk) => {
copiedBytes += chunk.length;
const progress = (copiedBytes / totalBytes) * 100;
onProgress(progress, copiedBytes, totalBytes);
});
readStream.pipe(writeStream);
writeStream.on('finish', () => {
onComplete(copiedBytes);
});
readStream.on('error', (error) => {
console.error('读取文件失败:', error.message);
throw error;
});
writeStream.on('error', (error) => {
console.error('写入文件失败:', error.message);
throw error;
});
}
// Invokes lineProcessor(line, lineNumber) for every non-blank line; the
// incomplete tail of each chunk is buffered so lines split across chunk
// boundaries are re-assembled correctly.
processFileByLine(filePath, lineProcessor, onComplete) {
const readStream = fs.createReadStream(filePath, { encoding: this.encoding });
let lineCount = 0;
let buffer = '';
readStream.on('data', (chunk) => {
buffer += chunk;
const lines = buffer.split('\n');
buffer = lines.pop(); // keep the incomplete last line for the next chunk
lines.forEach(line => {
if (line.trim()) {
lineCount++;
lineProcessor(line, lineCount);
}
});
});
readStream.on('end', () => {
// Flush a final line that had no trailing newline.
if (buffer.trim()) {
lineCount++;
lineProcessor(buffer, lineCount);
}
onComplete(lineCount);
});
readStream.on('error', (error) => {
console.error('读取文件失败:', error.message);
throw error;
});
}
// Wraps a chunk-level transformer function in a Transform stream.
createTransformStream(transformer) {
return new Transform({
transform(chunk, encoding, callback) {
const transformed = transformer(chunk);
this.push(transformed);
callback();
}
});
}
// Streams sourcePath through transformer(chunk) into destPath; onComplete()
// fires when the destination stream finishes flushing.
processLargeFileWithTransform(sourcePath, destPath, transformer, onComplete) {
const readStream = fs.createReadStream(sourcePath, { encoding: this.encoding });
const writeStream = fs.createWriteStream(destPath, { encoding: this.encoding });
const transformStream = this.createTransformStream(transformer);
readStream.pipe(transformStream).pipe(writeStream);
writeStream.on('finish', () => {
onComplete();
});
readStream.on('error', (error) => {
console.error('读取文件失败:', error.message);
throw error;
});
writeStream.on('error', (error) => {
console.error('写入文件失败:', error.message);
throw error;
});
}
}
// Usage example. NOTE(review): these demo calls all start concurrently and
// expect the sample inputs (large-file.txt, source.txt, data.txt) to exist
// next to this script.
const processor = new LargeFileProcessor();
// Read a large file chunk by chunk with progress reporting.
console.log('读取大文件:');
processor.readLargeFile(
path.join(__dirname, 'large-file.txt'),
(chunk, chunkCount, totalBytes) => {
console.log(`读取第 ${chunkCount} 块,累计 ${totalBytes} 字节`);
},
(chunkCount, totalBytes) => {
console.log(`读取完成,共 ${chunkCount} 块,${totalBytes} 字节`);
}
);
// Copy a large file with percentage progress.
console.log('\n复制大文件:');
processor.copyLargeFile(
path.join(__dirname, 'source.txt'),
path.join(__dirname, 'destination.txt'),
(progress, copiedBytes, totalBytes) => {
console.log(`进度:${progress.toFixed(2)}%,已复制 ${copiedBytes}/${totalBytes} 字节`);
},
(copiedBytes) => {
console.log(`复制完成,共 ${copiedBytes} 字节`);
}
);
// Process a file line by line.
console.log('\n逐行处理文件:');
processor.processFileByLine(
path.join(__dirname, 'data.txt'),
(line, lineCount) => {
console.log(`第 ${lineCount} 行:${line}`);
},
(lineCount) => {
console.log(`处理完成,共 ${lineCount} 行`);
}
);
// Pipe a file through an upper-casing transform.
console.log('\n使用转换流处理文件:');
processor.processLargeFileWithTransform(
path.join(__dirname, 'source.txt'),
path.join(__dirname, 'uppercase.txt'),
(chunk) => chunk.toString().toUpperCase(),
() => {
console.log('转换完成');
}
);

案例 2:文件压缩工具
创建一个能够压缩和解压缩文件的工具。
// file-compressor.js
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
const { promisify } = require('util');
const pipeline = promisify(require('stream').pipeline);
// Gzip-based file compressor/decompressor built on stream pipelines.
// Fix: size lookups after the pipeline used fs.statSync inside async methods,
// blocking the event loop; they now use the promise-based fs.promises.stat.
class FileCompressor {
  // options.compressionLevel: zlib level 0-9 (default: best/smallest compression).
  constructor(options = {}) {
    this.compressionLevel = options.compressionLevel || zlib.constants.Z_BEST_COMPRESSION;
  }
  // Compresses sourcePath into destPath (gzip). Resolves with
  // { sourceSize, destSize, ratio } where ratio is a percentage string.
  async compressFile(sourcePath, destPath) {
    const readStream = fs.createReadStream(sourcePath);
    const writeStream = fs.createWriteStream(destPath);
    const gzipStream = zlib.createGzip({
      level: this.compressionLevel
    });
    try {
      // pipeline() wires the streams together and rejects on any stream error.
      await pipeline(readStream, gzipStream, writeStream);
      const { size: sourceSize } = await fs.promises.stat(sourcePath);
      const { size: destSize } = await fs.promises.stat(destPath);
      const ratio = ((1 - destSize / sourceSize) * 100).toFixed(2);
      console.log('压缩完成');
      console.log(`原始大小:${sourceSize} 字节`);
      console.log(`压缩后大小:${destSize} 字节`);
      console.log(`压缩率:${ratio}%`);
      return {
        sourceSize,
        destSize,
        ratio
      };
    } catch (error) {
      console.error('压缩失败:', error.message);
      throw error;
    }
  }
  // Decompresses a gzip file back to its plain form. Resolves with
  // { sourceSize, destSize }.
  async decompressFile(sourcePath, destPath) {
    const readStream = fs.createReadStream(sourcePath);
    const writeStream = fs.createWriteStream(destPath);
    const gunzipStream = zlib.createGunzip();
    try {
      await pipeline(readStream, gunzipStream, writeStream);
      const { size: sourceSize } = await fs.promises.stat(sourcePath);
      const { size: destSize } = await fs.promises.stat(destPath);
      console.log('解压缩完成');
      console.log(`压缩文件大小:${sourceSize} 字节`);
      console.log(`解压缩后大小:${destSize} 字节`);
      return {
        sourceSize,
        destSize
      };
    } catch (error) {
      console.error('解压缩失败:', error.message);
      throw error;
    }
  }
  // Compresses each { source, dest } pair sequentially. Failures are collected
  // in the result list (success: false) rather than thrown, so one bad file
  // does not abort the batch; onProgress(done, total) fires after each pair.
  async compressFiles(filePairs, onProgress) {
    const results = [];
    for (let i = 0; i < filePairs.length; i++) {
      const { source, dest } = filePairs[i];
      try {
        const result = await this.compressFile(source, dest);
        results.push({ source, dest, success: true, ...result });
      } catch (error) {
        results.push({ source, dest, success: false, error: error.message });
      }
      if (onProgress) {
        onProgress(i + 1, filePairs.length);
      }
    }
    return results;
  }
  // Batch counterpart of decompressFile; same collect-don't-throw contract.
  async decompressFiles(filePairs, onProgress) {
    const results = [];
    for (let i = 0; i < filePairs.length; i++) {
      const { source, dest } = filePairs[i];
      try {
        const result = await this.decompressFile(source, dest);
        results.push({ source, dest, success: true, ...result });
      } catch (error) {
        results.push({ source, dest, success: false, error: error.message });
      }
      if (onProgress) {
        onProgress(i + 1, filePairs.length);
      }
    }
    return results;
  }
}
// Usage example. NOTE(review): these .then() chains have no .catch(), so a
// missing input file becomes an unhandled promise rejection.
const compressor = new FileCompressor();
// Compress a single file.
console.log('压缩文件:');
compressor.compressFile(
path.join(__dirname, 'large-file.txt'),
path.join(__dirname, 'large-file.txt.gz')
).then(result => {
console.log('压缩结果:', result);
});
// Decompress a file. NOTE(review): runs concurrently with the compression
// above, so the .gz may not exist yet when this starts.
console.log('\n解压缩文件:');
compressor.decompressFile(
path.join(__dirname, 'large-file.txt.gz'),
path.join(__dirname, 'large-file-decompressed.txt')
).then(result => {
console.log('解压缩结果:', result);
});
// Compress several files in one batch with progress reporting.
console.log('\n批量压缩文件:');
const filesToCompress = [
{ source: path.join(__dirname, 'file1.txt'), dest: path.join(__dirname, 'file1.txt.gz') },
{ source: path.join(__dirname, 'file2.txt'), dest: path.join(__dirname, 'file2.txt.gz') },
{ source: path.join(__dirname, 'file3.txt'), dest: path.join(__dirname, 'file3.txt.gz') }
];
compressor.compressFiles(filesToCompress, (current, total) => {
console.log(`进度:${current}/${total}`);
}).then(results => {
console.log('批量压缩完成:');
results.forEach(result => {
if (result.success) {
console.log(`  ${result.source} -> ${result.dest} (压缩率:${result.ratio}%)`);
} else {
console.log(`  ${result.source} -> ${result.dest} (失败:${result.error})`);
}
});
});

案例 3:文件监控工具
创建一个能够监控文件系统变化的工具。
// file-monitor.js
const fs = require('fs');
const path = require('path');
// Watches a directory tree via fs.watch and dispatches typed events
// ('rename' / 'change') to registered callbacks.
class FileMonitor {
  // options.watchPath: directory to observe (default: cwd);
  // options.recursive: include subdirectories (default: true).
  constructor(options = {}) {
    this.watchPath = options.watchPath || process.cwd();
    this.recursive = options.recursive !== false;
    this.watchers = new Map(); // dirPath -> FSWatcher
    this.callbacks = new Map(); // eventType -> [callback, ...]
  }
  // Begins watching the configured root directory.
  start() {
    this.watchDirectory(this.watchPath);
    console.log(`开始监控:${this.watchPath}`);
  }
  // Attaches an fs.watch watcher to dirPath (idempotent per directory).
  watchDirectory(dirPath) {
    if (this.watchers.has(dirPath)) {
      return;
    }
    const watcher = fs.watch(dirPath, { recursive: this.recursive }, (eventType, filename) => {
      if (filename) {
        const filePath = path.join(dirPath, filename);
        this.handleEvent(eventType, filePath, filename);
      }
    });
    this.watchers.set(dirPath, watcher);
  }
  // Normalizes a raw fs.watch notification into an event object and dispatches it.
  // Fix: the original returned early when stat failed, which silently swallowed
  // every deletion event (a deleted path cannot be stat'ed) — the demo code that
  // distinguishes "created" from "deleted" on 'rename' therefore never saw
  // deletions. A deletion now still produces an event: stats-derived fields fall
  // back to safe defaults and `exists` reports whether the path is present.
  handleEvent(eventType, filePath, filename) {
    const stats = this.getStats(filePath); // null when the path no longer exists
    const event = {
      type: eventType,
      path: filePath,
      name: filename,
      exists: stats !== null,
      isFile: stats ? stats.isFile() : false,
      isDirectory: stats ? stats.isDirectory() : false,
      size: stats ? stats.size : 0,
      modified: stats ? stats.mtime : null
    };
    this.triggerCallbacks(event);
  }
  // Returns fs.Stats for filePath, or null when it cannot be stat'ed (e.g. deleted).
  getStats(filePath) {
    try {
      return fs.statSync(filePath);
    } catch (error) {
      return null;
    }
  }
  // Registers a callback for an event type ('rename' or 'change').
  on(eventType, callback) {
    if (!this.callbacks.has(eventType)) {
      this.callbacks.set(eventType, []);
    }
    this.callbacks.get(eventType).push(callback);
  }
  // Invokes all callbacks registered for event.type; a throwing callback is
  // logged and does not prevent the remaining callbacks from running.
  triggerCallbacks(event) {
    const callbacks = this.callbacks.get(event.type) || [];
    callbacks.forEach(callback => {
      try {
        callback(event);
      } catch (error) {
        console.error('回调函数执行失败:', error.message);
      }
    });
  }
  // Closes every active watcher and clears the registry.
  stop() {
    this.watchers.forEach((watcher, path) => {
      watcher.close();
      console.log(`停止监控:${path}`);
    });
    this.watchers.clear();
  }
}
// Usage example: watch this script's directory for 10 seconds.
const monitor = new FileMonitor({
watchPath: __dirname,
recursive: true
});
// 'rename' fires for both creation and deletion; existsSync tells them apart.
monitor.on('rename', (event) => {
if (fs.existsSync(event.path)) {
console.log(`文件创建:${event.name}`);
} else {
console.log(`文件删除:${event.name}`);
}
});
// 'change' fires when file content is modified.
monitor.on('change', (event) => {
console.log(`文件修改:${event.name} (${event.size} 字节)`);
});
// Start watching.
monitor.start();
// Stop watching after 10 seconds.
setTimeout(() => {
monitor.stop();
}, 10000);

代码示例
示例 1:文件分割工具
// file-splitter.js
const fs = require('fs');
const path = require('path');
// Splits filePath into sequential chunk files of at most chunkSize bytes each,
// written to outputDir as chunk_0000.bin, chunk_0001.bin, ...
// Fix: the original wrote each incoming stream buffer whole, so chunk files
// could exceed chunkSize by up to one stream buffer (~64 KiB); buffers are now
// sliced exactly at the chunk boundary.
function splitFile(filePath, chunkSize, outputDir) {
  const { size: fileSize } = fs.statSync(filePath);
  const totalChunks = Math.ceil(fileSize / chunkSize);
  console.log(`文件大小:${fileSize} 字节`);
  console.log(`分块大小:${chunkSize} 字节`);
  console.log(`总分块数:${totalChunks}`);
  const readStream = fs.createReadStream(filePath);
  let chunkIndex = 0;
  let currentChunkSize = 0;
  let writeStream = null;
  // Opens the output stream for chunk `index` (zero-padded file name).
  function createWriteStream(index) {
    const chunkPath = path.join(outputDir, `chunk_${index.toString().padStart(4, '0')}.bin`);
    return fs.createWriteStream(chunkPath);
  }
  // Finishes the current chunk file and advances to the next index.
  function closeCurrentChunk() {
    writeStream.end();
    console.log(`分块 ${chunkIndex + 1}/${totalChunks} 完成`);
    chunkIndex++;
    currentChunkSize = 0;
    writeStream = null;
  }
  readStream.on('data', (data) => {
    let offset = 0;
    while (offset < data.length) {
      if (!writeStream) {
        writeStream = createWriteStream(chunkIndex);
      }
      // Write only up to the current chunk's remaining capacity.
      const room = chunkSize - currentChunkSize;
      const piece = data.subarray(offset, offset + room);
      writeStream.write(piece);
      currentChunkSize += piece.length;
      offset += piece.length;
      if (currentChunkSize >= chunkSize) {
        closeCurrentChunk();
      }
    }
  });
  readStream.on('end', () => {
    // Flush the final, partially-filled chunk (if any).
    if (writeStream) {
      closeCurrentChunk();
    }
    console.log('文件分割完成');
  });
  readStream.on('error', (error) => {
    console.error('读取文件失败:', error.message);
  });
}
// Usage example: split large-file.txt into 1 MB chunks under ./chunks.
const filePath = path.join(__dirname, 'large-file.txt');
const chunkSize = 1024 * 1024; // 1MB per chunk file
const outputDir = path.join(__dirname, 'chunks');
// Create the output directory on first run.
if (!fs.existsSync(outputDir)) {
fs.mkdirSync(outputDir, { recursive: true });
}
splitFile(filePath, chunkSize, outputDir);

示例 2:文件合并工具
// file-merger.js
const fs = require('fs');
const path = require('path');
// Re-assembles chunk_####.bin files from chunkDir (in sorted name order)
// into a single output file.
function mergeFiles(chunkDir, outputFilePath) {
  const parts = fs
    .readdirSync(chunkDir)
    .filter((name) => name.startsWith('chunk_') && name.endsWith('.bin'))
    .sort();
  console.log(`找到 ${parts.length} 个分块`);
  const output = fs.createWriteStream(outputFilePath);
  let position = 0;
  // Streams one chunk at a time; the recursive step only runs after the
  // previous chunk's read finishes, guaranteeing strict ordering.
  const appendNext = () => {
    if (position >= parts.length) {
      output.end();
      console.log('文件合并完成');
      return;
    }
    const reader = fs.createReadStream(path.join(chunkDir, parts[position]));
    reader.on('data', (piece) => {
      output.write(piece);
    });
    reader.on('end', () => {
      console.log(`合并分块 ${position + 1}/${parts.length}`);
      position += 1;
      appendNext();
    });
    reader.on('error', (error) => {
      console.error(`读取分块失败:${error.message}`);
    });
  };
  output.on('error', (error) => {
    console.error('写入文件失败:', error.message);
  });
  appendNext();
}
// Usage example: merge ./chunks/chunk_*.bin back into merged-file.txt.
const chunkDir = path.join(__dirname, 'chunks');
const outputFilePath = path.join(__dirname, 'merged-file.txt');
mergeFiles(chunkDir, outputFilePath);

示例 3:文件加密工具
// file-encryptor.js
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
// Encrypts sourcePath into destPath with the given AES algorithm; the random
// 16-byte IV is written as the first bytes of the output so that decryption
// can recover it. The key is derived from the password with scrypt.
// NOTE(review): the scrypt salt is a fixed string — fine for a tutorial, but
// production code should use a random, stored salt.
function encryptFile(sourcePath, destPath, password, algorithm = 'aes-256-cbc') {
  const key = crypto.scryptSync(password, 'salt', 32);
  const iv = crypto.randomBytes(16);
  const cipher = crypto.createCipheriv(algorithm, key, iv);
  const input = fs.createReadStream(sourcePath);
  const output = fs.createWriteStream(destPath);
  output.write(iv); // IV prefix, consumed by decryptFile
  input.pipe(cipher).pipe(output);
  output.on('finish', () => {
    console.log('文件加密完成');
  });
  input.on('error', (error) => {
    console.error('读取文件失败:', error.message);
  });
  output.on('error', (error) => {
    console.error('写入文件失败:', error.message);
  });
}
// Decrypts a file produced by encryptFile: the first 16 bytes are the IV,
// the remainder is ciphertext.
// Fix: the original assumed the entire 16-byte IV arrived in the first 'data'
// event; the IV is now accumulated until complete, so short first chunks
// (tiny files, unusual stream buffering) decrypt correctly.
function decryptFile(sourcePath, destPath, password, algorithm = 'aes-256-cbc') {
  const key = crypto.scryptSync(password, 'salt', 32);
  const readStream = fs.createReadStream(sourcePath);
  const writeStream = fs.createWriteStream(destPath);
  let header = Buffer.alloc(0); // bytes collected toward the 16-byte IV
  let decipher = null;
  readStream.on('data', (chunk) => {
    if (!decipher) {
      header = Buffer.concat([header, chunk]);
      if (header.length < 16) {
        return; // IV still incomplete; wait for more data
      }
      const iv = header.subarray(0, 16);
      decipher = crypto.createDecipheriv(algorithm, key, iv);
      const encryptedData = header.subarray(16);
      if (encryptedData.length > 0) {
        writeStream.write(decipher.update(encryptedData));
      }
    } else {
      writeStream.write(decipher.update(chunk));
    }
  });
  readStream.on('end', () => {
    if (decipher) {
      // final() verifies/strips padding and emits the last plaintext block.
      writeStream.write(decipher.final());
    }
    writeStream.end();
    console.log('文件解密完成');
  });
  readStream.on('error', (error) => {
    console.error('读取文件失败:', error.message);
  });
  writeStream.on('error', (error) => {
    console.error('写入文件失败:', error.message);
  });
}
// Usage example.
// NOTE(review): encryptFile and decryptFile below both run asynchronously, so
// decryption starts before encryption has finished writing sensitive.enc —
// a completion callback/promise would be needed to sequence them correctly.
const sourceFile = path.join(__dirname, 'sensitive.txt');
const encryptedFile = path.join(__dirname, 'sensitive.enc');
const decryptedFile = path.join(__dirname, 'sensitive-decrypted.txt');
const password = 'my-secret-password';
// Encrypt the file.
console.log('加密文件:');
encryptFile(sourceFile, encryptedFile, password);
// Decrypt the file.
console.log('\n解密文件:');
decryptFile(encryptedFile, decryptedFile, password);

实现技巧与注意事项
文件流使用建议
- 使用流处理大文件:避免一次性加载大文件到内存
- 正确处理错误:监听 error 事件
- 管理资源:确保流正确关闭
- 使用管道:简化流之间的连接
异步操作最佳实践
- 优先使用异步操作:避免阻塞事件循环
- 使用 Promise/async-await:简化异步代码
- 正确处理错误:使用 try-catch 或 .catch()
- 避免回调地狱:使用 Promise 或 async-await
路径处理注意事项
- 使用 path 模块:避免手动拼接路径
- 验证路径安全性:防止路径遍历攻击
- 处理跨平台路径:注意不同操作系统的路径分隔符
- 规范化路径:使用 path.normalize()
常见问题与解决方案
问题 1:大文件内存溢出
// Problem: reading a large file in one synchronous call loads it entirely into memory.
const content = fs.readFileSync('large-file.txt'); // may cause an out-of-memory failure
// Solution: stream the file and process it chunk by chunk.
const readStream = fs.createReadStream('large-file.txt');
readStream.on('data', (chunk) => {
console.log('读取数据块,长度:', chunk.length);
});

问题 2:异步操作顺序问题
// Problem: independent callbacks complete in nondeterministic order.
fs.readFile('file1.txt', (err, data1) => {
console.log('File 1');
});
fs.readFile('file2.txt', (err, data2) => {
console.log('File 2');
});
// Solution: await each read so the order is explicit (use Promise.all instead
// when order does not matter and the reads can run in parallel).
async function readFiles() {
const data1 = await fs.promises.readFile('file1.txt');
console.log('File 1');
const data2 = await fs.promises.readFile('file2.txt');
console.log('File 2');
}
readFiles();

问题 3:路径安全问题
// Problem: user-controlled paths joined blindly allow '../' traversal.
const userPath = req.body.path;
const filePath = path.join(baseDir, userPath); // vulnerable to path traversal
// Solution: resolve the path and verify it stays inside baseDir.
// NOTE(review): these two fragments redeclare `userPath` — they are alternative
// illustrative snippets, not one runnable script. Also note a bare startsWith
// prefix check still accepts sibling directories such as baseDir + '-evil';
// compare with path.sep appended, or use path.relative, in real code.
const userPath = req.body.path;
const resolvedPath = path.resolve(baseDir, userPath);
const normalizedPath = path.normalize(resolvedPath);
if (!normalizedPath.startsWith(path.resolve(baseDir))) {
throw new Error('非法路径');
}
const filePath = normalizedPath;

问题 4:流未正确关闭
// Problem: on error, piped streams are left open (file-descriptor leak).
const readStream = fs.createReadStream('file.txt');
const writeStream = fs.createWriteStream('output.txt');
readStream.pipe(writeStream);
// If an error occurs, neither stream is destroyed.
// Solution: handle errors on both streams and destroy them explicitly.
const readStream = fs.createReadStream('file.txt');
const writeStream = fs.createWriteStream('output.txt');
readStream.on('error', (error) => {
console.error('读取错误:', error);
readStream.destroy();
writeStream.destroy();
});
writeStream.on('error', (error) => {
console.error('写入错误:', error);
readStream.destroy();
writeStream.destroy();
});
writeStream.on('finish', () => {
console.log('写入完成');
});
readStream.pipe(writeStream);

总结
本教程详细介绍了 Node.js 文件系统的高级特性,包括同步与异步操作、文件流、路径处理等重要内容。掌握这些高级特性对于处理大文件、优化性能、提高代码可维护性至关重要。
通过本集的学习,您应该能够:
- 理解同步和异步操作的区别和适用场景
- 使用文件流处理大文件
- 创建和使用转换流
- 掌握高级路径处理技巧
- 实现文件压缩、加密等高级功能
- 避免常见的文件系统操作错误
在下一集中,我们将学习 Node.js 的路径与 URL 模块,这是处理路径和 URL 的重要工具。继续加油,您的 Node.js 技能正在不断提升!