Handling large file reads and writes with streams
- fs.createReadStream
createReadStream is a method in the fs module for reading a file as a stream.
createReadStream takes two parameters:
The first parameter: the path of the file to read.
The second parameter: an options object whose highWaterMark (the high-water mark) controls how much is read per chunk; by default at most 64 KB is read at a time.
Every chunk that is read is of type Buffer.
The return value is an instance of a readable stream. It starts out in paused (non-flowing) mode, and we eventually switch it into flowing mode.
First we define a read stream. Suppose we have a file 1.txt and we want to read its contents.
We set the high-water mark to 1, so each chunk is 1 byte (highWaterMark is measured in bytes):
let rs = fs.createReadStream('./1.txt', { highWaterMark: 1 });
We bind handlers for two events on rs: data and end.
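Putting it together, a minimal sketch of consuming those two events (assuming 1.txt contains a few bytes of text):
const fs = require('fs');
let rs = fs.createReadStream('./1.txt', { highWaterMark: 1 });
let chunks = [];
rs.on('data', function (chunk) { // chunk is a Buffer holding at most highWaterMark bytes
    chunks.push(chunk);
});
rs.on('end', function () { // fires once, after the whole file has been read
    console.log(Buffer.concat(chunks).toString());
});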
- fs.createWriteStream
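createWriteStream is the writing counterpart: it also takes a file path and an options object, and returns a writable stream whose write method accepts strings or Buffers. A minimal sketch, assuming a throwaway file ./2.txt:
const fs = require('fs');
let ws = fs.createWriteStream('./2.txt');
ws.write('hello ');             // strings are accepted
ws.write(Buffer.from('world')); // so are Buffers
ws.end();                       // signal that nothing more will be written
The fuller example below combines both streams: it reads a file, appends a new Buffer to the chunks that were read, and writes the merged result back.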
const fs = require('fs');
const path = require('path');
let readPath = path.join(__dirname, './dist/my.txt');
let copyPath = path.join(__dirname, './dist/my.txt');
// Each run reads the file as a stream, appends a new Buffer to the array of chunks, merges them into one Buffer, and writes the result back
save(`--: the 2nd addition :--`);
async function save(value) {
    // Read the file
    const localBufArr = await readFileStream(readPath);
    let totalLength = 0;
    // Append a new Buffer built from the value
    localBufArr.push(Buffer.from(value, 'utf8'));
    localBufArr.forEach(v => {
        totalLength += v.length;
    });
    // Merge the array of Buffers into a single Buffer
    let buf = Buffer.concat(localBufArr, totalLength);
    console.log('buf', buf);
    // Create a writable stream
    let firstCreate = fs.createWriteStream(copyPath);
    // Write the merged Buffer and close the stream
    firstCreate.write(buf);
    firstCreate.end();
}
// Read the file using a stream
function readFileStream(spath) {
    return new Promise((resolve, reject) => {
        // Check whether the file exists
        if (fs.existsSync(spath)) {
            let readStream = fs.createReadStream(spath);
            let arr = [];
            let startTime = Date.now();
            // Internally the stream keeps emitting 'data' events; once flowing mode starts, chunks arrive continuously
            readStream.on('data', function (chunk) { // chunk is a Buffer
                arr.push(chunk);
            });
            // The 'end' event fires automatically once the whole file has been read
            readStream.on('end', function () {
                let useTime = Date.now() - startTime;
                console.log("Reading the file took " + (useTime / 1000) + "s");
                resolve(arr);
            });
            // Listen for errors and reject so the caller is not left hanging
            readStream.on('error', function (err) {
                console.log(err);
                reject(err);
            });
        } else {
            reject("The file does not exist");
        }
    });
}