[英]Merging Node.js streams
I have a bunch of files that I read and process, and I want to merge certain data from the corresponding multiple streams into a single stream. 我有一堆文件,我从对应的多个流中读取,处理并将某些数据合并到单个流中。
Is there a more elegant solution than below (having a separate counter, calling combinedStream.end()
after all source streams emit end
): 有没有比下面更优雅的解决方案(有一个单独的计数器,在所有源流发出
end
之后调用combinedStream.end()
):
let combinedStream = ....;
let counter = 0;
filePaths.forEach(function(filePath) {
counter += 1;
const fileStream = fs.createReadStream(filePath);
const myStream = new MyStream(fileStream);
myStream.on('data', myStream.write);
myStream.on('end', function() {
counter -= 1;
if (counter === 0) {
combinedStream.end();
}
});
});
return combinedStream;
A cleaner approach could be the one used in that repo, even though it does nothing more than hide your counter somewhere and let you work with a more comfortable callback-based model. 一种更简洁的方法是该仓库中使用的方法,尽管它只不过是将计数器隐藏在某个地方,让您使用更舒适的基于回调的模型而已。
This way, your code will look like: 这样,您的代码将如下所示:
// Pseudo-code: the shared destination stream (placeholder initializer).
let sharedStream = ...
// Invoked once per filename; reads that file, pushes its data onto the
// shared stream, then calls `callback` when this file's stream ends.
function onEachFilename(filename, callback) {
// here you can read from the stream and push the data on the shared one,
// then invoke the "internal" callback on the end event
}
// Invoked exactly once, after every per-file callback has fired.
function onEndAll() {
// here you can finalize and close the shared stream
}
// `forEach` (from the referenced repo) keeps the pending count internally
// and calls `onEndAll` when all callbacks have been invoked.
forEach(filenames, onEachFilename, onEndAll);
Keep in mind that somewhere there is still a function that is in charge to count for you and invoke the onEnd
function once all the callback
functions have been invoked. 请记住,某个地方仍然有一个函数负责为您计数,并在所有
callback
函数都被调用后调用onEnd
函数。
You can just process files with a Transform stream then pipe to a PassThrough Stream. 您可以只使用Transform流处理文件,然后通过管道传递到PassThrough流。
Since, you are using let
, I guess you can use ES2015. 由于您正在使用
let
,所以我想您可以使用ES2015。
"use strict";
let fs=require('fs');
let filePaths=['./tmp/h.txt','./tmp/s.txt'];
let Stream = require('stream');
/**
 * Collects source streams, runs each through a StreamTransformer, and
 * exposes the merged output of all of them as a single PassThrough stream.
 */
class StreamProcessor {
    constructor() {
        // Transform streams currently being processed.
        this.process_streams = [];
    }

    /**
     * Wrap a source stream in a new StreamTransformer and track it.
     * @param {Stream.Readable} source_stream - stream to process and merge.
     */
    push(source_stream) {
        let transform = new StreamTransformer();
        // `processed` resolves on 'finish', rejects on 'error'.
        // (Simplified from `transform.wait.call(transform)` — a plain
        // method call already binds `this` correctly.)
        transform.processed = transform.wait();
        source_stream.pipe(transform);
        this.process_streams.push(transform);
    }

    /**
     * Wait for all transforms to finish, then hand the caller one combined
     * stream carrying every transform's output.
     * @param {function(Error=, Stream.PassThrough=)} callback
     */
    done(callback) {
        let streams = this.process_streams;
        Promise.all(
            streams.map(function (s) { return s.processed; })
        )
        .then(function () {
            let combined_stream = new Stream.PassThrough();
            let remaining = streams.length;
            // Robustness: with no sources, return an already-ended stream.
            if (remaining === 0) {
                combined_stream.end();
                callback(null, combined_stream);
                return;
            }
            streams.forEach(function (stream) {
                // BUG FIX: pipe with { end: false }. The default
                // { end: true } ends the destination as soon as the FIRST
                // source emits 'end', so every later source would hit
                // "write after end". End manually once ALL sources are done.
                stream.pipe(combined_stream, { end: false });
                stream.on('end', function () {
                    remaining -= 1;
                    if (remaining === 0) {
                        combined_stream.end();
                    }
                });
            });
            callback(null, combined_stream);
        })
        .catch(function (err) {
            callback(err);
        });
    }
}
/**
 * Transform stream that strips the last two characters from every incoming
 * chunk and appends a single trailing newline once the input ends.
 */
class StreamTransformer extends Stream.Transform {
    constructor() {
        super();
    }

    /**
     * Push each chunk minus its final two characters.
     * NOTE(review): operates per chunk, so results depend on chunking —
     * presumably fine for the small demo files; verify for real input.
     */
    _transform(chunk, enc, transformed) {
        const text = chunk.toString();
        this.push(text.substring(0, text.length - 2));
        transformed();
    }

    /** Emit one extra newline after all input has been consumed. */
    _flush(flushed) {
        this.push('\n');
        flushed();
    }

    /**
     * @returns {Promise<boolean>} resolves with `true` on 'finish',
     *          rejects with the error on 'error'.
     */
    wait() {
        const self = this;
        return new Promise(function (resolve, reject) {
            self.on('finish', function () {
                resolve(true);
            });
            self.on('error', function (err) {
                reject(err);
            });
        });
    }
}
/// Now you can do..
let process_stream = new StreamProcessor();
filePaths.forEach(function (fpath) {
    let fstream = fs.createReadStream(fpath);
    process_stream.push(fstream);
});
process_stream.done(function (err, combined_stream) {
    // BUG FIX: the error argument was silently ignored; report it instead
    // of dereferencing an undefined `combined_stream` on failure.
    if (err) {
        console.error(err);
        return;
    }
    // Consume the combined stream
    combined_stream.pipe(process.stdout);
});
Test files contains 'hello' and 'stream' 测试文件包含“ hello”和“ stream”
// The output is
// hell
// stream
This can be improved further. 这可以进一步改善。:/
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.