如何在流中使用ES8 async/await

How to use ES8 async/await with streams?

本文关键字:ES8 async await      更新时间:2023-09-26

在 https://stackoverflow.com/a/18658613/779159 中有一个如何使用内置的 crypto 库和流来计算文件哈希(示例中为 sha1)的示例。

var fs = require('fs');
var crypto = require('crypto');
// the file you want to get the hash of
var fd = fs.createReadStream('/some/file/name.txt');
var hash = crypto.createHash('sha1');
hash.setEncoding('hex');
// previously missing: without an 'error' listener, a read failure
// (e.g. the file does not exist) crashes the process with an
// unhandled 'error' event instead of reporting the problem
fd.on('error', function(err) {
    console.error(err);
});
fd.on('end', function() {
    hash.end();
    console.log(hash.read()); // the desired sha1sum
});
// read all file and pipe it (write it) to the hash object
fd.pipe(hash);

但是,是否可以将其转换为使用ES8 async/await,而不是使用如上所述的回调,同时仍然保持使用流的效率?

await 关键字仅适用于 Promise,而不适用于流。有一些想法提议增加一种额外的类似流的数据类型,并为其提供专门的语法,但这些想法都非常实验性,我不会详细介绍。

无论如何,您的回调只是在等待流的结束,而这正是 Promise 擅长表达的场景。你只需要把流包装起来:

// source file stream and sha1 hash transform producing hex output
const fd = fs.createReadStream('/some/file/name.txt');
const hash = crypto.createHash('sha1');
hash.setEncoding('hex');
// read all file and pipe it (write it) to the hash object
fd.pipe(hash);
// wrap stream completion in a promise so it can be awaited:
// resolve with the digest once the hash stream ends, reject on read errors
const end = new Promise((resolve, reject) => {
    hash.on('end', () => resolve(hash.read()));
    fd.on('error', reject); // or something like that — might also need to close `hash`
});

在最新版本的 Node.js 中,stream/promises 模块还提供了一个 pipeline 助手函数,可以做到这一点:

import { pipeline } from 'node:stream/promises';
// source file stream and sha1 hash transform, hex-encoded output
const fd = fs.createReadStream('/some/file/name.txt');
const hash = crypto.createHash('sha1');
hash.setEncoding('hex');
// read all file and pipe it (write it) to the hash object;
// pipeline() returns a promise that settles when the whole chain finishes
// NOTE(review): the promise resolves with undefined, not the digest —
// the digest must still be read from `hash` afterwards; confirm against
// the stream/promises documentation
const end = pipeline(fd, hash);

现在你可以 await 这个 Promise:

// immediately-invoked async wrapper: await the completion promise
// defined above and print the resulting sha1 digest
(async () => {
    const sha1sum = await end;
    console.log(sha1sum);
})();

如果您使用的 Node 版本 >= v10.0.0,那么您可以使用 stream.pipeline 和 util.promisify。

const fs = require('fs');
const crypto = require('crypto');
const util = require('util');
const stream = require('stream');

// promisify stream.pipeline so it can be awaited (Node >= v10.0.0)
const pipeline = util.promisify(stream.pipeline);

// sha1 hash transform producing a hex-encoded digest
const hash = crypto.createHash('sha1');
hash.setEncoding('hex');

// stream the file through the hash transform and report completion;
// pipeline rejects (and destroys the streams) on any error
async function run() {
    await pipeline(fs.createReadStream('/some/file/name.txt'), hash);
    console.log('Pipeline succeeded');
}

run().catch(console.error);

Node v15 现在在 stream/promises 中内置了 promise 化的 pipeline。这是最干净、最正式的方式。

// pipeline from stream/promises rejects on any stream error and
// destroys the unfinished streams, so one catch handler suffices
const { pipeline } = require('stream/promises');
// Gzip archive.tar into archive.tar.gz via the promisified pipeline.
// NOTE(review): this snippet assumes `fs` and `zlib` are required elsewhere.
async function run() {
  await pipeline(
    fs.createReadStream('archive.tar'),
    zlib.createGzip(),
    fs.createWriteStream('archive.tar.gz')
  );
  console.log('Pipeline succeeded.');
}
run().catch(console.error);

我们都应该感谢它在这里做了这么多工作:

  • 捕获所有流中的错误
  • 当出现错误时销毁未完成的流
  • 仅当最后一个可写流完成时返回

pipeline 是 Node.js 最强大的功能之一。要让它完全异步化并不容易,而现在我们有了。

类似的东西可以工作:

// Append each fetched response body to the same file, one response at a time.
// NOTE(review): `fetchResponses`, `totalBytes` and `filePath` are assumed to be
// defined by surrounding code — this is a fragment.
for (var res of fetchResponses){ //node-fetch package responses
    // 'a' flag: open for appending, so successive bodies accumulate in one file
    const dest = fs.createWriteStream(filePath,{flags:'a'});
    totalBytes += Number(res.headers.get('content-length'));
    // wait for this body to finish writing before starting the next one
    await new Promise((resolve, reject) => {
        res.body.pipe(dest);
        res.body.on("error", (err) => {
            reject(err);
        });
        // NOTE(review): errors on `dest` are not handled — if the write stream
        // fails, this promise never settles; consider dest.on('error', reject)
        dest.on("finish", function() {
            resolve();
        });
    });         
}

2021更新:

Node文档中的新示例:

/**
 * Drain a readable stream and log its entire contents as one utf8 string.
 * @param {import('stream').Readable} readable - source stream to consume
 */
async function print(readable) {
  readable.setEncoding('utf8');
  const chunks = [];
  // async iteration consumes the stream chunk by chunk
  for await (const chunk of readable) {
    chunks.push(chunk);
  }
  console.log(chunks.join(''));
}

参见https://nodejs.org/api/stream.html#stream_readable_symbol_asynciterator

我相信它会对某人有所帮助:

/**
 * Read a file as a stream and collect its raw data chunks.
 * @param {string} filename - path of the file to read
 * @returns {Promise<Buffer[]>} resolves with the array of 'data' chunks,
 *   rejects on any read error
 */
function readFile(filename) {
    return new Promise((resolve, reject) => {
        const records = [];
        fs.createReadStream(filename)
            .on("data", (data) => {
                records.push(data);
            })
            // previously missing: without this the promise hung forever
            // whenever the read failed (e.g. file not found)
            .on("error", reject)
            .on("end", () => {
                resolve(records)
            });
    })
}

我会发表评论,但没有足够的声誉。

提醒:如果您的应用程序在四处传递流并执行 async/await,请务必在 await 之前就把所有管道连接好。否则你最终得到的流可能并不包含你以为它们包含的数据。以下是一个最小示例:

const { PassThrough } = require('stream');
// Demonstrates the pitfall: a stream piped AFTER an await can miss data
// that was written to the source before the pipe was attached.
async function main() {
    const initialStream = new PassThrough();
    const otherStream = new PassThrough();
    const data = [];
    otherStream.on('data', dat => data.push(dat));
    const resultOtherStreamPromise = new Promise(resolve => otherStream.on('end', () => { resolve(Buffer.concat(data)) }));
    const yetAnotherStream = new PassThrough();
    const data2 = [];
    yetAnotherStream.on('data', dat => data2.push(dat));
    const resultYetAnotherStreamPromise = new Promise(resolve => yetAnotherStream.on('end', () => { resolve(Buffer.concat(data2)) }));
    // otherStream is piped BEFORE any write, so it sees everything
    initialStream.pipe(otherStream);
    initialStream.write('some ');
    await Promise.resolve(); // Completely unrelated await
    // yetAnotherStream is attached only after the await — by then the
    // 'some ' chunk has already been delivered, so it never receives it
    initialStream.pipe(yetAnotherStream);
    initialStream.end('data');
    const [resultOtherStream, resultYetAnotherStream] = await Promise.all([
        resultOtherStreamPromise,
        resultYetAnotherStreamPromise,
    ]);
    console.log('other stream:', resultOtherStream.toString()); // other stream: some data
    console.log('yet another stream:', resultYetAnotherStream.toString()); // yet another stream: data
}
main();