How do I pass multiple ReadableStreams to a single WriteStream?

I'm working around a firewall restriction that only lets me send 10 MB at a time. To handle larger uploads, I'd like to do something like this: write multiple chunks to disk and re-combine them at the end.
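
As a rough illustration (this sketch is not part of the original question), the 10 MB cap simply translates into a chunk count of total size divided by the cap, rounded up:

// Hedged illustration only: chunk count under a 10 MB per-request cap.
const CHUNK_LIMIT = 10 * 1024 * 1024; // bytes allowed per request by the firewall
const chunksNeeded = totalBytes => Math.ceil(totalBytes / CHUNK_LIMIT);

console.log(chunksNeeded(25 * 1024 * 1024)); // => 3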

For now I'm just writing tests, but something in my implementation is wrong.

First, I split the file like this:

    const splitFile = async () => {
      const chunkSize = 1024 * 1024;
      const photo = fs.createReadStream(path.resolve(FIXTURES, 'hello-tron.jpg'));

      // Write to 2 files
      photo.on('readable', function() {
        const file1 = path.resolve(TEMP, 'chunk.jpg.1');
        const file2 = path.resolve(TEMP, 'chunk.jpg.2');
        let data;
        while (data = this.read(chunkSize)) {
          if (!fs.existsSync(file1)) {
            const output1 = fs.createWriteStream(file1);
            output1.write(data);
            output1.close();
            return;
          }
          const output2 = fs.createWriteStream(file2);
          output2.write(data);
          if (data === null) {
            output2.close();
          }
        }
      });

      return new Promise(resolve => {
        photo.on('end', resolve);
      });
    };

Then I re-assemble it like this:

const recombine = async () => {
  const output = fs.createWriteStream(path.resolve(TEMP, 'recombined.jpg'));
  const file1 = path.resolve(TEMP, 'chunk.jpg.1');
  const file2 = path.resolve(TEMP, 'chunk.jpg.2');
  return new Promise(resolve => {
    const stream1 = fs.createReadStream(file1);
    const stream2 = fs.createReadStream(file2);

    const recombinator = new Recombinator({
      readables: [stream1, stream2]
    });

    stream1.on('readable', () => {
      stream2.on('readable', () => {
        recombinator.pipe(output);
      });
    });

    stream1.on('end', () => {
      stream2.on('end', () => {
        resolve();
      });
    });
  })
};

Here is the Recombinator class:

/* Takes multiple readable streams and returns a single
 * readable stream that can be piped to a writable stream
 */
const {Readable} = require('stream');

class Recombinator extends Readable {
  constructor(opts) {
    super({...opts, readables: undefined});
    const self = this;
    self.readables = opts.readables || [];
  }

  _read(size) {
    this.push(this._getChunk(size));
  }

  _getChunk(size) {
    const reader = this.readables.find(r => !r.closed);
    if (!reader) {
      return null;
    }
    const data = reader.read(size);
    if (!data) {
      reader.closed = true;
      return this._getChunk(size);
    }
    return data;
  }
}

module.exports = Recombinator;

Here is the original image:

Here is the re-assembled image:

The answer is as follows:

Part of the problem was the assumption that the readable event fires only once; in fact it fires every time data becomes available to read. The nested event handlers probably aren't a great idea either.
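
To illustrate that point (this demo is not part of the original answer, and the file path is just a placeholder), counting the readable events on a single file stream shows it firing repeatedly as the internal buffer refills:

const fs = require('fs');

// Count how many times 'readable' fires while fully draining one stream.
const stream = fs.createReadStream('some-large-file.bin', { highWaterMark: 16 * 1024 });

let fired = 0;
stream.on('readable', () => {
  fired += 1;
  // Drain everything currently buffered so the stream keeps refilling.
  while (stream.read() !== null) {}
});

stream.on('end', () => {
  // For any file larger than the highWaterMark this is normally well above 1.
  console.log(`'readable' fired ${fired} times`);
});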

The solution I'm using is to change the Recombinator constructor like this:

  constructor(opts) {
    super({...opts, readables: undefined});
    const self = this;
    self.readables = opts.readables || [];

    self._readableCount = 0;
    self._endedCount = 0;

    // Attach listeners to know when all readables are open and closed
    self.readables.forEach(r => {
      r.on('readable', () => {
        if (r._markedReadable) {
          return;
        }
        r._markedReadable = true;
        self._readableCount++;
      });
      r.on('end', () => {
        if (r._markedEnded) {
          return;
        }
        r._markedEnded = true;
        self._endedCount++;
      });
    })
  }

and to add async methods that wait for all of the readables to open and to end, like this:

  async ready(retry = 10) {
    if (this._readableCount === this.readables.length) {
      return Promise.resolve();
    }
    if (retry === 0) {
      return Promise.reject(`Timeout waiting for ${this.readables.length} readables to open - got ${this._readableCount}`);
    }
    await delay(500);
    return this.ready(retry - 1);
  }

  async done(retry = 10) {
    if (this._endedCount === this.readables.length) {
      return Promise.resolve();
    }
    if (retry === 0) {
      return Promise.reject(`Timeout waiting for ${this.readables.length} readables to end - got ${this._endedCount}`);
    }
    await delay(500);
    return this.done(retry - 1);
  }
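
The delay helper used by ready() and done() isn't shown in the answer; here is a minimal sketch, assuming it is just a promise-based sleep, together with a hypothetical version of the recombine function (reusing fs, path, TEMP and Recombinator from the question) that waits for the streams before and after piping:

// Assumed helper: promise-based sleep used by ready() and done().
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

// Hypothetical usage of the revised class (not shown in the original answer):
// wait until every source has emitted 'readable', pipe, then wait for 'end'.
const recombine = async () => {
  const output = fs.createWriteStream(path.resolve(TEMP, 'recombined.jpg'));
  const recombinator = new Recombinator({
    readables: [
      fs.createReadStream(path.resolve(TEMP, 'chunk.jpg.1')),
      fs.createReadStream(path.resolve(TEMP, 'chunk.jpg.2'))
    ]
  });

  await recombinator.ready(); // all sources have fired 'readable'
  recombinator.pipe(output);
  await recombinator.done();  // all sources have fired 'end'
};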
