Download S3 files to local disk instead of holding connections to S3 #157
Conversation
lib/stream/leo-stream.js
Outdated
let res;
let rej;
file.localFileReady = new Promise((resolve, reject) => {
	res = resolve;
@jgrantr is there a better way to do this? I just need a promise that I'll resolve somewhere else
No. If you don't want everything in the block, what you have done here is what I've done in other places. May want to give it a more descriptive name (like, what does the promise represent) so it's clear what it is used for, but the general approach looks good.
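A minimal sketch of the pattern under discussion, assuming the promise represents the S3 object having been downloaded to local disk; the descriptive names `resolveLocalFileReady` / `rejectLocalFileReady` are illustrative, not from the PR:

```js
// Sketch of a deferred promise with descriptive names (illustrative only).
// `file.localFileReady` resolves once the S3 object has been written to disk.
function createLocalFileReadyPromise(file) {
	file.localFileReady = new Promise((resolve, reject) => {
		// Capture the settle functions so the download worker, which runs
		// elsewhere, can resolve or reject this promise later.
		file.resolveLocalFileReady = resolve;
		file.rejectLocalFileReady = reject;
	});
	return file.localFileReady;
}

// Elsewhere, e.g. in the download queue worker:
// file.resolveLocalFileReady(file.localFilename);  // on success
// file.rejectLocalFileReady(err);                  // on failure
```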
lib/stream/leo-stream.js
Outdated
	});
}, 10);
downloadQueue.error(function(err, task) {
	loggerS3.debug("Download queue error", err, task);
change this to .error
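For context, a hedged sketch of the suggested change, assuming `downloadQueue` is an `async.queue` (the `, 10)` above reads like its concurrency argument) and keeping the `downloadQueue.error(...)` call exactly as it appears in the diff; `loggerS3` is stubbed with `console` here:

```js
const async = require("async");

// Stand-in for the PR's loggerS3 instance.
const loggerS3 = console;

// Worker downloads one S3 object to local disk; 10 downloads run concurrently.
const downloadQueue = async.queue((task, done) => {
	// ... stream task.file from S3 to task.file.localFilename, then:
	done();
}, 10);

// Log failed downloads at error level rather than debug, as suggested.
downloadQueue.error(function(err, task) {
	loggerS3.error("Download queue error", err, task);
});
```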
lib/stream/leo-stream.js
Outdated
file.localFilename = s3LocalFileHelper.buildLocalFilePath(file, item.end);

let res;
Rename these so they are more readable.
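The rename suggestion is illustrated in the sketch above. The other new piece in this hunk, `s3LocalFileHelper.buildLocalFilePath`, isn't shown in the PR; purely as a hypothetical sketch, such a helper might derive a deterministic cache path from the S3 location and the event range:

```js
// Hypothetical sketch only -- the real s3LocalFileHelper is not shown in this diff.
const path = require("path");
const os = require("os");

// Assumed cache location; the actual directory and naming scheme may differ.
const CACHE_DIR = process.env.LEO_S3_CACHE_DIR || path.join(os.tmpdir(), "leo-s3-cache");

function buildLocalFilePath(file, endEid) {
	// Flatten bucket/key into a single safe file name.
	const safeKey = `${file.bucket}/${file.key}`.replace(/[^a-zA-Z0-9._-]+/g, "_");
	// Include the end eid so different event ranges of the same object don't collide.
	return path.join(CACHE_DIR, `${safeKey}_${endEid}`);
}
```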
unlinkSync(file.fullpath);
deleted++;
purgeSize += file.size;
logger.debug("Would delete:", file.fullpath, eidTimestamp, startEidTimestamp, endEidTimestamp, size, maxStorage, eidTimestamp < startEidTimestamp, eidTimestamp > endEidTimestamp && size > maxStorage);
Change "Would delete" to "Deleting".
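A sketch of the purge pass being reviewed, with the message changed from "Would delete" to "Deleting" as requested; the entry fields (`fullpath`, `size`, `eidTimestamp`) come from the diff, while the surrounding loop, parameters, and deletion conditions are assumed:

```js
// Sketch of the local-cache purge, assuming each entry carries
// { fullpath, size, eidTimestamp } and the caller supplies the retention window.
const { unlinkSync } = require("fs");

function purgeLocalFiles(files, startEidTimestamp, endEidTimestamp, maxStorage, logger = console) {
	let deleted = 0;
	let purgeSize = 0;
	let size = files.reduce((sum, f) => sum + f.size, 0);

	for (const file of files) {
		// Delete files older than the retention window, or files past the end
		// of the window only while total size is still above the storage cap.
		const tooOld = file.eidTimestamp < startEidTimestamp;
		const overBudget = file.eidTimestamp > endEidTimestamp && size > maxStorage;
		if (tooOld || overBudget) {
			unlinkSync(file.fullpath);
			deleted++;
			purgeSize += file.size;
			size -= file.size;
			logger.debug("Deleting:", file.fullpath, file.eidTimestamp, startEidTimestamp, endEidTimestamp, size, maxStorage);
		}
	}
	return { deleted, purgeSize };
}
```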
// event data in front of it
-	for (; last_s3_index < items.length && bytes < opts.fast_s3_read_parallel_fetch_max_bytes; last_s3_index++) {
+	for (; last_s3_index <= index || (last_s3_index < items.length && bytes < opts.fast_s3_read_parallel_fetch_max_bytes); last_s3_index++) {
		agg_bytes += items[last_s3_index].size; // Accounts for any non S3 data between S3 files
@czirker This will fix the case where we don't have an S3 file for the given index, right?
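A sketch of how the revised loop reads. The loop body beyond the diff is assumed, as are the `enqueueDownload` callback and the `.s3` flag on items; the `last_s3_index <= index` clause is what guarantees the S3 file at the current index is always fetched, even when the parallel-fetch byte budget is already spent.

```js
// Sketch of the revised look-ahead loop (body beyond the diff is assumed).
// Always covers the item at `index`, then keeps prefetching S3 files until
// the parallel-fetch byte budget is exhausted.
function queuePrefetch(items, index, last_s3_index, opts, enqueueDownload) {
	let bytes = 0;
	let agg_bytes = 0;
	for (; last_s3_index <= index || (last_s3_index < items.length && bytes < opts.fast_s3_read_parallel_fetch_max_bytes); last_s3_index++) {
		agg_bytes += items[last_s3_index].size; // Accounts for any non S3 data between S3 files
		if (items[last_s3_index].s3) {
			enqueueDownload(items[last_s3_index]);
			bytes += agg_bytes;
			agg_bytes = 0;
		}
	}
	return last_s3_index;
}
```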