From c95e1665324a6077bf3515dd98003ec12162e5fc Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Wed, 8 Mar 2023 22:20:27 +0000
Subject: [PATCH 01/14] feat: handle concurrent batches WIP

- switch to an sqs lib that polls for new messages concurrently rather
  than in batches
- rewrite pickup worker so we can compose it out of single-responsibility
  pieces instead of having to pass through the giant config ball.

License: MIT
Signed-off-by: Oli Evans
---
 package-lock.json                         | 132 +++++++++++++++++++++-
 package.json                              |   3 +-
 pickup/index.js                           |  42 +------
 pickup/lib/ipfs.js                        |  72 +++++++-----
 pickup/lib/pickup.js                      |  85 ++++++++++++++
 pickup/lib/s3.js                          |  47 ++++++--
 pickup/test/downloadStatusManager.test.js | 101 -----------------
 pickup/test/pickup.test.js                |   2 +-
 8 files changed, 304 insertions(+), 180 deletions(-)
 create mode 100644 pickup/lib/pickup.js
 delete mode 100644 pickup/test/downloadStatusManager.test.js

diff --git a/package-lock.json b/package-lock.json
index 1f52b80..ec0ae9b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -14,7 +14,8 @@
       ],
       "dependencies": {
         "@aws-cdk/aws-apigatewayv2-alpha": "^2.50.0-alpha.0",
-        "aws-cdk-lib": "2.50.0"
+        "aws-cdk-lib": "2.50.0",
+        "squiss-ts": "^4.4.1"
       },
       "devDependencies": {
         "@serverless-stack/cli": "^1.18.4",
@@ -13486,6 +13487,11 @@
         "varint": "^6.0.0"
       }
     },
+    "node_modules/linked-list": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/linked-list/-/linked-list-2.1.0.tgz",
+      "integrity": "sha512-0GK/ylO6e5cv1PCOIdTRHxOaCgQ+0jKwHt+cHzkiCAZlx0KM5Id1bBAPad6g2mkvBNp1pNdmG0cohFGfqjkv9A=="
+    },
     "node_modules/load-json-file": {
       "version": "7.0.1",
       "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-7.0.1.tgz",
@@ -16257,6 +16263,66 @@
         "aws-sdk": "^2.1114.0"
       }
     },
+    "node_modules/squiss-ts": {
+      "version": "4.4.1",
+      "resolved": "https://registry.npmjs.org/squiss-ts/-/squiss-ts-4.4.1.tgz",
+      "integrity": "sha512-mLZ8ppMJUr7Zn2LS0extvY+E6oDMdZKgZy10epTIGau9qWvwihCnXwb1LifgT6qNvdH7jqxAB/7mgRAV8DEkIA==",
+      "hasInstallScript": true,
+      "dependencies": {
+        "aws-sdk": "2.814.0",
+        "linked-list": "^2.1.0",
+        "semver": "6.3.0",
+        "ts-type-guards": "^0.7.0",
+        "uuid": "^8.3.2"
+      },
+      "engines": {
+        "node": "^6 || ^8 || ^10 || ^12 || ^14 || ^16 || ^18"
+      }
+    },
+    "node_modules/squiss-ts/node_modules/aws-sdk": {
+      "version": "2.814.0",
+      "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.814.0.tgz",
+      "integrity": "sha512-empd1m/J/MAkL6d9OeRpmg9thobULu0wk4v8W3JToaxGi2TD7PIdvE6yliZKyOVAdJINhBWEBhxR4OUIHhcGbQ==",
+      "dependencies": {
+        "buffer": "4.9.2",
+        "events": "1.1.1",
+        "ieee754": "1.1.13",
+        "jmespath": "0.15.0",
+        "querystring": "0.2.0",
+        "sax": "1.2.1",
+        "url": "0.10.3",
+        "uuid": "3.3.2",
+        "xml2js": "0.4.19"
+      },
+      "engines": {
+        "node": ">= 0.8.0"
+      }
+    },
+    "node_modules/squiss-ts/node_modules/aws-sdk/node_modules/uuid": {
+      "version": "3.3.2",
+      "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
+      "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==",
+      "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic.
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/squiss-ts/node_modules/jmespath": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "integrity": "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/squiss-ts/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/ssh-remote-port-forward": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/ssh-remote-port-forward/-/ssh-remote-port-forward-1.0.4.tgz", @@ -17691,6 +17757,11 @@ "node": ">=6" } }, + "node_modules/ts-type-guards": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/ts-type-guards/-/ts-type-guards-0.7.0.tgz", + "integrity": "sha512-f+IUliQzU09E8Ml5JwL+sxWenrebphNhy0LaCmkwACOvliuB7LxuAXUrAVfjwF+2PX5yMs9hfydBPR5lIIuUCA==" + }, "node_modules/tsconfig-paths": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", @@ -29037,6 +29108,11 @@ } } }, + "linked-list": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/linked-list/-/linked-list-2.1.0.tgz", + "integrity": "sha512-0GK/ylO6e5cv1PCOIdTRHxOaCgQ+0jKwHt+cHzkiCAZlx0KM5Id1bBAPad6g2mkvBNp1pNdmG0cohFGfqjkv9A==" + }, "load-json-file": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-7.0.1.tgz", @@ -30096,7 +30172,7 @@ "nanoid": "^4.0.0", "node-fetch": "^3.3.0", "openapi-backend": "^5.3.0", - "p-retry": "*", + "p-retry": "^5.1.2", "pino-lambda": "4.1.0" }, "dependencies": { @@ -31124,6 +31200,53 @@ "debug": "^4.3.1" } }, + "squiss-ts": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/squiss-ts/-/squiss-ts-4.4.1.tgz", + "integrity": "sha512-mLZ8ppMJUr7Zn2LS0extvY+E6oDMdZKgZy10epTIGau9qWvwihCnXwb1LifgT6qNvdH7jqxAB/7mgRAV8DEkIA==", + "requires": { + "aws-sdk": "2.814.0", + "linked-list": "^2.1.0", + "semver": "6.3.0", + "ts-type-guards": "^0.7.0", + "uuid": "^8.3.2" + }, + "dependencies": { + "aws-sdk": { + "version": "2.814.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.814.0.tgz", + "integrity": "sha512-empd1m/J/MAkL6d9OeRpmg9thobULu0wk4v8W3JToaxGi2TD7PIdvE6yliZKyOVAdJINhBWEBhxR4OUIHhcGbQ==", + "requires": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.3.2", + "xml2js": "0.4.19" + }, + "dependencies": { + "uuid": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + } + } + }, + "jmespath": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "integrity": "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==" + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, "ssh-remote-port-forward": { "version": "1.0.4", "resolved": 
"https://registry.npmjs.org/ssh-remote-port-forward/-/ssh-remote-port-forward-1.0.4.tgz", @@ -32222,6 +32345,11 @@ } } }, + "ts-type-guards": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/ts-type-guards/-/ts-type-guards-0.7.0.tgz", + "integrity": "sha512-f+IUliQzU09E8Ml5JwL+sxWenrebphNhy0LaCmkwACOvliuB7LxuAXUrAVfjwF+2PX5yMs9hfydBPR5lIIuUCA==" + }, "tsconfig-paths": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", diff --git a/package.json b/package.json index 0c6c276..d5459be 100644 --- a/package.json +++ b/package.json @@ -35,7 +35,8 @@ ], "dependencies": { "@aws-cdk/aws-apigatewayv2-alpha": "^2.50.0-alpha.0", - "aws-cdk-lib": "2.50.0" + "aws-cdk-lib": "2.50.0", + "squiss-ts": "^4.4.1" }, "ts-standard": { "ignore": [ diff --git a/pickup/index.js b/pickup/index.js index 5a5ec86..ab5d381 100644 --- a/pickup/index.js +++ b/pickup/index.js @@ -1,44 +1,10 @@ -import { createConsumer } from './lib/consumer.js' +import { createPickupFromEnv } from './lib/pickup' import { logger } from './lib/logger.js' -import { DownloadStatusManager } from './lib/downloadStatusManager.js' - -const { - IPFS_API_URL, - SQS_QUEUE_URL, - DYNAMO_TABLE_NAME, - DYNAMO_DB_ENDPOINT, - BATCH_SIZE, - MAX_RETRY, - TIMEOUT_FETCH, - LOG_STATE_EVERY_SECONDS, - VALIDATION_BUCKET -} = process.env - -if (!IPFS_API_URL) throw new Error('IPFS_API_URL not found in ENV') -if (!SQS_QUEUE_URL) throw new Error('SQS_QUEUE_URL not found in ENV') -if (!DYNAMO_TABLE_NAME) throw new Error('DYNAMO_TABLE_NAME not found in ENV') async function start () { - logger.info({}, 'Pickup starting...') - const app = await createConsumer({ - ipfsApiUrl: IPFS_API_URL, - queueUrl: SQS_QUEUE_URL, - dynamoTable: DYNAMO_TABLE_NAME, - dynamoEndpoint: DYNAMO_DB_ENDPOINT || undefined, - validationBucket: VALIDATION_BUCKET || undefined, - batchSize: Number(BATCH_SIZE || 1), - maxRetry: Number(MAX_RETRY || 5), - timeoutFetchMs: Number(TIMEOUT_FETCH || 30) * 1000, - downloadStatusManager: new DownloadStatusManager(), - downloadStatusLoggerSeconds: Math.max(Number(LOG_STATE_EVERY_SECONDS) || 300, 60) - }) - - app.on('message_received', msg => { - const { requestid, cid } = JSON.parse(msg.Body) - logger.info({ requestid, cid }, 'Processing request') - }) - app.start() - logger.info({}, `Pickup subscribed to ${SQS_QUEUE_URL}`) + logger.info({}, 'Pickup starting...') + const pickup = await createPickupFromEnv() + await pickup.start() } start() diff --git a/pickup/lib/ipfs.js b/pickup/lib/ipfs.js index 298c093..d77e74c 100644 --- a/pickup/lib/ipfs.js +++ b/pickup/lib/ipfs.js @@ -1,57 +1,45 @@ -import { compose } from 'node:stream' +import { compose, Readable } from 'node:stream' import { CID } from 'multiformats/cid' import { Multiaddr } from 'multiaddr' import debounce from 'debounce' import fetch from 'node-fetch' - import { logger } from './logger.js' -import { STATE_DOWNLOADING } from './downloadStatusManager.js' export const ERROR_TIMEOUT = 'TIMEOUT' /** * Start the fetch of a car * - * @param string cid - The CID requested - * @param string ipfsApiUrl - The IPFS server url - * @param object downloadError - The error object, is filled in if an error occurs - * @param int timeoutMs - The timeout for each block fetch in milliseconds. - * The Download is set to `failed` if the IPFS server - * fetch action do not respond while is downloading the blocks. 
- * @param {DownloadStatusManager} downloadStatusManager
- * @returns {Promise<*>}
+ * @param {string} cid - The CID requested
+ * @param {string} ipfsApiUrl - The IPFS server url
+ * @param {AbortController} abortCtl - Can I kill it?
+ * @param {number} timeoutMs - The timeout for each block fetch in milliseconds.
+ * @returns {Promise<Readable>}
  */
-export async function fetchCar (cid, ipfsApiUrl, downloadError, timeoutMs = 30000, downloadStatusManager) {
+export async function fetchCar ({ cid, ipfsApiUrl, abortCtl = new AbortController(), timeoutMs = 30000 }) {
   if (!isCID(cid)) {
     throw new Error({ message: `Invalid CID: ${cid}` })
   }
   const url = new URL(`/api/v0/dag/export?arg=${cid}`, ipfsApiUrl)
-  const ctl = new AbortController()
-  const startCountdown = debounce(() => {
-    downloadError.code = ERROR_TIMEOUT
-    ctl.abort()
-  }, timeoutMs)
+  const startCountdown = debounce(() => abortCtl.abort(), timeoutMs)
   startCountdown()
-  const res = await fetch(url, { method: 'POST', signal: ctl.signal })
+
+  const signal = abortCtl.signal
+  const res = await fetch(url, { method: 'POST', signal })
   if (!res.ok) {
     throw new Error(`${res.status} ${res.statusText} ${url}`)
   }
-  let downloadSize = 0
   async function * restartCountdown (source) {
-    // startCountdown.clear()
-    // throw new Error('There was an error!!')
     for await (const chunk of source) {
-      downloadSize += chunk.length
-      downloadStatusManager.setStatus(cid, STATE_DOWNLOADING, downloadSize)
       startCountdown()
       yield chunk
     }
     startCountdown.clear()
   }
-  return compose(res.body, restartCountdown)
+  return compose(res.body, restartCountdown, { signal })
 }
 
 /**
@@ -136,7 +124,7 @@ export function isCID (cid) {
  * Test the connection with IPFS server
  * @param {string} ipfsApiUrl
  * @param {number} timeoutMs
- * @returns {Promise}
+ * @returns {Promise<object>}
  */
 export async function testIpfsApi (ipfsApiUrl, timeoutMs = 10000) {
   const url = new URL('/api/v0/id', ipfsApiUrl)
@@ -149,10 +137,38 @@ export async function testIpfsApi (ipfsApiUrl, timeoutMs = 10000) {
       }
       throw new Error(`IPFS API test failed. POST ${url} returned ${res.status} ${res.statusText}`)
     }
-    const { AgentVersion, ID } = await res.json()
-    logger.info({ agentVersion: AgentVersion, peerId: ID }, 'Connected')
+    return await res.json()
   } catch (err) {
-    logger.error({ err }, 'Test ipfs fail')
     throw new Error('IPFS API test failed.', { cause: err })
   }
 }
+
+export class CarFetcher {
+  /**
+   * @param {object} config
+   * @param {string} ipfsApiUrl
+   * @param {number} fetchTimeoutMs
+   */
+  constructor ({ ipfsApiUrl, fetchTimeoutMs = 60000 }) {
+    this.ipfsApiUrl = ipfsApiUrl
+    this.fetchTimeoutMs = fetchTimeoutMs
+  }
+
+  /**
+   * @param {object} config
+   * @param {string} config.cid
+   * @param {string[]} config.origins
+   * @param {(Readable) => Promise} config.upload
+   */
+  async fetch ({ cid, origins, upload }) {
+    const { ipfsApiUrl, fetchTimeoutMs } = this
+    try {
+      await connectTo(origins, ipfsApiUrl)
+      const body = await fetchCar({ cid, ipfsApiUrl, timeoutMs: fetchTimeoutMs })
+      await upload(body)
+    } finally {
+      await disconnect(origins, ipfsApiUrl)
+      await waitForGC(ipfsApiUrl)
+    }
+  }
+}
diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js
new file mode 100644
index 0000000..523a96f
--- /dev/null
+++ b/pickup/lib/pickup.js
@@ -0,0 +1,85 @@
+import { Squiss, Message } from 'squiss-ts'
+import { S3Client } from '@aws-sdk/client-s3'
+import { CarFetcher, testIpfsApi } from './ipfs.js'
+import { S3Uploader } from './s3.js'
+import { logger } from './logger.js'
+
+/**
+ * Use me in prod to set all the things.
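+ * Requires IPFS_API_URL, SQS_QUEUE_URL and VALIDATION_BUCKET in the env;
+ * BATCH_SIZE and TIMEOUT_FETCH (in seconds) are optional tuning knobs.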
+ *
+ * @param {Record<string, string>} env
+ */
+export async function createPickupFromEnv (env = process.env) {
+  const {
+    IPFS_API_URL,
+    SQS_QUEUE_URL,
+    BATCH_SIZE,
+    TIMEOUT_FETCH,
+    VALIDATION_BUCKET
+  } = env
+
+  if (!IPFS_API_URL) throw new Error('IPFS_API_URL not found in ENV')
+  if (!SQS_QUEUE_URL) throw new Error('SQS_QUEUE_URL not found in ENV')
+  if (!VALIDATION_BUCKET) throw new Error('VALIDATION_BUCKET not found in ENV')
+
+  const pickup = await createPickup({
+    queuePoller: createSqsPoller({
+      queueUrl: SQS_QUEUE_URL,
+      maxInFlight: BATCH_SIZE ? Number(BATCH_SIZE) : undefined
+    }),
+    carFetcher: new CarFetcher({
+      ipfsApiUrl: IPFS_API_URL,
+      fetchTimeoutMs: TIMEOUT_FETCH ? Number(TIMEOUT_FETCH) * 1000 : undefined
+    }),
+    s3Uploader: new S3Uploader({
+      s3: new S3Client(),
+      bucket: VALIDATION_BUCKET
+    })
+  })
+
+  return pickup
+}
+
+/**
+ * @param {object} config
+ * @param {Squiss} config.queuePoller
+ * @param {CarFetcher} config.carFetcher
+ * @param {S3Uploader} config.s3Uploader
+ */
+export async function createPickup ({ queuePoller, carFetcher, s3Uploader }) {
+  await testIpfsApi(carFetcher.ipfsApiUrl)
+
+  /**
+   * @param {Message} msg
+   */
+  async function messageHandler (msg) {
+    const { cid, origins, key } = msg.body
+    try {
+      logger.info({ cid }, 'Fetching car')
+      const uploader = s3Uploader.createUploader({ cid, key })
+      await carFetcher.fetch({ cid, origins, upload: uploader })
+      logger.info({ cid }, 'OK. Car in S3')
+      msg.del() // the message is handled, remove it from queue.
+    } catch (error) {
+      logger.error({ cid, error }, `Error ${error.message || error}`)
+      // return the msg to the queue for another go
+      msg.release()
+    }
+  }
+
+  queuePoller.on('message', messageHandler)
+
+  return queuePoller
+}
+
+/**
+ * @param {import('squiss-ts').ISquissOptions} config
+ */
+export function createSqsPoller (config) {
+  return new Squiss({
+    // set our default overrides here, we always want these.
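+    // autoExtendTimeout keeps extending each message's visibility timeout
+    // while its car is still downloading, so slow fetches are not handed to
+    // another worker mid-flight; bodyFormat 'json' makes squiss parse
+    // msg.body for us before the 'message' handler sees it.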
+    autoExtendTimeout: true,
+    bodyFormat: 'json',
+    ...config
+  })
+}
+
diff --git a/pickup/lib/s3.js b/pickup/lib/s3.js
index eac040f..78a772e 100644
--- a/pickup/lib/s3.js
+++ b/pickup/lib/s3.js
@@ -10,11 +10,9 @@ import { logger } from './logger.js'
  * @param {string} key
  * @param {Stream} body
  * @param {string} cid
- * @param {object} downloadError
- * @param {string} downloadError.code
  * @returns {Promise}
  */
-export async function sendToS3 ({ client, bucket, key }, { body, cid, downloadError }) {
+export async function sendToS3 ({ client, bucket, key, body, cid }) {
   const params = {
     Metadata: { structure: 'complete' },
     Bucket: bucket,
@@ -26,12 +24,12 @@ export async function sendToS3 ({ client, bucket, key }, { body, cid, downloadEr
   // see: https://github.com/aws/aws-sdk-js-v3/blob/main/lib/lib-storage/README.md
   const s3Upload = new Upload({ client, params })
 
-  body.on('error', (err) => {
-    if (err.code === 'AbortError' || err.constructor.name === 'AbortError') {
-      logger.trace({ err, cid }, 'The abort command was thrown by a ipfs timeout')
+  body.on('error', (error) => {
+    if (error.code === 'AbortError' || error.constructor.name === 'AbortError') {
+      logger.trace({ error, cid }, 'The abort command was thrown by an IPFS timeout')
       return
     }
-    logger.error({ err, code: downloadError.code, cid }, 'S3 upload error')
+    logger.error({ error, cid }, 'S3 upload error')
   })
 
   return s3Upload.done()
@@ -41,11 +39,12 @@ export async function sendToS3 ({ client, bucket, key }, { body, cid, downloadEr
  * Create the uploader
  * @param {import('@aws-sdk/client-s3'.S3Client)} client
  * @param {string} bucket
- * @param {string} key
  * @returns {import('@aws-sdk/client-s3'.S3Client)}
  */
-export function createS3Uploader ({ client = createS3Client(), bucket, key }) {
-  return sendToS3.bind(null, { client, bucket, key })
+export function createS3Uploader ({ client = createS3Client(), bucket }) {
+  return function ({ key, body, cid }) {
+    return sendToS3({ client, bucket, key, body, cid })
+  }
 }
 
 /**
@@ -57,3 +56,33 @@ export function createS3Client () {
   // Expects AWS_* ENV vars set.
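   // (AWS_REGION plus credentials such as AWS_ACCESS_KEY_ID and
   // AWS_SECRET_ACCESS_KEY, resolved via the SDK's default provider chain.)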
   return new S3Client({})
 }
+
+export class S3Uploader {
+  /**
+   * @param {object} config
+   * @param {S3Client} s3
+   * @param {string} bucket
+   */
+  constructor ({ s3, bucket }) {
+    this.s3 = s3
+    this.bucket = bucket
+  }
+
+  /**
+   * @param {object} config
+   * @param {string} cid
+   * @param {string} key
+   */
+  createUploader ({ cid, key }) {
+    const { s3, bucket } = this
+    return async function (body) {
+      return sendToS3({
+        client: s3,
+        bucket,
+        key,
+        body,
+        cid
+      })
+    }
+  }
+}
diff --git a/pickup/test/downloadStatusManager.test.js b/pickup/test/downloadStatusManager.test.js
deleted file mode 100644
index a7eb83c..0000000
--- a/pickup/test/downloadStatusManager.test.js
+++ /dev/null
@@ -1,101 +0,0 @@
-import {
-  DownloadStatusManager,
-  STATE_DONE,
-  STATE_DOWNLOADING,
-  STATE_FAILED,
-  STATE_QUEUED,
-  STATE_TIMEOUT
-} from '../lib/downloadStatusManager.js'
-import test from 'ava'
-
-// verify the lib behaves as expected
-test('with no data should not be running', async t => {
-  const statusManager = new DownloadStatusManager()
-  t.falsy(statusManager.isRunning())
-})
-
-test('with a queued value should be running', async t => {
-  const statusManager = new DownloadStatusManager()
-  statusManager.setStatus('123', STATE_QUEUED)
-  t.deepEqual(statusManager.getStatus(), { 123: { state: STATE_QUEUED } })
-  t.truthy(statusManager.isRunning())
-})
-
-test('set the downloading state with a size', async t => {
-  const statusManager = new DownloadStatusManager()
-  statusManager.setStatus('123', STATE_QUEUED)
-  statusManager.setStatus('123', STATE_DOWNLOADING, 100)
-  t.deepEqual(statusManager.getStatus(), { 123: { state: STATE_DOWNLOADING, size: 100 } })
-  t.truthy(statusManager.isRunning())
-})
-
-test('set the done state with a size', async t => {
-  const statusManager = new DownloadStatusManager()
-  statusManager.setStatus('123', STATE_QUEUED)
-  statusManager.setStatus('123', STATE_DOWNLOADING, 100)
-  statusManager.setStatus('123', STATE_DONE, 200)
-  t.deepEqual(statusManager.getStatus(), { 123: { state: STATE_DONE, size: 200 } })
-  t.falsy(statusManager.isRunning())
-})
-
-test('set multiple state and should be running', async t => {
-  const statusManager = new DownloadStatusManager()
-  statusManager.setStatus('123', STATE_QUEUED)
-  statusManager.setStatus('456', STATE_DOWNLOADING, 100)
-  statusManager.setStatus('780', STATE_DONE, 200)
-  statusManager.setStatus('abc', STATE_FAILED)
-  statusManager.setStatus('efg', STATE_TIMEOUT)
-  t.deepEqual(statusManager.getStatus(),
-    {
-      123: {
-        state: 'queued'
-      },
-      456: {
-        size: 100,
-        state: 'downloading'
-      },
-      780: {
-        size: 200,
-        state: 'done'
-      },
-      abc: {
-        state: 'failed'
-      },
-      efg: {
-        state: 'timeout'
-      }
-    })
-  t.truthy(statusManager.isRunning())
-})
-
-test('set multiple state and should be not running', async t => {
-  const statusManager = new DownloadStatusManager()
-  statusManager.setStatus('780', STATE_DONE, 200)
-  statusManager.setStatus('abc', STATE_FAILED)
-  statusManager.setStatus('efg', STATE_TIMEOUT)
-  t.deepEqual(statusManager.getStatus(),
-    {
-      780: {
-        size: 200,
-        state: 'done'
-      },
-      abc: {
-        state: 'failed'
-      },
-      efg: {
-        state: 'timeout'
-      }
-    })
-  t.falsy(statusManager.isRunning())
-})
-
-test('should reset the state', async t => {
-  const statusManager = new DownloadStatusManager()
-  statusManager.setStatus('780', STATE_DONE, 200)
-  statusManager.setStatus('abc', STATE_FAILED)
-  statusManager.setStatus('efg', STATE_TIMEOUT)
-  statusManager.reset()
-  t.deepEqual(statusManager.getStatus(),
-    {})
-
t.falsy(statusManager.isRunning()) -}) diff --git a/pickup/test/pickup.test.js b/pickup/test/pickup.test.js index 2d6c594..4ed75eb 100644 --- a/pickup/test/pickup.test.js +++ b/pickup/test/pickup.test.js @@ -2,7 +2,7 @@ import nock from 'nock' import test from 'ava' import { SendMessageCommand } from '@aws-sdk/client-sqs' -import { createConsumer } from '../lib/consumer.js' +import { createPickup } from '../lib/pickup.js' import { compose } from './_compose.js' import { prepareCid, verifyMessage, sleep, getMessagesFromSQS, stopConsumer, getValueFromDynamo } from './_helpers.js' import { DownloadStatusManager, STATE_DONE, STATE_QUEUED, STATE_DOWNLOADING } from '../lib/downloadStatusManager.js' From 49b0ab3c72e1078e500738703f5fdfa3a6d1398f Mon Sep 17 00:00:00 2001 From: Oli Evans Date: Thu, 9 Mar 2023 12:50:56 +0000 Subject: [PATCH 02/14] chore: tests pass License: MIT Signed-off-by: Oli Evans --- package-lock.json | 1186 +++++++++++++++++++++++++-- package.json | 3 +- pickup/index.js | 11 +- pickup/lib/consumer.js | 153 ---- pickup/lib/downloadStatusManager.js | 33 - pickup/lib/dynamo.js | 43 - pickup/lib/ipfs.js | 16 +- pickup/lib/pickup.js | 46 +- pickup/lib/pickupBatch.js | 148 ---- pickup/lib/s3.js | 27 +- pickup/package.json | 6 +- pickup/test/_helpers.js | 18 +- pickup/test/consumer.test.js | 85 -- pickup/test/pickup.test.js | 329 +------- 14 files changed, 1208 insertions(+), 896 deletions(-) delete mode 100644 pickup/lib/consumer.js delete mode 100644 pickup/lib/downloadStatusManager.js delete mode 100644 pickup/lib/dynamo.js delete mode 100644 pickup/lib/pickupBatch.js delete mode 100644 pickup/test/consumer.test.js diff --git a/package-lock.json b/package-lock.json index ec0ae9b..fc9f2e9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,8 +14,7 @@ ], "dependencies": { "@aws-cdk/aws-apigatewayv2-alpha": "^2.50.0-alpha.0", - "aws-cdk-lib": "2.50.0", - "squiss-ts": "^4.4.1" + "aws-cdk-lib": "2.50.0" }, "devDependencies": { "@serverless-stack/cli": "^1.18.4", @@ -6569,6 +6568,15 @@ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "node_modules/@eslint/js": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.35.0.tgz", + "integrity": "sha512-JXdzbRiWclLVoD8sNUjR443VVlYqiYmDVT6rGUEIEHU5YJW0gaVZwV2xgM7D4arkvASqD0IlLUVjHiFuxaftRw==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/@gar/promisify": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", @@ -6629,6 +6637,19 @@ "node": ">=10.10.0" } }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, "node_modules/@humanwhocodes/object-schema": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", @@ -8942,6 +8963,15 @@ "node": ">=10.0.0" } }, + "node_modules/builtins": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", + "integrity": "sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==", + 
"dev": true, + "dependencies": { + "semver": "^7.0.0" + } + }, "node_modules/busboy": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", @@ -10886,6 +10916,74 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, + "node_modules/eslint-plugin-n": { + "version": "15.6.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.6.1.tgz", + "integrity": "sha512-R9xw9OtCRxxaxaszTQmQAlPgM+RdGjaL1akWuY/Fv9fRAi8Wj4CUKc6iYVG8QNRjRuo8/BqVYIpfqberJUEacA==", + "dev": true, + "dependencies": { + "builtins": "^5.0.1", + "eslint-plugin-es": "^4.1.0", + "eslint-utils": "^3.0.0", + "ignore": "^5.1.1", + "is-core-module": "^2.11.0", + "minimatch": "^3.1.2", + "resolve": "^1.22.1", + "semver": "^7.3.8" + }, + "engines": { + "node": ">=12.22.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-n/node_modules/eslint-plugin-es": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", + "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", + "dev": true, + "dependencies": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "engines": { + "node": ">=8.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=4.19.1" + } + }, + "node_modules/eslint-plugin-n/node_modules/eslint-plugin-es/node_modules/eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/eslint-plugin-n/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/eslint-plugin-node": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", @@ -11249,9 +11347,9 @@ } }, "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, "dependencies": { "estraverse": "^5.1.0" @@ -11892,9 +11990,9 @@ } }, "node_modules/globals": { - "version": "13.18.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.18.0.tgz", - "integrity": "sha512-/mR4KI8Ps2spmoc0Ulu9L7agOF0du1CZNQ3dke8yItYlyKNmGrkONemBbd6V8UTc1Wgcqn21t3WYB7dbRmh6/A==", + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": 
"sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -11943,6 +12041,12 @@ "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", "dev": true }, + "node_modules/grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, "node_modules/graphql": { "version": "16.6.0", "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.6.0.tgz", @@ -13220,6 +13324,16 @@ "url": "https://github.com/sponsors/panva" } }, + "node_modules/js-sdsl": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz", + "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, "node_modules/js-string-escape": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", @@ -18546,110 +18660,605 @@ "multiformats": "^9.6.5", "node-fetch": "^3.2.10", "pino": "8.8.0", - "sqs-consumer": "^5.7.0" + "squiss-ts": "^4.4.1" }, "devDependencies": { "@aws-sdk/client-sqs": "^3.118.1", "@web-std/blob": "^3.0.4", "ava": "^4.3.0", "ipfs-car": "^0.7.0", - "standard": "^16.0.4", + "standard": "^17.0.0", "testcontainers": "^8.10.1" }, "engines": { "node": ">=16.14" } }, - "validator": { - "version": "1.0.1", - "license": "MIT", + "pickup/node_modules/@eslint/eslintrc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.0.tgz", + "integrity": "sha512-fluIaaV+GyV24CCu/ggiHdV+j4RNh85yQnAYS/G2mZODZgGmmlrgCydjUcV3YvxCm9x8nMAfThsqTni4KiXT4A==", + "dev": true, "dependencies": { - "@aws-sdk/client-s3": "3.258.0", - "@aws-sdk/lib-dynamodb": "^3.112.0", - "@aws-sdk/lib-storage": "^3.97.0", - "@ipld/car": "5.1.0", - "async-retry": "1.3.3", - "debounce": "^1.2.1", - "linkdex": "2.0.0", - "multiaddr": "10.0.1", - "multiformats": "11.0.1", - "node-fetch": "^3.2.10", - "pino": "8.8.0", - "sqs-consumer": "^5.7.0" - }, - "devDependencies": { - "@aws-sdk/client-sqs": "^3.118.1", - "@web-std/blob": "^3.0.4", - "ava": "^4.3.0", - "ipfs-car": "^0.7.0", - "standard": "^16.0.4", - "testcontainers": "^8.10.1" + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.4.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" }, "engines": { - "node": ">=16.14" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "validator/node_modules/@ipld/car": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@ipld/car/-/car-5.1.0.tgz", - "integrity": "sha512-k9pO0YqJvmFGY5pJDhF2Ocz+mRp3C3r4ikr1NrUXkzN/z4JzhE7XbQzUCcm7daq8k4tRqap0fWPjxZwjS9PUcQ==", + "pickup/node_modules/@humanwhocodes/config-array": { + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", + "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "dev": true, "dependencies": { - "@ipld/dag-cbor": "^9.0.0", - "cborg": "^1.9.0", - "multiformats": 
"^11.0.0", - "varint": "^6.0.0" + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" }, "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" + "node": ">=10.10.0" } }, - "validator/node_modules/@ipld/dag-cbor": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/@ipld/dag-cbor/-/dag-cbor-9.0.0.tgz", - "integrity": "sha512-zdsiSiYDEOIDW7mmWOYWC9gukjXO+F8wqxz/LfN7iSwTfIyipC8+UQrCbPupFMRb/33XQTZk8yl3My8vUQBRoA==", + "pickup/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, "dependencies": { - "cborg": "^1.10.0", - "multiformats": "^11.0.0" + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" }, - "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" } }, - "validator/node_modules/multiformats": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-11.0.1.tgz", - "integrity": "sha512-atWruyH34YiknSdL5yeIir00EDlJRpHzELYQxG7Iy29eCyL+VrZHpPrX5yqlik3jnuqpLpRKVZ0SGVb9UzKaSA==", + "pickup/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" + "node": ">=8" } - } - }, - "dependencies": { - "@apidevtools/json-schema-ref-parser": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.0.tgz", - "integrity": "sha512-teB30tFooE3iQs2HQIKJ02D8UZA1Xy1zaczzhUjJs0CymYxeC0g+y5rCY2p8NHBM6DBUVoR8rSM4kHLj1WE9mQ==", - "requires": { - "@jsdevtools/ono": "^7.1.3", - "@types/json-schema": "^7.0.6", - "call-me-maybe": "^1.0.1", - "js-yaml": "^4.1.0" + }, + "pickup/node_modules/eslint": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.35.0.tgz", + "integrity": "sha512-BxAf1fVL7w+JLRQhWl2pzGeSiGqbWumV4WNvc9Rhp6tiCtm4oHnyPBSEtMGZwrQgudFQ+otqzWoPB7x+hxoWsw==", + "dev": true, + "dependencies": { + "@eslint/eslintrc": "^2.0.0", + "@eslint/js": "8.35.0", + "@humanwhocodes/config-array": "^0.11.8", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.4.0", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + }, + "bin": { + 
"eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "@assemblyscript/loader": { - "version": "0.9.4", - "resolved": "https://registry.npmjs.org/@assemblyscript/loader/-/loader-0.9.4.tgz", - "integrity": "sha512-HazVq9zwTVwGmqdwYzu7WyQ6FQVZ7SwET0KKQuKm55jD0IfUpZgN0OPIiZG3zV1iSrVYcN0bdwLRXI/VNCYsUA==", - "dev": true + "pickup/node_modules/eslint-config-standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.0.0.tgz", + "integrity": "sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peerDependencies": { + "eslint": "^8.0.1", + "eslint-plugin-import": "^2.25.2", + "eslint-plugin-n": "^15.0.0", + "eslint-plugin-promise": "^6.0.0" + } }, - "@aws-cdk/aws-apigatewayv2-alpha": { - "version": "2.50.0-alpha.0", - "resolved": "https://registry.npmjs.org/@aws-cdk/aws-apigatewayv2-alpha/-/aws-apigatewayv2-alpha-2.50.0-alpha.0.tgz", - "integrity": "sha512-dttWDqy+nTg/fD9y0egvj7/zdnOVEo0qyGsep1RV+p16R3F4ObMKyPVIg15fz57tK//Gp/i1QgXsZaSqbcWHOg==", - "requires": {} + "pickup/node_modules/eslint-config-standard-jsx": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-11.0.0.tgz", + "integrity": "sha512-+1EV/R0JxEK1L0NGolAr8Iktm3Rgotx3BKwgaX+eAuSX8D952LULKtjgZD3F+e6SvibONnhLwoTi9DPxN5LvvQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peerDependencies": { + "eslint": "^8.8.0", + "eslint-plugin-react": "^7.28.0" + } + }, + "pickup/node_modules/eslint-plugin-promise": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", + "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + } + }, + "pickup/node_modules/eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "pickup/node_modules/espree": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz", + "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==", + "dev": true, + "dependencies": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "pickup/node_modules/find-up": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "pickup/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "pickup/node_modules/load-json-file": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-5.3.0.tgz", + "integrity": "sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.15", + "parse-json": "^4.0.0", + "pify": "^4.0.1", + "strip-bom": "^3.0.0", + "type-fest": "^0.3.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "pickup/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/pkg-conf": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-3.1.0.tgz", + "integrity": "sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ==", + "dev": true, + "dependencies": { + 
"find-up": "^3.0.0", + "load-json-file": "^5.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/pkg-conf/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/pkg-conf/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/pkg-conf/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/pkg-conf/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/pkg-conf/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "pickup/node_modules/standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/standard/-/standard-17.0.0.tgz", + "integrity": "sha512-GlCM9nzbLUkr+TYR5I2WQoIah4wHA2lMauqbyPLV/oI5gJxqhHzhjl9EG2N0lr/nRqI3KCbCvm/W3smxvLaChA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "eslint": "^8.13.0", + "eslint-config-standard": "17.0.0", + "eslint-config-standard-jsx": "^11.0.0", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-n": "^15.1.0", + "eslint-plugin-promise": "^6.0.0", + "eslint-plugin-react": "^7.28.0", + "standard-engine": "^15.0.0" + }, + "bin": { + "standard": "bin/cmd.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "pickup/node_modules/standard-engine": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/standard-engine/-/standard-engine-15.0.0.tgz", + "integrity": "sha512-4xwUhJNo1g/L2cleysUqUv7/btn7GEbYJvmgKrQ2vd/8pkTmN8cpqAZg+BT8Z1hNeEH787iWUdOpL8fmApLtxA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "get-stdin": "^8.0.0", + "minimist": "^1.2.6", + "pkg-conf": "^3.1.0", + 
"xdg-basedir": "^4.0.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "pickup/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "pickup/node_modules/type-fest": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.3.1.tgz", + "integrity": "sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "validator": { + "version": "1.0.1", + "license": "MIT", + "dependencies": { + "@aws-sdk/client-s3": "3.258.0", + "@aws-sdk/lib-dynamodb": "^3.112.0", + "@aws-sdk/lib-storage": "^3.97.0", + "@ipld/car": "5.1.0", + "async-retry": "1.3.3", + "debounce": "^1.2.1", + "linkdex": "2.0.0", + "multiaddr": "10.0.1", + "multiformats": "11.0.1", + "node-fetch": "^3.2.10", + "pino": "8.8.0", + "sqs-consumer": "^5.7.0" + }, + "devDependencies": { + "@aws-sdk/client-sqs": "^3.118.1", + "@web-std/blob": "^3.0.4", + "ava": "^4.3.0", + "ipfs-car": "^0.7.0", + "standard": "^16.0.4", + "testcontainers": "^8.10.1" + }, + "engines": { + "node": ">=16.14" + } + }, + "validator/node_modules/@ipld/car": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@ipld/car/-/car-5.1.0.tgz", + "integrity": "sha512-k9pO0YqJvmFGY5pJDhF2Ocz+mRp3C3r4ikr1NrUXkzN/z4JzhE7XbQzUCcm7daq8k4tRqap0fWPjxZwjS9PUcQ==", + "dependencies": { + "@ipld/dag-cbor": "^9.0.0", + "cborg": "^1.9.0", + "multiformats": "^11.0.0", + "varint": "^6.0.0" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, + "validator/node_modules/@ipld/dag-cbor": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/@ipld/dag-cbor/-/dag-cbor-9.0.0.tgz", + "integrity": "sha512-zdsiSiYDEOIDW7mmWOYWC9gukjXO+F8wqxz/LfN7iSwTfIyipC8+UQrCbPupFMRb/33XQTZk8yl3My8vUQBRoA==", + "dependencies": { + "cborg": "^1.10.0", + "multiformats": "^11.0.0" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, + "validator/node_modules/multiformats": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-11.0.1.tgz", + "integrity": "sha512-atWruyH34YiknSdL5yeIir00EDlJRpHzELYQxG7Iy29eCyL+VrZHpPrX5yqlik3jnuqpLpRKVZ0SGVb9UzKaSA==", + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + } + }, + "dependencies": { + "@apidevtools/json-schema-ref-parser": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.0.tgz", + "integrity": "sha512-teB30tFooE3iQs2HQIKJ02D8UZA1Xy1zaczzhUjJs0CymYxeC0g+y5rCY2p8NHBM6DBUVoR8rSM4kHLj1WE9mQ==", + "requires": { + "@jsdevtools/ono": "^7.1.3", + "@types/json-schema": "^7.0.6", + "call-me-maybe": "^1.0.1", + "js-yaml": "^4.1.0" + } + }, + "@assemblyscript/loader": { + "version": "0.9.4", + "resolved": 
"https://registry.npmjs.org/@assemblyscript/loader/-/loader-0.9.4.tgz", + "integrity": "sha512-HazVq9zwTVwGmqdwYzu7WyQ6FQVZ7SwET0KKQuKm55jD0IfUpZgN0OPIiZG3zV1iSrVYcN0bdwLRXI/VNCYsUA==", + "dev": true + }, + "@aws-cdk/aws-apigatewayv2-alpha": { + "version": "2.50.0-alpha.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-apigatewayv2-alpha/-/aws-apigatewayv2-alpha-2.50.0-alpha.0.tgz", + "integrity": "sha512-dttWDqy+nTg/fD9y0egvj7/zdnOVEo0qyGsep1RV+p16R3F4ObMKyPVIg15fz57tK//Gp/i1QgXsZaSqbcWHOg==", + "requires": {} }, "@aws-crypto/crc32": { "version": "3.0.0", @@ -23968,6 +24577,12 @@ } } }, + "@eslint/js": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.35.0.tgz", + "integrity": "sha512-JXdzbRiWclLVoD8sNUjR443VVlYqiYmDVT6rGUEIEHU5YJW0gaVZwV2xgM7D4arkvASqD0IlLUVjHiFuxaftRw==", + "dev": true + }, "@gar/promisify": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", @@ -24016,6 +24631,12 @@ "minimatch": "^3.0.4" } }, + "@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true + }, "@humanwhocodes/object-schema": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", @@ -25795,6 +26416,15 @@ "dev": true, "optional": true }, + "builtins": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", + "integrity": "sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==", + "dev": true, + "requires": { + "semver": "^7.0.0" + } + }, "busboy": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", @@ -27255,6 +27885,51 @@ } } }, + "eslint-plugin-n": { + "version": "15.6.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.6.1.tgz", + "integrity": "sha512-R9xw9OtCRxxaxaszTQmQAlPgM+RdGjaL1akWuY/Fv9fRAi8Wj4CUKc6iYVG8QNRjRuo8/BqVYIpfqberJUEacA==", + "dev": true, + "requires": { + "builtins": "^5.0.1", + "eslint-plugin-es": "^4.1.0", + "eslint-utils": "^3.0.0", + "ignore": "^5.1.1", + "is-core-module": "^2.11.0", + "minimatch": "^3.1.2", + "resolve": "^1.22.1", + "semver": "^7.3.8" + }, + "dependencies": { + "eslint-plugin-es": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", + "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", + "dev": true, + "requires": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "dependencies": { + "eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + } + } + }, + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true + } + } + }, "eslint-plugin-node": { "version": "11.1.0", "resolved": 
"https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", @@ -27423,9 +28098,9 @@ "dev": true }, "esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, "requires": { "estraverse": "^5.1.0" @@ -27918,9 +28593,9 @@ } }, "globals": { - "version": "13.18.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.18.0.tgz", - "integrity": "sha512-/mR4KI8Ps2spmoc0Ulu9L7agOF0du1CZNQ3dke8yItYlyKNmGrkONemBbd6V8UTc1Wgcqn21t3WYB7dbRmh6/A==", + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", "dev": true, "requires": { "type-fest": "^0.20.2" @@ -27954,6 +28629,12 @@ "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", "dev": true }, + "grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, "graphql": { "version": "16.6.0", "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.6.0.tgz", @@ -28893,6 +29574,12 @@ "integrity": "sha512-YRv4Tk/Wlug8qicwqFNFVEZSdbROCHRAC6qu/i0dyNKr5JQdoa2pIGoS04lLO/jXQX7Z9omoNewYIVIxqZBd9Q==", "dev": true }, + "js-sdsl": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz", + "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==", + "dev": true + }, "js-string-escape": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", @@ -30150,9 +30837,330 @@ "multiformats": "^9.6.5", "node-fetch": "^3.2.10", "pino": "8.8.0", - "sqs-consumer": "^5.7.0", - "standard": "^16.0.4", + "squiss-ts": "^4.4.1", + "standard": "^17.0.0", "testcontainers": "^8.10.1" + }, + "dependencies": { + "@eslint/eslintrc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.0.tgz", + "integrity": "sha512-fluIaaV+GyV24CCu/ggiHdV+j4RNh85yQnAYS/G2mZODZgGmmlrgCydjUcV3YvxCm9x8nMAfThsqTni4KiXT4A==", + "dev": true, + "requires": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.4.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + } + }, + "@humanwhocodes/config-array": { + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", + "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "dev": true, + "requires": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + } + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + 
"requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "eslint": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.35.0.tgz", + "integrity": "sha512-BxAf1fVL7w+JLRQhWl2pzGeSiGqbWumV4WNvc9Rhp6tiCtm4oHnyPBSEtMGZwrQgudFQ+otqzWoPB7x+hxoWsw==", + "dev": true, + "requires": { + "@eslint/eslintrc": "^2.0.0", + "@eslint/js": "8.35.0", + "@humanwhocodes/config-array": "^0.11.8", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.4.0", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + } + }, + "eslint-config-standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.0.0.tgz", + "integrity": "sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==", + "dev": true, + "requires": {} + }, + "eslint-config-standard-jsx": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-11.0.0.tgz", + "integrity": "sha512-+1EV/R0JxEK1L0NGolAr8Iktm3Rgotx3BKwgaX+eAuSX8D952LULKtjgZD3F+e6SvibONnhLwoTi9DPxN5LvvQ==", + "dev": true, + "requires": {} + }, + "eslint-plugin-promise": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", + "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", + "dev": true, + "requires": {} + }, + "eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + } + }, + "espree": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz", + "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==", + "dev": true, + "requires": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": 
"sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "requires": { + "is-glob": "^4.0.3" + } + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "load-json-file": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-5.3.0.tgz", + "integrity": "sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.15", + "parse-json": "^4.0.0", + "pify": "^4.0.1", + "strip-bom": "^3.0.0", + "type-fest": "^0.3.0" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "requires": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + } + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "pkg-conf": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-3.1.0.tgz", + "integrity": "sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ==", + "dev": true, + "requires": { + "find-up": "^3.0.0", + "load-json-file": "^5.2.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": 
"sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true + } + } + }, + "standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/standard/-/standard-17.0.0.tgz", + "integrity": "sha512-GlCM9nzbLUkr+TYR5I2WQoIah4wHA2lMauqbyPLV/oI5gJxqhHzhjl9EG2N0lr/nRqI3KCbCvm/W3smxvLaChA==", + "dev": true, + "requires": { + "eslint": "^8.13.0", + "eslint-config-standard": "17.0.0", + "eslint-config-standard-jsx": "^11.0.0", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-n": "^15.1.0", + "eslint-plugin-promise": "^6.0.0", + "eslint-plugin-react": "^7.28.0", + "standard-engine": "^15.0.0" + } + }, + "standard-engine": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/standard-engine/-/standard-engine-15.0.0.tgz", + "integrity": "sha512-4xwUhJNo1g/L2cleysUqUv7/btn7GEbYJvmgKrQ2vd/8pkTmN8cpqAZg+BT8Z1hNeEH787iWUdOpL8fmApLtxA==", + "dev": true, + "requires": { + "get-stdin": "^8.0.0", + "minimist": "^1.2.6", + "pkg-conf": "^3.1.0", + "xdg-basedir": "^4.0.0" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "type-fest": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.3.1.tgz", + "integrity": "sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==", + "dev": true + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } } }, "pickup-api": { diff --git a/package.json b/package.json index d5459be..0c6c276 100644 --- a/package.json +++ b/package.json @@ -35,8 +35,7 @@ ], "dependencies": { "@aws-cdk/aws-apigatewayv2-alpha": "^2.50.0-alpha.0", - "aws-cdk-lib": "2.50.0", - "squiss-ts": "^4.4.1" + "aws-cdk-lib": "2.50.0" }, "ts-standard": { "ignore": [ diff --git a/pickup/index.js b/pickup/index.js index ab5d381..3964ceb 100644 --- a/pickup/index.js +++ b/pickup/index.js @@ -2,9 +2,14 @@ import { createPickupFromEnv } from './lib/pickup' import { logger } from './lib/logger.js' async function start () { - logger.info({}, 'Pickup starting...') - const pickup = await createPickupFromEnv() - await pickup.start() + logger.info({}, 'Pickup starting...') + try { + const pickup = await createPickupFromEnv() + await pickup.start() + } catch (err) { + logger.error(err, 'Pickup ded!') + throw err + } } start() diff --git a/pickup/lib/consumer.js b/pickup/lib/consumer.js deleted file mode 100644 index e16e1c4..0000000 --- a/pickup/lib/consumer.js +++ /dev/null @@ -1,153 +0,0 @@ -import retry from 'async-retry' -import { Consumer } 
from 'sqs-consumer' -import { DynamoDBClient } from '@aws-sdk/client-dynamodb' - -import { createS3Uploader } from './s3.js' -import { testIpfsApi } from './ipfs.js' -import { pickupBatch } from './pickupBatch.js' -import { logger } from './logger.js' - -/** - * Delete a message from ths SQS queue - * - * @param {import('@aws-sdk/client-sqs'.SQSClient)} opts.sqs - The SQS client - * @param string queueUrl - The Sqs Queue URL - * @param {import('@aws-sdk/client-sqs'.Message)} opts.message - The SQS message - * @returns {Promise} - */ -export async function deleteMessage ({ sqs, queueUrl }, message) { - const deleteParams = { - QueueUrl: queueUrl, - ReceiptHandle: message.ReceiptHandle - } - try { - await sqs - .deleteMessage(deleteParams) - .promise() - } catch (err) { - logger.error({ err }, 'SQS delete message failed') - throw err - } -} - -/** - * Create the consumer for the SQS queue. - * - * @param {string} ipfsApiUrl - The URL of the IPFS server. - * @param {string} queueUrl - The Sqs Queue URL. - * @param {import('@aws-sdk/client-s3'.S3Client)} s3 - The S3 client. - * @param {number} batchSize - The size of the concurrent batch. - * @param {number} maxRetry - The number of max retry before set the pin request to failed. - * @param {number} visibilityTimeout - The message visibility timeout in seconds, used internally by sqs-consumer. - * @param {number} heartbeatInterval - The message heartbeatInterval in seconds, used internally by sqs-consumer. - * @param {number} handleMessageTimeout - The max limit for the car download in milliseconds, used internally by sqs-consumer. - * @param {number} testMaxRetry - The max retry to check if the IPFS server is available. - * @param {number} testTimeoutMs - The timeout in millisecond for each IPFS availability try. - * @param {number} timeoutFetchMs - The timeout for each fetch in milliseconds. The Download is set to `failed` if the IPFS server - * fetch action do not respond while is downloading the blocks. - * @param {string} dynamoTable - The dynamo DB table - * @param {string} dynamoEndpoint - The dynamo DB endpoint - * @param {string} validationBucket - The s3 bucket for the validation, if exists replace the save bucket - * @param {DownloadStatusManager} downloadStatusManager - * @param {Number} downloadStatusLoggerSeconds - The interval in seconds for the download state - * @returns {Promise} - */ -export async function createConsumer ({ - ipfsApiUrl, - queueUrl, - s3, - batchSize = 10, - maxRetry = 5, - visibilityTimeout = 20, - heartbeatInterval = 10, - handleMessageTimeout = 4 * 60 * 60 * 1000, - testMaxRetry = 5, - testTimeoutMs = 10000, - timeoutFetchMs = 30000, - dynamoTable, - dynamoEndpoint, - validationBucket, - downloadStatusManager, - downloadStatusLoggerSeconds = 300 // logs every 5 minutes -}) { - // throws if can't connect - await retry(() => { - return testIpfsApi(ipfsApiUrl, testTimeoutMs) - }, { retries: testMaxRetry }) - - const dynamo = new DynamoDBClient({ endpoint: dynamoEndpoint }) - - logger.info({ - batchSize, - visibilityTimeout, - heartbeatInterval, - queueUrl, - handleMessageTimeout, - maxRetry, - timeoutFetchMs, - validationBucket - }, 'Create sqs consumer') - - const app = Consumer.create({ - queueUrl, - // The message deletion is managed manually - shouldDeleteMessages: false, - // needs partial acks before we can increase batch size - // see: https://github.com/bbc/sqs-consumer/pull/255 - batchSize, // 1 to 10 - visibilityTimeout, // seconds, how long to hide message from queue after reading. 
- heartbeatInterval, // seconds, must be lower than `visibilityTimeout`. how long before increasing the `visibilityTimeout` - attributeNames: ['ApproximateReceiveCount'], // log retries - // allow 4hrs before timeout. 2/3rs of the world can upload faster than - // 20Mbit/s (fixed broadband), at which 32GiB would transfer in 3.5hrs. - // we can make this more or less generous, but note it ties up a worker. - // see: https://www.speedtest.net/global-index - // see: https://www.omnicalculator.com/other/download-time?c=GBP&v=fileSize:32!gigabyte,downloadSpeed:5!megabit - // TODO: enforce 32GiB limit - handleMessageTimeout, // ms, error if processing takes longer than this. - handleMessageBatch: async (messages) => { - return pickupBatch(messages, { - ipfsApiUrl, - createS3Uploader, - s3, - queueManager: app, - dynamo, - dynamoTable, - validationBucket, - timeoutFetchMs, - maxRetry, - downloadStatusManager - }) - } - }) - - const downloadStatusLoggerInterval = setInterval(() => { - const status = downloadStatusManager.getStatus() - if (Object.keys(status).length) { - logger.info(status, 'DownloadStatus') - } - }, downloadStatusLoggerSeconds * 1000) - - downloadStatusLoggerInterval.unref() - - app.on('stopped', () => { - clearInterval(downloadStatusLoggerInterval) - }) - - app.on('error', (err) => { - if ( - (err.code === 'MissingParameter' || err.constructor.name === 'MissingParameter') && - err.message.includes('Error changing visibility timeout: The request must contain the parameter ChangeMessageVisibilityBatchRequestEntry')) { - logger.trace({ err }, 'The sqs-library is trying to change the visibility of the timeout of an empty message list') - return - } - - logger.error({ err }, 'App error') - }) - - app.on('processing_error', (err) => { - logger.error({ err }, 'App processing error') - }) - - return app -} diff --git a/pickup/lib/downloadStatusManager.js b/pickup/lib/downloadStatusManager.js deleted file mode 100644 index 44f64c4..0000000 --- a/pickup/lib/downloadStatusManager.js +++ /dev/null @@ -1,33 +0,0 @@ -export const STATE_QUEUED = 'queued' -export const STATE_DOWNLOADING = 'downloading' -export const STATE_FAILED = 'failed' -export const STATE_TIMEOUT = 'timeout' -export const STATE_DONE = 'done' - -export class DownloadStatusManager { - constructor () { - this.status = {} - } - - reset () { - this.status = {} - } - - setStatus (cid, state, size) { - this.status[cid] = { ...this.status[cid], state } - - if (size !== undefined) { - this.status[cid] = { ...this.status[cid], size } - } - } - - getStatus () { - return this.status - } - - isRunning () { - return Object.keys(this.status).length > 0 && !!Object.values(this.status).find(s => { - return !['done', 'failed', 'timeout'].includes(s.state) - }) - } -} diff --git a/pickup/lib/dynamo.js b/pickup/lib/dynamo.js deleted file mode 100644 index 483efac..0000000 --- a/pickup/lib/dynamo.js +++ /dev/null @@ -1,43 +0,0 @@ -import { UpdateCommand } from '@aws-sdk/lib-dynamodb' -import { logger } from './logger.js' - -/** - * Update the pin status for a given CID - * - * @param {import('@aws-sdk/lib-dynamodb'.DynamoDBClient)} dynamo - * @param {string} table - * @param {cid} string - * @param {string} status - * @param {string} error - */ -export async function updatePinStatus ({ dynamo, table, cid, status, error }) { - try { - logger.trace({ cid, status }, 'Dynamo try to update pin status') - - const command = { - TableName: table, - Key: { cid }, - ExpressionAttributeNames: { - '#status': 'status', - '#error': 'error', - 
'#downloadFailedAt': 'downloadFailedAt' - }, - ExpressionAttributeValues: { - ':s': status, - ':e': error || '', - ':df': new Date().toISOString() - }, - UpdateExpression: 'set #status = :s, #error = :e, #downloadFailedAt = :df', - ReturnValues: 'ALL_NEW' - } - - logger.trace({ cid, command }, 'Dynamo command') - const res = await dynamo.send(new UpdateCommand(command)) - - logger.trace({ res, cid, status }, 'Dynamo pin status updated') - return res.Attributes - } catch (err) { - logger.error({ cid, status }, 'Dynamo pin status error') - throw err - } -} diff --git a/pickup/lib/ipfs.js b/pickup/lib/ipfs.js index d77e74c..4cdf5ab 100644 --- a/pickup/lib/ipfs.js +++ b/pickup/lib/ipfs.js @@ -1,4 +1,4 @@ -import { compose, Readable } from 'node:stream' +import { compose } from 'node:stream' import { CID } from 'multiformats/cid' import { Multiaddr } from 'multiaddr' import debounce from 'debounce' @@ -7,6 +7,8 @@ import { logger } from './logger.js' export const ERROR_TIMEOUT = 'TIMEOUT' +/** @typedef {import('node:stream').Readable} Readable */ + /** * Start the fetch of a car * @@ -16,7 +18,7 @@ export const ERROR_TIMEOUT = 'TIMEOUT' * @param {number} timeoutMs - The timeout for each block fetch in milliseconds. * @returns {Promise} */ -export async function fetchCar ({cid, ipfsApiUrl, abortCtl = new AbortController(), timeoutMs = 30000}) { +export async function fetchCar ({ cid, ipfsApiUrl, abortCtl = new AbortController(), timeoutMs = 30000 }) { if (!isCID(cid)) { throw new Error({ message: `Invalid CID: ${cid}` }) } @@ -24,7 +26,7 @@ export async function fetchCar ({cid, ipfsApiUrl, abortCtl = new AbortController const startCountdown = debounce(() => abortCtl.abort(), timeoutMs) startCountdown() - + const signal = abortCtl.signal const res = await fetch(url, { method: 'POST', signal }) if (!res.ok) { @@ -39,7 +41,7 @@ export async function fetchCar ({cid, ipfsApiUrl, abortCtl = new AbortController startCountdown.clear() } - return compose(res.body, restartCountdown, { signal }) + return compose(res.body, restartCountdown) } /** @@ -146,8 +148,8 @@ export async function testIpfsApi (ipfsApiUrl, timeoutMs = 10000) { export class CarFetcher { /** * @param {object} config - * @param {string} ipfsApiUrl - * @param {number} fetchTimeoutMs + * @param {string} config.ipfsApiUrl + * @param {number} config.fetchTimeoutMs */ constructor ({ ipfsApiUrl, fetchTimeoutMs = 60000 }) { this.ipfsApiUrl = ipfsApiUrl @@ -158,7 +160,7 @@ export class CarFetcher { * @param {object} config * @param {string} config.cid * @param {string[]} config.origins - * @param {(Readable) => Promise} config.upload + * @param {(body: Readable) => Promise} config.upload */ async fetch ({ cid, origins, upload }) { const { ipfsApiUrl, fetchTimeoutMs } = this diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js index 523a96f..d9d21bf 100644 --- a/pickup/lib/pickup.js +++ b/pickup/lib/pickup.js @@ -1,11 +1,11 @@ -import { Squiss, Message } from 'squiss-ts' +import { Squiss } from 'squiss-ts' import { CarFetcher, testIpfsApi } from './ipfs.js' import { S3Uploader } from './s3.js' import { logger } from './logger.js' /** * Use me in prod to set all the things. 
- * + * * @param {Record} env */ export async function createPickupFromEnv (env = process.env) { @@ -16,22 +16,21 @@ export async function createPickupFromEnv (env = process.env) { TIMEOUT_FETCH, VALIDATION_BUCKET } = env - + if (!IPFS_API_URL) throw new Error('IPFS_API_URL not found in ENV') if (!SQS_QUEUE_URL) throw new Error('SQS_QUEUE_URL not found in ENV') if (!VALIDATION_BUCKET) throw new Error('VALIDATION_BUCKET not found in ENV') const pickup = await createPickup({ - queuePoller: createSqsPoller({ + sqsPoller: createSqsPoller({ queueUrl: SQS_QUEUE_URL, - maxInFlight: BATCH_SIZE + maxInFlight: BATCH_SIZE }), carFetcher: new CarFetcher({ ipfsApiUrl: IPFS_API_URL, fetchTimeoutMs: TIMEOUT_FETCH }), s3Uploader: new S3Uploader({ - s3: new S3Client(), bucket: VALIDATION_BUCKET }) }) @@ -41,35 +40,36 @@ export async function createPickupFromEnv (env = process.env) { /** * @param {object} config - * @param {Squiss} config.queuePoller + * @param {Squiss} config.sqsPoller * @param {CarFetcher} config.carFetcher * @param {S3Uploader} config.s3Uploader */ -export async function createPickup ({ queuePoller, carFetcher, s3Uploader }) { +export async function createPickup ({ sqsPoller, carFetcher, s3Uploader }) { + // throw if we can't connect to kubo await testIpfsApi(carFetcher.ipfsApiUrl) /** - * @param {Message} msg + * @param {import('squiss-ts').Message} msg */ async function messageHandler (msg) { const { cid, origins, key } = msg.body try { logger.info({ cid }, 'Fetching car') - const uploader = s3Uploader.createUploader({ cid, key }) - await carFetcher.fetch({ cid, origins, uploader }) + const upload = s3Uploader.createUploader({ cid, key }) + await carFetcher.fetch({ cid, origins, upload }) logger.info({ cid }, 'OK. Car in S3') msg.del() // the message is handled, remove it from queue. - } catch (error) { - logger.info({ cid, error }, `Error ${ err.message || err }`) + } catch (err) { + logger.error({ cid, err }, 'Failed to fetch CAR') // return the msg to the queue for another go msg.release() } } - - queuePoller.on('message', messageHandler) - - return queuePoller - } + + sqsPoller.on('message', messageHandler) + + return sqsPoller +} /** * @param {import('squiss-ts').ISquissOptions} config @@ -78,8 +78,16 @@ export function createSqsPoller (config) { return new Squiss({ // set our default overrides here, we always want these. autoExtendTimeout: true, + receiveSqsAttributes: ['ApproximateReceiveCount'], + + // allow 4hrs before timeout. 2/3rs of the world can upload faster than + // 20Mbit/s (fixed broadband), at which 32GiB would transfer in 3.5hrs. + // we can make this more or less generous, but note it ties up a worker. + // see: https://www.speedtest.net/global-index + // see: https://www.omnicalculator.com/other/download-time?c=GBP&v=fileSize:32!gigabyte,downloadSpeed:5!megabit + // TODO: enforce 32GiB limit + noExtensionsAfterSecs: 4 * 60 * 60, bodyFormat: 'json', ...config }) } - diff --git a/pickup/lib/pickupBatch.js b/pickup/lib/pickupBatch.js deleted file mode 100644 index 506d911..0000000 --- a/pickup/lib/pickupBatch.js +++ /dev/null @@ -1,148 +0,0 @@ -import { fetchCar, connectTo, disconnect, waitForGC, ERROR_TIMEOUT } from './ipfs.js' -import { deleteMessage } from './consumer.js' -import { updatePinStatus } from './dynamo.js' -import { logger } from './logger.js' -import { STATE_DONE, STATE_TIMEOUT, STATE_FAILED, STATE_QUEUED } from './downloadStatusManager.js' - -/** - * Fetch CARs for a batch of SQS messages. 
- * @param {import('sqs-consumer').SQSMessage[]} messages - * @param {string} ipfsApiUrl - * @param {Function} createS3Uploader - * @param {import('@aws-sdk/client-s3'.S3Client)} s3 - * @param {Consumer} queueManager - * @param {import('@aws-sdk/lib-dynamodb'.DynamoDBClient)} dynamo - * @param {string} dynamoTable - * @param {string} validationBucket - * @param {number} timeoutFetchMs - * @param {number} maxRetry - * @param {DownloadStatusManager} downloadStatusManager - * @returns {Promise} - */ -export async function pickupBatch (messages, { - ipfsApiUrl, - createS3Uploader, - s3, - queueManager, - dynamo, - dynamoTable, - validationBucket, - timeoutFetchMs, - maxRetry, - downloadStatusManager -}) { - const jobs = [] - const allOrigins = [] - - logger.info({ messages }, 'Pickup batch start') - - const requestIds = [] - - for (const message of messages) { - const { cid, origins, bucket, key, requestid } = JSON.parse(message.Body) - logger.trace({ cid, requestid }, 'Push message in job list') - jobs.push({ message, requestid, cid, upload: createS3Uploader({ bucket: validationBucket ?? bucket, key, client: s3 }) }) - allOrigins.concat(origins) - requestIds.push(requestid) - } - - // Prepare! - logger.trace({ allOrigins, ipfsApiUrl }, 'Wait for GC and connect to origins') - await Promise.all([ - waitForGC(ipfsApiUrl), - connectTo(allOrigins, ipfsApiUrl) - ]) - - logger.info(`Ready to process ${jobs.length} jobs`) - - // Stores the totalMessages because the `messages`array will be modified in the process - const totalMessages = messages.length - // Stores the requestIds to properly remove the items from the `messages` array - - // Collect the results, just for logging purpose - const resultStats = {} - - // Do! - const res = await Promise.allSettled(jobs.map(async job => { - const { message, cid, upload, requestid } = job - logger.info({ cid, requestid, messageId: message.MessageId }, 'Start job') - - downloadStatusManager.setStatus(cid, STATE_QUEUED) - // Inject a downloadError object to the `upload` function, is required to intercept the timeout error - // because the `abort` action do not allow to pass a code to the call - const downloadError = {} - - try { - const body = await fetchCar(cid, ipfsApiUrl, downloadError, timeoutFetchMs, downloadStatusManager) - logger.info({ cid, requestid, messageId: message.MessageId }, 'IPFS node responded, downloading the car') - await upload({ body, cid, downloadError }) - logger.info({ cid, requestid, messageId: message.MessageId }, 'Car downloaded and stored in S3') - - // After the download some low level action are required to override the `sqs-consumer` library limit. - // The library works with the full batch and the is required to remove the messages while they are processed. - // The processed message is removed from the queue list to avoid further changeVisabilityTimeout. - const arrayRemoveIndex = requestIds.indexOf(requestid) - requestIds.splice(Number(arrayRemoveIndex), 1) - messages.splice(Number(arrayRemoveIndex), 1) - logger.trace({ - cid, - requestid, - messageId: message.MessageId, - arrayRemoveIndex - }, 'Removed processed message from the messages') - - await deleteMessage(queueManager, message) - downloadStatusManager.setStatus(cid, STATE_DONE) - resultStats[cid] = 'success' - } catch (err) { - // The processed message is removed from the queue list to avoid further changeVisabilityTimeout. 
- const arrayRemoveIndex = requestIds.indexOf(requestid) - requestIds.splice(Number(arrayRemoveIndex), 1) - messages.splice(Number(arrayRemoveIndex), 1) - - const currentRetry = Number(message.Attributes.ApproximateReceiveCount) - if (downloadError.code === ERROR_TIMEOUT || - currentRetry >= maxRetry - ) { - const errorMessage = currentRetry >= maxRetry ? 'Max retry' : 'Download timeout' - logger.error({ cid, requestid, currentRetry, messageId: message.MessageI, arrayRemoveIndex }, - errorMessage) - // Update the status on dynamodb to failed - await updatePinStatus({ - dynamo, - table: dynamoTable, - cid, - status: 'failed', - error: errorMessage - }) - - // Delete the message from the queue - await deleteMessage(queueManager, message) - resultStats[cid] = 'timeout' - downloadStatusManager.setStatus(cid, STATE_TIMEOUT) - } else { - // For any other error the message from the queue is not removed, - // then when the visibility timeout is expired the file is resend on the queue - // A deadLetterQueue should be set to avoid infinite retry. - logger.error({ err, cid, requestid, messageId: message.MessageI, arrayRemoveIndex }, 'Download error') - resultStats[cid] = `fail: ${err.message}` - downloadStatusManager.setStatus(cid, STATE_FAILED) - } - throw err - } finally { - // The message_processed event is fired. - queueManager.emit('message_processed', message) - } - return message // hand back msg so we can ack all that succeded - })) - - // Clear the origins! - await disconnect(allOrigins, ipfsApiUrl) - - const ok = res.filter(r => r.status === 'fulfilled').map(r => r.value) - logger.info({ resultStats, success: ok.length, total: totalMessages }, 'Done processing batch.') - - logger.info(downloadStatusManager.getStatus()) - downloadStatusManager.reset() - return ok -} diff --git a/pickup/lib/s3.js b/pickup/lib/s3.js index 78a772e..3ed6146 100644 --- a/pickup/lib/s3.js +++ b/pickup/lib/s3.js @@ -8,24 +8,25 @@ import { logger } from './logger.js' * @param {import('@aws-sdk/client-s3'.S3Client)} client * @param {string} bucket * @param {string} key - * @param {Stream} body + * @param {Readable} body * @param {string} cid * @returns {Promise} */ export async function sendToS3 ({ client, bucket, key, body, cid }) { - const params = { - Metadata: { structure: 'complete' }, - Bucket: bucket, - Key: key, - Body: body - } - // Handles s3 multipart uploading // see: https://github.com/aws/aws-sdk-js-v3/blob/main/lib/lib-storage/README.md - const s3Upload = new Upload({ client, params }) + const s3Upload = new Upload({ + client, + params: { + Metadata: { structure: 'complete' }, + Bucket: bucket, + Key: key, + Body: body + } + }) body.on('error', (error) => { - if (err.code === 'AbortError' || err.constructor.name === 'AbortError') { + if (error.code === 'AbortError' || error.constructor.name === 'AbortError') { logger.trace({ error, cid }, 'The abort command was thrown by a ipfs timeout') return } @@ -63,7 +64,7 @@ export class S3Uploader { * @param {S3Client} s3 * @param {string} bucket */ - constructor ({ s3, bucket }) { + constructor ({ s3 = new S3Client(), bucket }) { this.s3 = s3 this.bucket = bucket } @@ -75,6 +76,10 @@ export class S3Uploader { */ createUploader ({ cid, key }) { const { s3, bucket } = this + /** + * @typedef {import('node:stream').Readable} Readable + * @param {Readable} body + */ return async function (body) { return sendToS3({ client: s3, diff --git a/pickup/package.json b/pickup/package.json index 51a5b12..35fa3d2 100644 --- a/pickup/package.json +++ b/pickup/package.json @@ 
-24,17 +24,17 @@ "multiformats": "^9.6.5", "node-fetch": "^3.2.10", "pino": "8.8.0", - "sqs-consumer": "^5.7.0" + "squiss-ts": "^4.4.1" }, "devDependencies": { "@aws-sdk/client-sqs": "^3.118.1", "@web-std/blob": "^3.0.4", "ava": "^4.3.0", "ipfs-car": "^0.7.0", - "standard": "^16.0.4", + "standard": "^17.0.0", "testcontainers": "^8.10.1" }, "engines": { "node": ">=16.14" } -} +} \ No newline at end of file diff --git a/pickup/test/_helpers.js b/pickup/test/_helpers.js index 3e857b4..e4157d5 100644 --- a/pickup/test/_helpers.js +++ b/pickup/test/_helpers.js @@ -81,21 +81,11 @@ export async function sleep (ms) { return new Promise((resolve) => setTimeout(() => resolve(), ms)) } -export async function verifyMessage ({ msg, cars, dynamoClient, dynamoTable, t, bucket, s3, expectedError }) { +export async function verifyMessage ({ msg, cars, t, bucket, s3 }) { try { - const message = JSON.parse(msg.Body) + const message = msg.body const index = Number(message.requestid) - // If there is a timeout, the dynamo item status should be updated to `failed` - if (cars[index].expectedResult === 'failed') { - const item = await getValueFromDynamo({ dynamoClient, dynamoTable, cid: cars[index].cid }) - t.is(item.cid, cars[index].cid) - t.is(item.status, 'failed') - t.is(item.error, expectedError) - t.truthy(item.downloadFailedAt > item.created) - } else if (cars[index].expectedResult === 'error') { - // after the first error, the expectedResult of the car is set to success to allow the upload in the next step - cars[index].expectedResult = 'success' - } else { + if (cars[index].expectedResult === 'success') { // If succeed, the s3 file should have the same content of the car generated const { cid: msgCid } = message t.is(msgCid, cars[index].cid) @@ -112,7 +102,7 @@ export async function verifyMessage ({ msg, cars, dynamoClient, dynamoTable, t, } } -export async function stopConsumer (consumer) { +export async function stopPickup (consumer) { consumer.stop() return await retry(() => !consumer.isRunning, { retries: 5 diff --git a/pickup/test/consumer.test.js b/pickup/test/consumer.test.js deleted file mode 100644 index 961686b..0000000 --- a/pickup/test/consumer.test.js +++ /dev/null @@ -1,85 +0,0 @@ -import { SendMessageCommand } from '@aws-sdk/client-sqs' -import { Consumer } from 'sqs-consumer' -import { createConsumer } from '../lib/consumer.js' -import { compose } from './_compose.js' -import test from 'ava' -import { DownloadStatusManager } from '../lib/downloadStatusManager.js' - -test.before(async t => { - t.timeout(1000 * 60) - t.context = await compose() -}) - -test.after(async t => { - await t.context.shutDownDockers() -}) - -// verify the lib behaves as expected -test('sqs-consumer', async t => { - const testCid = 'hello!' 
- const { sqs, createQueue } = t.context - - const QueueUrl = await createQueue() - await sqs.send(new SendMessageCommand({ - DelaySeconds: 1, - MessageBody: JSON.stringify({ cid: testCid }), - QueueUrl - })) - - await new Promise((resolve, reject) => { - const app = Consumer.create({ - queueUrl: QueueUrl, - handleMessage: async (message) => { - const res = JSON.parse(message.Body) - t.is(res.cid, testCid) - app.stop() - resolve(true) - } - }) - app.on('error', (err) => { - reject(err) - }) - app.on('processing_error', (err) => { - reject(err) - }) - app.on('timeout_error', (err) => { - reject(err) - }) - app.start() - }) -}) - -test('createConsumer', async t => { - t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3 } = t.context - - const cid = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - const key = `psa/${cid}.car` - const queueUrl = await createQueue() - const bucket = await createBucket() - const consumer = await createConsumer({ ipfsApiUrl, queueUrl, s3, downloadStatusManager: new DownloadStatusManager() }) - const done = new Promise((resolve, reject) => { - consumer.on('message_processed', msg => { - const { cid: msgCid } = JSON.parse(msg.Body) - t.is(msgCid, cid) - resolve() - }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) - }) - consumer.start() - - await sqs.send(new SendMessageCommand({ - DelaySeconds: 1, - MessageBody: JSON.stringify({ cid, bucket, key, origins: [], requestid: 'test1' }), - QueueUrl: queueUrl - })) - - return done -}) - -test('createConsumer errors if can\'t connect to IPFS', async t => { - const { createQueue } = t.context - const queueUrl = await createQueue() - await t.throwsAsync(createConsumer({ ipfsApiUrl: 'http://127.0.0.1', queueUrl, downloadStatusManager: new DownloadStatusManager() })) -}) diff --git a/pickup/test/pickup.test.js b/pickup/test/pickup.test.js index 4ed75eb..9d61f7c 100644 --- a/pickup/test/pickup.test.js +++ b/pickup/test/pickup.test.js @@ -1,11 +1,11 @@ import nock from 'nock' import test from 'ava' import { SendMessageCommand } from '@aws-sdk/client-sqs' - -import { createPickup } from '../lib/pickup.js' +import { createPickup, createSqsPoller } from '../lib/pickup.js' import { compose } from './_compose.js' -import { prepareCid, verifyMessage, sleep, getMessagesFromSQS, stopConsumer, getValueFromDynamo } from './_helpers.js' -import { DownloadStatusManager, STATE_DONE, STATE_QUEUED, STATE_DOWNLOADING } from '../lib/downloadStatusManager.js' +import { prepareCid, verifyMessage, sleep, getMessagesFromSQS, stopPickup } from './_helpers.js' +import { CarFetcher } from '../lib/ipfs.js' +import { S3Uploader } from '../lib/s3.js' test.before(async t => { t.timeout(1000 * 60) @@ -18,20 +18,14 @@ test.after(async t => { }) test('throw an error if can\'t connect to IPFS', async t => { - const { createQueue } = t.context - const queueUrl = await createQueue() - await t.throwsAsync(createConsumer({ - ipfsApiUrl: 'http://127.0.0.1', - queueUrl, - testMaxRetry: 1, - testTimeoutMs: 50, - downloadStatusManager: new DownloadStatusManager() + await t.throwsAsync(createPickup({ + carFetcher: new CarFetcher({ ipfsApiUrl: 'http://127.0.0.1' }) })) }) test('Process 3 messages concurrently and the last has a timeout', async t => { t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoEndpoint, dynamoTable } = t.context + const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoTable } = t.context const queueUrl = await 
createQueue() const bucket = await createBucket() @@ -50,7 +44,7 @@ test('Process 3 messages concurrently and the last has a timeout', async t => { .post('/api/v0/id')// Alive .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') + .reply(200, 'GC Success').persist() cars.forEach((car, index) => { nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin @@ -67,28 +61,18 @@ test('Process 3 messages concurrently and the last has a timeout', async t => { })) } - // Create the consumer - const consumer = await createConsumer( - { - ipfsApiUrl, - queueUrl, - s3, - heartbeatInterval: 2, - visibilityTimeout: 3, - dynamoEndpoint, - dynamoTable, - timeoutFetchMs: 2000, - downloadStatusManager: new DownloadStatusManager(), - validationBucket - } - ) + const pickup = await createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 2000 }), + s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) + }) // The number of the messages resolved, when is max close the test and finalize let resolved = 0 const done = new Promise((resolve, reject) => { - consumer.on('message_received', async msg => { - const message = JSON.parse(msg.Body) + pickup.on('message', async msg => { + const message = msg.body const index = Number(message.requestid) if (cars[index].expectedResult !== 'error') { const myBuffer = await cars[index].car.arrayBuffer() @@ -100,9 +84,9 @@ test('Process 3 messages concurrently and the last has a timeout', async t => { } }) - consumer.on('message_processed', async msg => { + pickup.on('handled', async msg => { try { - await verifyMessage({ msg, cars, dynamoClient, dynamoTable, t, bucket: validationBucket, s3, expectedError: 'Download timeout' }) + await verifyMessage({ msg, cars, t, bucket: validationBucket, s3 }) resolved++ if (resolved === cars.length) { @@ -110,32 +94,32 @@ test('Process 3 messages concurrently and the last has a timeout', async t => { const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) t.is(resultMessages, undefined) nockPickup.done() - await stopConsumer(consumer) + await stopPickup(pickup) resolve() } } catch (e) { reject(e) } }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) + pickup.on('error', reject) + pickup.on('timeoutReached', reject) }) - consumer.start() + pickup.start() return done }) test('Process 1 message that fails and returns in the list', async t => { t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoEndpoint, dynamoTable } = t.context + const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoTable } = t.context const queueUrl = await createQueue() const bucket = await createBucket() // Preapre the data for the test const cars = [ - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'error' }) + await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }) ] // Configure nock to mock the response @@ -163,278 +147,51 @@ test('Process 1 message that fails and returns in the list', async t => { })) } - // Create the consumer - const consumer = await createConsumer( - { - ipfsApiUrl, - queueUrl, - s3, - heartbeatInterval: 2, - visibilityTimeout: 3, - dynamoEndpoint, - dynamoTable, - timeoutFetchMs: 2000, - downloadStatusManager: new 
DownloadStatusManager() - } - ) - - // The number of the messages resolved, when is max close the test and finalize - let resolved = 0 - - const done = new Promise((resolve, reject) => { - consumer.on('message_received', async msg => { - const message = JSON.parse(msg.Body) - const index = Number(message.requestid) - if (cars[index].expectedResult !== 'error') { - const myBuffer = await cars[index].car.arrayBuffer() - - cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(0, 10))) - await sleep(cars[index].timeBetweenChunks) - cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(10))) - cars[index].carReadableStream.push(null) - } - }) - - consumer.on('message_processed', async msg => { - try { - await verifyMessage({ msg, cars, dynamoClient, dynamoTable, t, bucket, s3 }) - resolved++ - - // The +1 is add to manage the second try - if (resolved === cars.length + 1) { - await sleep(5) - const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) - t.is(resultMessages, undefined) - - nockPickup.done() - await stopConsumer(consumer) - resolve() - } - } catch (e) { - reject(e) - } - }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) - }) - - consumer.start() - - return done -}) - -test('Process 3 messages concurrently and the last has an error', async t => { - t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoEndpoint, dynamoTable } = t.context - - const queueUrl = await createQueue() - const bucket = await createBucket() - - // Preapre the data for the test - const cars = [ - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'error' }) - ] - - // Configure nock to mock the response - const nockPickup = nock(ipfsApiUrl) - nockPickup - .post('/api/v0/id')// Alive - .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) - .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') - .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') - - cars.forEach((car, index) => { - if (car.expectedResult === 'error') { - nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin - .reply(400, () => { - return 'OK' - }) - } - nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin - .reply(200, () => { - return cars[index].carReadableStream - }) + const pickup = await createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 5000 }), + s3Uploader: new S3Uploader({ s3, bucket }) }) - // Send the SQS messages in queue - for (let i = 0; i < cars.length; i++) { - await sqs.send(new SendMessageCommand({ - MessageBody: JSON.stringify({ cid: cars[i].cid, bucket, key: cars[i].key, origins: [], requestid: i }), - QueueUrl: queueUrl - })) - } - - const downloadStatusManager = new DownloadStatusManager() - // Create the consumer - const consumer = await createConsumer( - { - ipfsApiUrl, - queueUrl, - s3, - heartbeatInterval: 2, - visibilityTimeout: 3, - dynamoEndpoint, - dynamoTable, - timeoutFetchMs: 2000, - downloadStatusManager, - downloadStatusLoggerSeconds: 1 - } - ) - // The number of the messages resolved, when is max close the test and finalize - let resolved = 0 + let 
rx = 0 const done = new Promise((resolve, reject) => { - consumer.on('message_received', async msg => { - const message = JSON.parse(msg.Body) + pickup.on('message', async msg => { + rx++ + if (rx !== 2) return + const message = msg.body const index = Number(message.requestid) if (cars[index].expectedResult !== 'error') { const myBuffer = await cars[index].car.arrayBuffer() - await sleep(cars[index].timeBetweenChunks) - t.deepEqual(downloadStatusManager.getStatus()[cars[index].cid], { - state: STATE_QUEUED - }) cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(0, 10))) await sleep(cars[index].timeBetweenChunks) - t.deepEqual(downloadStatusManager.getStatus()[cars[index].cid], { - size: 10, - state: STATE_DOWNLOADING - }) cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(10))) - await sleep(cars[index].timeBetweenChunks) - t.truthy(downloadStatusManager.getStatus()[cars[index].cid].size > 100) - t.truthy(downloadStatusManager.getStatus()[cars[index].cid].state, STATE_DOWNLOADING) cars[index].carReadableStream.push(null) - t.truthy(downloadStatusManager.getStatus()[cars[index].cid].state, STATE_DONE) } }) - consumer.on('message_processed', async msg => { + pickup.on('deleted', async res => { + const msg = res.msg try { await verifyMessage({ msg, cars, dynamoClient, dynamoTable, t, bucket, s3 }) - resolved++ - - if (resolved === cars.length + 1) { - await sleep(2000) - const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) - t.is(resultMessages, undefined) - await nockPickup.done() - t.falsy(Object.keys(downloadStatusManager.getStatus()).length) - await stopConsumer(consumer) - resolve() - } - } catch (e) { - reject(e) - } - }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) - }) - - consumer.start() - - return done -}) - -test('Process 1 message that fails with a max retry', async t => { - t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoEndpoint, dynamoTable } = t.context - - const queueUrl = await createQueue() - const bucket = await createBucket() - - // Preapre the data for the test - const cars = [ - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'error' }) - ] - - const retryNum = 3 - // Configure nock to mock the response - const nockPickup = nock(ipfsApiUrl) - nockPickup - .post('/api/v0/id')// Alive - .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) - for (let i = 0; i < retryNum; i++) { - nockPickup.post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') - nockPickup.post(`/api/v0/dag/export?arg=${cars[0].cid}`) // Get pin - .reply((uri, requestBody) => [400, 'KO']) - } - - // Send the SQS messages in queue - for (let i = 0; i < cars.length; i++) { - await sqs.send(new SendMessageCommand({ - MessageBody: JSON.stringify({ cid: cars[i].cid, bucket, key: cars[i].key, origins: [], requestid: i }), - QueueUrl: queueUrl - })) - } + await sleep(5) + const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) + t.is(resultMessages, undefined) - // Create the consumer - const consumer = await createConsumer( - { - ipfsApiUrl, - queueUrl, - s3, - heartbeatInterval: 2, - visibilityTimeout: 3, - dynamoEndpoint, - dynamoTable, - timeoutFetchMs: 2000, - maxRetry: 3, - downloadStatusManager: new DownloadStatusManager() - } - ) - - // The number of the messages resolved, when is max close the test and finalize - let resolved = 0 - - let 
shouldFail = false - const done = new Promise((resolve, reject) => { - consumer.on('message_received', async msg => { - if (Number(msg.Attributes.ApproximateReceiveCount) > retryNum) { - shouldFail = true - } - }) - - consumer.on('message_processed', async msg => { - try { - resolved++ - - // The +1 is add to manage the second try - if (resolved === retryNum) { - await sleep(50) - const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) - t.is(resultMessages, undefined) - - const item = await getValueFromDynamo({ dynamoClient, dynamoTable, cid: cars[0].cid }) - t.is(item.cid, cars[0].cid) - t.is(item.status, 'failed') - t.is(item.error, 'Max retry') - t.truthy(item.downloadFailedAt > item.created) - - await sleep(4000) - nockPickup.done() - await stopConsumer(consumer) - if (shouldFail) { - reject(new Error('Message not set to failed after max retry')) - } else { - resolve() - } - } + nockPickup.done() + await stopPickup(pickup) + resolve() } catch (e) { reject(e) } }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) + pickup.on('error', reject) + pickup.on('timeoutReached', reject) }) - consumer.start() + pickup.start() return done }) From 7bc7d913d91b0e21d21d35078b7679128c8f5eb4 Mon Sep 17 00:00:00 2001 From: Oli Evans Date: Thu, 9 Mar 2023 13:48:32 +0000 Subject: [PATCH 03/14] chore: docs License: MIT Signed-off-by: Oli Evans --- pickup/lib/pickup.js | 1 - 1 file changed, 1 deletion(-) diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js index d9d21bf..2a82889 100644 --- a/pickup/lib/pickup.js +++ b/pickup/lib/pickup.js @@ -82,7 +82,6 @@ export function createSqsPoller (config) { // allow 4hrs before timeout. 2/3rs of the world can upload faster than // 20Mbit/s (fixed broadband), at which 32GiB would transfer in 3.5hrs. - // we can make this more or less generous, but note it ties up a worker. // see: https://www.speedtest.net/global-index // see: https://www.omnicalculator.com/other/download-time?c=GBP&v=fileSize:32!gigabyte,downloadSpeed:5!megabit // TODO: enforce 32GiB limit From 4342ef830f7156ce46f80ea2af625cf4d7df5d41 Mon Sep 17 00:00:00 2001 From: Oli Evans Date: Thu, 9 Mar 2023 14:54:11 +0000 Subject: [PATCH 04/14] chore: fix init order and docker test test the api is available when start is called. License: MIT Signed-off-by: Oli Evans --- Dockerfile | 2 +- package-lock.json | 10 ++++++---- pickup/index.js | 7 ++++--- pickup/lib/ipfs.js | 5 +++++ pickup/lib/pickup.js | 19 +++++++++++-------- pickup/package.json | 4 ++-- pickup/test/_helpers.js | 2 +- test/docker.test.js | 11 +++-------- validator/package.json | 2 +- validator/test/_helpers.js | 2 +- 10 files changed, 35 insertions(+), 29 deletions(-) diff --git a/Dockerfile b/Dockerfile index ecf7a60..934e486 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,4 +7,4 @@ COPY package*.json ./ COPY pickup/package.json ./pickup/package.json RUN npm ci -w pickup --no-audit COPY . . 
-CMD [ "npm", "start", "-w", "pickup" ] \ No newline at end of file +CMD [ "npm", "start", "-w", "pickup", "--silent"] \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index fc9f2e9..1dd2639 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8155,6 +8155,7 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dev": true, "dependencies": { "retry": "0.13.1" } @@ -18654,11 +18655,11 @@ "@aws-sdk/lib-dynamodb": "^3.112.0", "@aws-sdk/lib-storage": "^3.97.0", "@multiformats/multiaddr": "^11.4.0", - "async-retry": "1.3.3", "debounce": "^1.2.1", "multiaddr": "^10.0.1", "multiformats": "^9.6.5", "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", "pino": "8.8.0", "squiss-ts": "^4.4.1" }, @@ -19177,12 +19178,12 @@ "@aws-sdk/lib-dynamodb": "^3.112.0", "@aws-sdk/lib-storage": "^3.97.0", "@ipld/car": "5.1.0", - "async-retry": "1.3.3", "debounce": "^1.2.1", "linkdex": "2.0.0", "multiaddr": "10.0.1", "multiformats": "11.0.1", "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", "pino": "8.8.0", "sqs-consumer": "^5.7.0" }, @@ -25812,6 +25813,7 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dev": true, "requires": { "retry": "0.13.1" } @@ -30829,13 +30831,13 @@ "@aws-sdk/lib-storage": "^3.97.0", "@multiformats/multiaddr": "^11.4.0", "@web-std/blob": "^3.0.4", - "async-retry": "1.3.3", "ava": "^4.3.0", "debounce": "^1.2.1", "ipfs-car": "^0.7.0", "multiaddr": "^10.0.1", "multiformats": "^9.6.5", "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", "pino": "8.8.0", "squiss-ts": "^4.4.1", "standard": "^17.0.0", @@ -33570,7 +33572,6 @@ "@aws-sdk/lib-storage": "^3.97.0", "@ipld/car": "5.1.0", "@web-std/blob": "^3.0.4", - "async-retry": "1.3.3", "ava": "^4.3.0", "debounce": "^1.2.1", "ipfs-car": "^0.7.0", @@ -33578,6 +33579,7 @@ "multiaddr": "10.0.1", "multiformats": "11.0.1", "node-fetch": "^3.2.10", + "p-retry": "*", "pino": "8.8.0", "sqs-consumer": "^5.7.0", "standard": "^16.0.4", diff --git a/pickup/index.js b/pickup/index.js index 3964ceb..bc4eabf 100644 --- a/pickup/index.js +++ b/pickup/index.js @@ -1,11 +1,12 @@ -import { createPickupFromEnv } from './lib/pickup' +import { createPickupFromEnv } from './lib/pickup.js' import { logger } from './lib/logger.js' async function start () { - logger.info({}, 'Pickup starting...') try { - const pickup = await createPickupFromEnv() + const pickup = createPickupFromEnv() + logger.info('Pickup starting...') await pickup.start() + logger.info('Pickup started') } catch (err) { logger.error(err, 'Pickup ded!') throw err diff --git a/pickup/lib/ipfs.js b/pickup/lib/ipfs.js index 4cdf5ab..bd0e561 100644 --- a/pickup/lib/ipfs.js +++ b/pickup/lib/ipfs.js @@ -3,6 +3,7 @@ import { CID } from 'multiformats/cid' import { Multiaddr } from 'multiaddr' import debounce from 'debounce' import fetch from 'node-fetch' +import retry from 'p-retry' import { logger } from './logger.js' export const ERROR_TIMEOUT = 'TIMEOUT' @@ -173,4 +174,8 @@ export class CarFetcher { await waitForGC(ipfsApiUrl) } } + + async testIpfsApi () { + return retry(() => testIpfsApi(this.ipfsApiUrl), { retries: 4 }) + } } diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js index 2a82889..2e01102 100644 --- a/pickup/lib/pickup.js +++ b/pickup/lib/pickup.js 
@@ -1,5 +1,5 @@ import { Squiss } from 'squiss-ts' -import { CarFetcher, testIpfsApi } from './ipfs.js' +import { CarFetcher } from './ipfs.js' import { S3Uploader } from './s3.js' import { logger } from './logger.js' @@ -8,7 +8,7 @@ import { logger } from './logger.js' * * @param {Record} env */ -export async function createPickupFromEnv (env = process.env) { +export function createPickupFromEnv (env = process.env) { const { IPFS_API_URL, SQS_QUEUE_URL, @@ -21,7 +21,7 @@ export async function createPickupFromEnv (env = process.env) { if (!SQS_QUEUE_URL) throw new Error('SQS_QUEUE_URL not found in ENV') if (!VALIDATION_BUCKET) throw new Error('VALIDATION_BUCKET not found in ENV') - const pickup = await createPickup({ + const pickup = createPickup({ sqsPoller: createSqsPoller({ queueUrl: SQS_QUEUE_URL, maxInFlight: BATCH_SIZE @@ -44,10 +44,7 @@ export async function createPickupFromEnv (env = process.env) { * @param {CarFetcher} config.carFetcher * @param {S3Uploader} config.s3Uploader */ -export async function createPickup ({ sqsPoller, carFetcher, s3Uploader }) { - // throw if we can't connect to kubo - await testIpfsApi(carFetcher.ipfsApiUrl) - +export function createPickup ({ sqsPoller, carFetcher, s3Uploader }) { /** * @param {import('squiss-ts').Message} msg */ @@ -68,7 +65,13 @@ export async function createPickup ({ sqsPoller, carFetcher, s3Uploader }) { sqsPoller.on('message', messageHandler) - return sqsPoller + const pollerStart = sqsPoller.start + const start = async () => { + // throw if we can't connect to kubo + await carFetcher.testIpfsApi() + pollerStart() + } + return { ...sqsPoller, start } } /** diff --git a/pickup/package.json b/pickup/package.json index 35fa3d2..e6754c9 100644 --- a/pickup/package.json +++ b/pickup/package.json @@ -18,11 +18,11 @@ "@aws-sdk/lib-dynamodb": "^3.112.0", "@aws-sdk/lib-storage": "^3.97.0", "@multiformats/multiaddr": "^11.4.0", - "async-retry": "1.3.3", "debounce": "^1.2.1", "multiaddr": "^10.0.1", "multiformats": "^9.6.5", "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", "pino": "8.8.0", "squiss-ts": "^4.4.1" }, @@ -37,4 +37,4 @@ "engines": { "node": ">=16.14" } -} \ No newline at end of file +} diff --git a/pickup/test/_helpers.js b/pickup/test/_helpers.js index e4157d5..46bd80b 100644 --- a/pickup/test/_helpers.js +++ b/pickup/test/_helpers.js @@ -1,5 +1,5 @@ import stream from 'node:stream' -import retry from 'async-retry' +import retry from 'p-retry' import { DynamoDBDocumentClient, GetCommand, PutCommand } from '@aws-sdk/lib-dynamodb' import { packToBlob } from 'ipfs-car/pack/blob' import { MemoryBlockStore } from 'ipfs-car/blockstore/memory' diff --git a/test/docker.test.js b/test/docker.test.js index b6fb904..f955f21 100644 --- a/test/docker.test.js +++ b/test/docker.test.js @@ -4,10 +4,6 @@ import test from 'ava' // builds image and starts container test('build', async t => { t.timeout(1000 * 120) - const SQS_QUEUE_URL = 'http://127.0.0.1' - const IPFS_API_URL = 'http://127.0.0.1:5001' - const DYNAMO_TABLE_NAME = 'test-table' - const DYNAMO_DB_ENDPOINT = 'http://127.0.0.1:9000' const img = await GenericContainer.fromDockerfile(new URL('../', import.meta.url).pathname) .build() // In case the test fails comment this and uncomment the log snippet @@ -16,10 +12,9 @@ test('build', async t => { await t.throwsAsync(img.start()) // set all the things it needs - img.withEnv('IPFS_API_URL', IPFS_API_URL) - img.withEnv('SQS_QUEUE_URL', SQS_QUEUE_URL) - img.withEnv('DYNAMO_TABLE_NAME', DYNAMO_TABLE_NAME) - img.withEnv('DYNAMO_DB_ENDPOINT', 
DYNAMO_DB_ENDPOINT) + img.withEnv('IPFS_API_URL', 'http://127.0.0.1:5001') + img.withEnv('SQS_QUEUE_URL', 'http://127.0.0.1') + img.withEnv('VALIDATION_BUCKET', 'foo') let pickup try { diff --git a/validator/package.json b/validator/package.json index 7b8cfe2..1d523f3 100644 --- a/validator/package.json +++ b/validator/package.json @@ -16,12 +16,12 @@ "@aws-sdk/lib-dynamodb": "^3.112.0", "@aws-sdk/lib-storage": "^3.97.0", "@ipld/car": "5.1.0", - "async-retry": "1.3.3", "debounce": "^1.2.1", "linkdex": "2.0.0", "multiaddr": "10.0.1", "multiformats": "11.0.1", "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", "pino": "8.8.0", "sqs-consumer": "^5.7.0" }, diff --git a/validator/test/_helpers.js b/validator/test/_helpers.js index 495bd90..f17266f 100644 --- a/validator/test/_helpers.js +++ b/validator/test/_helpers.js @@ -1,5 +1,5 @@ import stream from 'node:stream' -import retry from 'async-retry' +import retry from 'p-retry' import { DynamoDBDocumentClient, GetCommand, PutCommand } from '@aws-sdk/lib-dynamodb' import { packToBlob } from 'ipfs-car/pack/blob' import { MemoryBlockStore } from 'ipfs-car/blockstore/memory' From 6a9a8f5fa5e46dc327d80b768dc13881cfeac947 Mon Sep 17 00:00:00 2001 From: Oli Evans Date: Thu, 9 Mar 2023 16:39:38 +0000 Subject: [PATCH 05/14] chore: fix test race License: MIT Signed-off-by: Oli Evans --- pickup/lib/pickup.js | 8 ++++---- pickup/test/pickup.test.js | 29 ++++++++++++++++++++--------- 2 files changed, 24 insertions(+), 13 deletions(-) diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js index 2e01102..7b5d889 100644 --- a/pickup/lib/pickup.js +++ b/pickup/lib/pickup.js @@ -65,13 +65,13 @@ export function createPickup ({ sqsPoller, carFetcher, s3Uploader }) { sqsPoller.on('message', messageHandler) - const pollerStart = sqsPoller.start - const start = async () => { + const pollerStart = sqsPoller.start.bind(sqsPoller) + sqsPoller.start = async () => { // throw if we can't connect to kubo await carFetcher.testIpfsApi() - pollerStart() + return pollerStart() } - return { ...sqsPoller, start } + return sqsPoller } /** diff --git a/pickup/test/pickup.test.js b/pickup/test/pickup.test.js index 9d61f7c..77cee5e 100644 --- a/pickup/test/pickup.test.js +++ b/pickup/test/pickup.test.js @@ -1,5 +1,6 @@ import nock from 'nock' import test from 'ava' +import { nanoid } from 'nanoid' import { SendMessageCommand } from '@aws-sdk/client-sqs' import { createPickup, createSqsPoller } from '../lib/pickup.js' import { compose } from './_compose.js' @@ -18,14 +19,22 @@ test.after(async t => { }) test('throw an error if can\'t connect to IPFS', async t => { - await t.throwsAsync(createPickup({ - carFetcher: new CarFetcher({ ipfsApiUrl: 'http://127.0.0.1' }) - })) + const { createQueue, createBucket, s3 } = t.context + const queueUrl = await createQueue() + const bucket = await createBucket() + const ipfsApiUrl = `https://${nanoid()}:6000` + + const pickup = createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 2000 }), + s3Uploader: new S3Uploader({ s3, bucket }) + }) + await t.throwsAsync(() => pickup.start()) }) test('Process 3 messages concurrently and the last has a timeout', async t => { t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoTable } = t.context + const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context const queueUrl = await createQueue() const bucket = await createBucket() @@ -38,6 +47,7 
@@ test('Process 3 messages concurrently and the last has a timeout', async t => {
    await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 3000, expectedResult: 'failed' })
  ]

+  const ipfsApiUrl = `https://${nanoid()}:6000`
  // Configure nock to mock the response
  const nockPickup = nock(ipfsApiUrl)
  nockPickup
@@ -61,7 +71,7 @@ test('Process 3 messages concurrently and the last has a timeout', async t => {
    }))
  }

-  const pickup = await createPickup({
+  const pickup = createPickup({
    sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }),
    carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 2000 }),
    s3Uploader: new S3Uploader({ s3, bucket: validationBucket })
@@ -105,14 +115,14 @@ test('Process 3 messages concurrently and the last has a timeout', async t => {
    pickup.on('timeoutReached', reject)
  })

-  pickup.start()
+  await pickup.start()

  return done
})

 test('Process 1 message that fails and returns in the list', async t => {
   t.timeout(1000 * 60)
-  const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoTable } = t.context
+  const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context

   const queueUrl = await createQueue()
   const bucket = await createBucket()
@@ -122,6 +132,7 @@ test('Process 1 message that fails and returns in the list', async t => {
    await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' })
  ]

+  const ipfsApiUrl = `https://${nanoid()}:6000`
  // Configure nock to mock the response
  const nockPickup = nock(ipfsApiUrl)
  nockPickup
@@ -147,7 +158,7 @@ test('Process 1 message that fails and returns in the list', async t => {
    }))
  }

-  const pickup = await createPickup({
+  const pickup = createPickup({
    sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }),
    carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 5000 }),
    s3Uploader: new S3Uploader({ s3, bucket })
@@ -191,7 +202,7 @@ test('Process 1 message that fails and returns in the list', async t => {
    pickup.on('timeoutReached', reject)
  })

-  pickup.start()
+  await pickup.start()

  return done
})

From b7470641760867bb9c6f97184da952d8d173aed7 Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Thu, 9 Mar 2023 21:43:42 +0000
Subject: [PATCH 06/14] chore: revert err and remove unused fn

License: MIT
Signed-off-by: Oli Evans
---
 pickup/lib/s3.js | 30 ++++--------------------------
 1 file changed, 4 insertions(+), 26 deletions(-)

diff --git a/pickup/lib/s3.js b/pickup/lib/s3.js
index 3ed6146..2e4bb16 100644
--- a/pickup/lib/s3.js
+++ b/pickup/lib/s3.js
@@ -25,39 +25,17 @@ export async function sendToS3 ({ client, bucket, key, body, cid }) {
    }
  })

-  body.on('error', (error) => {
-    if (error.code === 'AbortError' || error.constructor.name === 'AbortError') {
-      logger.trace({ error, cid }, 'The abort command was thrown by a ipfs timeout')
+  body.on('error', (err) => {
+    if (err.code === 'AbortError' || err.constructor.name === 'AbortError') {
+      logger.trace({ err, cid }, 'The abort command was thrown by an IPFS timeout')
      return
    }
-    logger.error({ error, cid }, 'S3 upload error')
+    logger.error({ err, cid }, 'S3 upload error')
  })

  return s3Upload.done()
 }

-/**
- * Create the uploader
- * @param {import('@aws-sdk/client-s3'.S3Client)} client
- * @param {string} bucket
- * @returns {import('@aws-sdk/client-s3'.S3Client)}
- */
-export function createS3Uploader ({ client = createS3Client(), bucket }) {
-  return function (key) {
-    sendToS3()
-  }
-}
-
-/**
- * Create the S3Client
- *
- * @returns {import('@aws-sdk/client-s3'.S3Client)}
- */
-export function createS3Client () {
-  // Expects AWS_* ENV vars set.
-  return new S3Client({})
-}
-
 export class S3Uploader {
   /**
    * @param {object} config

From 027943c6744b7b0f53882daf4b547d6b98ea0b09 Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Mon, 13 Mar 2023 11:50:32 +0000
Subject: [PATCH 07/14] fix: add max timeout for fetching

fixes https://github.com/web3-storage/pickup/issues/101

License: MIT
Signed-off-by: Oli Evans
---
 README.md                  | 35 +++++++++++++++
 pickup/lib/ipfs.js         | 91 ++++++++++++++++++++++++--------------
 pickup/lib/pickup.js       | 47 +++++++++++++------
 pickup/test/pickup.test.js |  6 +--
 4 files changed, 128 insertions(+), 51 deletions(-)

diff --git a/README.md b/README.md
index 07bd6bd..f24385b 100644
--- a/README.md
+++ b/README.md
@@ -74,6 +74,41 @@ Find the status of a pin
 }
 ```

+## Environment
+
+Set the following in the pickup worker env to tune its behavior.
+
+### `MAX_CAR_BYTES`
+
+Maximum size in bytes of a CAR that pickup will fetch. Caps the amount of data we will pull in a single job.
+
+**default: 31 GiB** _(33,285,996,544 bytes)_
+
+### `FETCH_TIMEOUT_MS`
+
+How long to wait for fetching a CAR before failing the job. Caps the amount of time we spend on a job.
+
+**default: 4 hrs**
+
+### `FETCH_CHUNK_TIMEOUT_MS`
+
+How long to wait between chunks of data before failing a CAR. Limits the amount of time we spend waiting on a stalled fetch.
+
+**default: 2 min**
+
+2/3rds of home internet users can upload faster than 20Mbit/s (fixed broadband), at which 32GiB would transfer in 3.5hrs.
+
+see: https://www.speedtest.net/global-index
+see: https://www.omnicalculator.com/other/download-time?c=GBP&v=fileSize:32!gigabyte,downloadSpeed:5!megabit
+
+### `BATCH_SIZE`
+
+How many pin requests to handle concurrently per worker.
+
+Used to set both the concurrency per worker *and* the max number of messages each worker fetches from the queue in a single batch.
+
+**default: 10**
+
 ## Getting Started

 PR's are deployed automatically to `https://.pickup.dag.haus`. The `main` branch is deployed to https://staging.pickup.dag.haus and staging builds are promoted to prod manually via the UI at https://console.seed.run/dag-house/pickup
diff --git a/pickup/lib/ipfs.js b/pickup/lib/ipfs.js
index bd0e561..6d63d88 100644
--- a/pickup/lib/ipfs.js
+++ b/pickup/lib/ipfs.js
@@ -6,43 +6,27 @@ import fetch from 'node-fetch'
 import retry from 'p-retry'
 import { logger } from './logger.js'

-export const ERROR_TIMEOUT = 'TIMEOUT'
-
 /** @typedef {import('node:stream').Readable} Readable */

 /**
- * Start the fetch of a car
+ * Fetch a CAR from kubo
  *
  * @param {string} cid - The CID requested
  * @param {string} ipfsApiUrl - The IPFS server url
- * @param {AbortController} abortCtl - Can i kill it?
- * @param {number} timeoutMs - The timeout for each block fetch in milliseconds.
+ * @param {AbortSignal} signal - Cancel the fetch * @returns {Promise} */ -export async function fetchCar ({ cid, ipfsApiUrl, abortCtl = new AbortController(), timeoutMs = 30000 }) { +export async function fetchCar ({ cid, ipfsApiUrl, signal }) { if (!isCID(cid)) { throw new Error({ message: `Invalid CID: ${cid}` }) } const url = new URL(`/api/v0/dag/export?arg=${cid}`, ipfsApiUrl) - const startCountdown = debounce(() => abortCtl.abort(), timeoutMs) - startCountdown() - - const signal = abortCtl.signal const res = await fetch(url, { method: 'POST', signal }) if (!res.ok) { throw new Error(`${res.status} ${res.statusText} ${url}`) } - - async function * restartCountdown (source) { - for await (const chunk of source) { - startCountdown() - yield chunk - } - startCountdown.clear() - } - - return compose(res.body, restartCountdown) + return res.body } /** @@ -146,36 +130,79 @@ export async function testIpfsApi (ipfsApiUrl, timeoutMs = 10000) { } } +export const TOO_BIG = 'TOO_BIG' +export const FETCH_TOO_SLOW = 'FETCH_TOO_SLOW' +export const CHUNK_TOO_SLOW = 'CHUNK_TOO_SLOW' + export class CarFetcher { /** * @param {object} config * @param {string} config.ipfsApiUrl + * @param {number} config.maxCarBytes * @param {number} config.fetchTimeoutMs + * @param {number} config.fetchChunkTimeoutMs */ - constructor ({ ipfsApiUrl, fetchTimeoutMs = 60000 }) { + constructor ({ + ipfsApiUrl = 'http://127.0.0.1:5001', + maxCarBytes = 31 * (1024 ** 3), /* 31 GiB */ + fetchTimeoutMs = 4 * 60 * 60 * 1000, /* 4 hrs */ + fetchChunkTimeoutMs = 2 * 60 * 1000 /* 2 mins */ + }) { this.ipfsApiUrl = ipfsApiUrl + this.maxCarBytes = maxCarBytes this.fetchTimeoutMs = fetchTimeoutMs + this.fetchChunkTimeoutMs = fetchChunkTimeoutMs } /** * @param {object} config * @param {string} config.cid - * @param {string[]} config.origins - * @param {(body: Readable) => Promise} config.upload + * @param {AbortController} config.abortCtl */ - async fetch ({ cid, origins, upload }) { - const { ipfsApiUrl, fetchTimeoutMs } = this - try { - await connectTo(origins, ipfsApiUrl) - const body = await fetchCar({ cid, ipfsApiUrl, timeoutMs: fetchTimeoutMs }) - await upload(body) - } finally { - await disconnect(origins, ipfsApiUrl) - await waitForGC(ipfsApiUrl) + async fetch ({ cid, abortCtl }) { + const { ipfsApiUrl, maxCarBytes, fetchTimeoutMs, fetchChunkTimeoutMs } = this + + async function * streamWatcher (source) { + let size = 0 + const fetchTimer = debounce(() => abortCtl.abort(FETCH_TOO_SLOW), fetchTimeoutMs) + const chunkTimer = debounce(() => abortCtl.abort(CHUNK_TOO_SLOW), fetchChunkTimeoutMs) + fetchTimer() + chunkTimer() + for await (const chonk of source) { + chunkTimer() + size += chonk.byteLength + if (size > maxCarBytes) { + abortCtl.abort(TOO_BIG) + } + yield chonk + } + fetchTimer.clear() + chunkTimer.clear() } + + const body = await fetchCar({ cid, ipfsApiUrl, signal: abortCtl.signal }) + return compose(body, streamWatcher) } async testIpfsApi () { return retry(() => testIpfsApi(this.ipfsApiUrl), { retries: 4 }) } + + /** + * @param {string[]} origins + */ + async connectTo (origins) { + return connectTo(origins, this.ipfsApiUrl) + } + + /** + * @param {string[]} origins + */ + async disconnect (origins) { + return disconnect(origins, this.ipfsApiUrl) + } + + async waitForGc () { + return waitForGC(this.ipfsApiUrl) + } } diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js index 7b5d889..99ea0b5 100644 --- a/pickup/lib/pickup.js +++ b/pickup/lib/pickup.js @@ -1,5 +1,5 @@ import { Squiss } from 'squiss-ts' -import { 
CarFetcher } from './ipfs.js' +import { CarFetcher, TOO_BIG, CHUNK_TOO_SLOW, FETCH_TOO_SLOW } from './ipfs.js' import { S3Uploader } from './s3.js' import { logger } from './logger.js' @@ -13,7 +13,9 @@ export function createPickupFromEnv (env = process.env) { IPFS_API_URL, SQS_QUEUE_URL, BATCH_SIZE, - TIMEOUT_FETCH, + MAX_CAR_BYTES, + FETCH_TIMEOUT_MS, + FETCH_CHUNK_TIMEOUT_MS, VALIDATION_BUCKET } = env @@ -24,11 +26,14 @@ export function createPickupFromEnv (env = process.env) { const pickup = createPickup({ sqsPoller: createSqsPoller({ queueUrl: SQS_QUEUE_URL, - maxInFlight: BATCH_SIZE + maxInFlight: BATCH_SIZE, + noExtensionsAfterSecs: FETCH_TIMEOUT_MS }), carFetcher: new CarFetcher({ ipfsApiUrl: IPFS_API_URL, - fetchTimeoutMs: TIMEOUT_FETCH + maxCarBytes: MAX_CAR_BYTES, + fetchTimeoutMs: FETCH_TIMEOUT_MS, + fetchChunkTimeoutMs: FETCH_CHUNK_TIMEOUT_MS }), s3Uploader: new S3Uploader({ bucket: VALIDATION_BUCKET @@ -50,16 +55,32 @@ export function createPickup ({ sqsPoller, carFetcher, s3Uploader }) { */ async function messageHandler (msg) { const { cid, origins, key } = msg.body + const abortCtl = new AbortController() + const upload = s3Uploader.createUploader({ cid, key }) try { - logger.info({ cid }, 'Fetching car') - const upload = s3Uploader.createUploader({ cid, key }) - await carFetcher.fetch({ cid, origins, upload }) + await carFetcher.connectTo(origins) + const body = await carFetcher.fetch({ cid, origins, abortCtl }) + await upload(body) logger.info({ cid }, 'OK. Car in S3') msg.del() // the message is handled, remove it from queue. } catch (err) { + if (abortCtl.signal.reason === TOO_BIG) { + logger.error({ cid, err }, 'Failed to fetch CAR: Too big') + return msg.release() + } + if (abortCtl.signal.reason === CHUNK_TOO_SLOW) { + logger.error({ cid, err }, 'Failed to fetch CAR: chunk too slow') + return msg.release() + } + if (abortCtl.signal.reason === FETCH_TOO_SLOW) { + logger.error({ cid, err }, 'Failed to fetch CAR: fetch too slow') + return msg.release() + } logger.error({ cid, err }, 'Failed to fetch CAR') - // return the msg to the queue for another go - msg.release() + return msg.release() // back to the queue, try again + } finally { + await carFetcher.disconnect(origins) + await carFetcher.waitForGc() } } @@ -71,6 +92,7 @@ export function createPickup ({ sqsPoller, carFetcher, s3Uploader }) { await carFetcher.testIpfsApi() return pollerStart() } + return sqsPoller } @@ -82,13 +104,6 @@ export function createSqsPoller (config) { // set our default overrides here, we always want these. autoExtendTimeout: true, receiveSqsAttributes: ['ApproximateReceiveCount'], - - // allow 4hrs before timeout. 2/3rs of the world can upload faster than - // 20Mbit/s (fixed broadband), at which 32GiB would transfer in 3.5hrs. 
- // see: https://www.speedtest.net/global-index - // see: https://www.omnicalculator.com/other/download-time?c=GBP&v=fileSize:32!gigabyte,downloadSpeed:5!megabit - // TODO: enforce 32GiB limit - noExtensionsAfterSecs: 4 * 60 * 60, bodyFormat: 'json', ...config }) diff --git a/pickup/test/pickup.test.js b/pickup/test/pickup.test.js index 77cee5e..1d3f409 100644 --- a/pickup/test/pickup.test.js +++ b/pickup/test/pickup.test.js @@ -26,7 +26,7 @@ test('throw an error if can\'t connect to IPFS', async t => { const pickup = createPickup({ sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }), - carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 2000 }), + carFetcher: new CarFetcher({ ipfsApiUrl }), s3Uploader: new S3Uploader({ s3, bucket }) }) await t.throwsAsync(() => pickup.start()) @@ -73,7 +73,7 @@ test('Process 3 messages concurrently and the last has a timeout', async t => { const pickup = createPickup({ sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }), - carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 2000 }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchChunkTimeoutMs: 2000 }), s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) }) @@ -160,7 +160,7 @@ test('Process 1 message that fails and returns in the list', async t => { const pickup = createPickup({ sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }), - carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 5000 }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchChunkTimeoutMs: 5000 }), s3Uploader: new S3Uploader({ s3, bucket }) }) From e621bd2e67806892ab6a78aad95fc21784b5a1e2 Mon Sep 17 00:00:00 2001 From: Oli Evans Date: Mon, 13 Mar 2023 20:17:02 +0000 Subject: [PATCH 08/14] fix: add tests. 
fix bugs

License: MIT
Signed-off-by: Oli Evans
---
 pickup/lib/ipfs.js         |  29 +++-
 pickup/lib/pickup.js       |  21 +--
 pickup/test/_compose.js    |   2 +-
 pickup/test/pickup.test.js | 333 ++++++++++++++++++++++++++++++-------
 stacks/BasicApiStack.ts    |   2 +-
 5 files changed, 309 insertions(+), 78 deletions(-)

diff --git a/pickup/lib/ipfs.js b/pickup/lib/ipfs.js
index 6d63d88..014f170 100644
--- a/pickup/lib/ipfs.js
+++ b/pickup/lib/ipfs.js
@@ -161,23 +161,36 @@ export class CarFetcher {
    */
   async fetch ({ cid, abortCtl }) {
     const { ipfsApiUrl, maxCarBytes, fetchTimeoutMs, fetchChunkTimeoutMs } = this
-
+    /**
+     * @param {AsyncIterable} source
+     */
     async function * streamWatcher (source) {
-      let size = 0
-      const fetchTimer = debounce(() => abortCtl.abort(FETCH_TOO_SLOW), fetchTimeoutMs)
-      const chunkTimer = debounce(() => abortCtl.abort(CHUNK_TOO_SLOW), fetchChunkTimeoutMs)
+      const fetchTimer = debounce(() => abort(FETCH_TOO_SLOW), fetchTimeoutMs)
+      const chunkTimer = debounce(() => abort(CHUNK_TOO_SLOW), fetchChunkTimeoutMs)
+      const clearTimers = () => {
+        fetchTimer.clear()
+        chunkTimer.clear()
+      }
+      function abort (reason) {
+        clearTimers()
+        if (!abortCtl.signal.aborted) {
+          abortCtl.abort(reason)
+        }
+      }
      fetchTimer()
      chunkTimer()
+      let size = 0
      for await (const chonk of source) {
        chunkTimer()
        size += chonk.byteLength
        if (size > maxCarBytes) {
-          abortCtl.abort(TOO_BIG)
+          abort(TOO_BIG)
+          throw new Error(TOO_BIG) // kill the stream now so we don't send more bytes
+        } else {
+          yield chonk
        }
-        yield chonk
      }
-      fetchTimer.clear()
-      chunkTimer.clear()
+      clearTimers()
    }

    const body = await fetchCar({ cid, ipfsApiUrl, signal: abortCtl.signal })
diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js
index 99ea0b5..0f3d3e8 100644
--- a/pickup/lib/pickup.js
+++ b/pickup/lib/pickup.js
@@ -62,22 +62,23 @@ export function createPickup ({ sqsPoller, carFetcher, s3Uploader }) {
      const body = await carFetcher.fetch({ cid, origins, abortCtl })
      await upload(body)
      logger.info({ cid }, 'OK. Car in S3')
-      msg.del() // the message is handled, remove it from queue.
+      await msg.del() // the message is handled, remove it from queue.
} catch (err) { if (abortCtl.signal.reason === TOO_BIG) { logger.error({ cid, err }, 'Failed to fetch CAR: Too big') - return msg.release() - } - if (abortCtl.signal.reason === CHUNK_TOO_SLOW) { + await msg.release() + } else if (abortCtl.signal.reason === CHUNK_TOO_SLOW) { logger.error({ cid, err }, 'Failed to fetch CAR: chunk too slow') - return msg.release() - } - if (abortCtl.signal.reason === FETCH_TOO_SLOW) { + await msg.release() + } else if (abortCtl.signal.reason === FETCH_TOO_SLOW) { logger.error({ cid, err }, 'Failed to fetch CAR: fetch too slow') - return msg.release() + await msg.release() + } else { + logger.error({ cid, err }, 'Failed to fetch CAR') + if (!msg.isHandled) { + await msg.release() // back to the queue, try again + } } - logger.error({ cid, err }, 'Failed to fetch CAR') - return msg.release() // back to the queue, try again } finally { await carFetcher.disconnect(origins) await carFetcher.waitForGc() diff --git a/pickup/test/_compose.js b/pickup/test/_compose.js index 083100c..5b1358b 100644 --- a/pickup/test/_compose.js +++ b/pickup/test/_compose.js @@ -71,7 +71,7 @@ export async function createQueue (sqsPort, sqs) { await sqs.send(new CreateQueueCommand({ QueueName, Attributes: { - DelaySeconds: '1', + DelaySeconds: '0', MessageRetentionPeriod: '10' } })) diff --git a/pickup/test/pickup.test.js b/pickup/test/pickup.test.js index 1d3f409..653c54d 100644 --- a/pickup/test/pickup.test.js +++ b/pickup/test/pickup.test.js @@ -32,34 +32,38 @@ test('throw an error if can\'t connect to IPFS', async t => { await t.throwsAsync(() => pickup.start()) }) -test('Process 3 messages concurrently and the last has a timeout', async t => { +test('Process 1 message successfully', async t => { t.timeout(1000 * 60) const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context const queueUrl = await createQueue() const bucket = await createBucket() const validationBucket = await createBucket() + const ipfsApiUrl = `https://${nanoid()}:6000` + + const pickup = createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' }, maxInFlight: 1, activePollIntervalMs: 10000 }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 1000, fetchChunkTimeoutMs: 1000 }), + s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) + }) // Preapre the data for the test const cars = [ - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 3000, expectedResult: 'failed' }) + await prepareCid({ dynamoClient, dynamoTable, expectedResult: 'success' }) ] - const ipfsApiUrl = `https://${nanoid()}:6000` // Configure nock to mock the response const nockPickup = nock(ipfsApiUrl) nockPickup .post('/api/v0/id')// Alive .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success').persist() + .optionally().reply(200, 'GC Success').persist() cars.forEach((car, index) => { nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin .reply(200, () => { - return cars[index].carReadableStream + return cars[index].car // send the whole car body at once }) }) @@ -71,48 +75,260 @@ test('Process 3 messages concurrently and the last has a timeout', async t => { })) } + let msgCount = 0 + let msgDel = 0 + + const done = new Promise((resolve, reject) => { 
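+    // `nope` fails the test fast: it stops the poller, flags the assertion failure, and rejects `done`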
+    const nope = (reason) => {
+      stopPickup(pickup)
+      t.fail(reason)
+      reject(new Error(reason))
+    }
+    pickup.on('message', () => msgCount++)
+    pickup.on('released', () => nope('unexpected released event'))
+    pickup.on('error', () => nope('unexpected error event'))
+    pickup.on('timeoutReached', () => nope('unexpected timeoutReached event'))
+    pickup.on('deleted', () => {
+      msgDel++
+      t.is(msgCount, 1)
+      t.is(msgDel, 1)
+      try {
+        stopPickup(pickup)
+        nockPickup.done()
+        resolve()
+      } catch (err) {
+        console.log('error', err)
+        reject(err)
+      }
+    })
+  })
+
+  await pickup.start()
+
+  return done
+})
+
+test('Fail 1 message that sends data but exceeds fetchTimeoutMs', async t => {
+  t.timeout(1000 * 60)
+  const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context
+
+  const queueUrl = await createQueue()
+  const bucket = await createBucket()
+  const validationBucket = await createBucket()
+  const ipfsApiUrl = `https://${nanoid()}:6000`
+
+  const pickup = createPickup({
+    sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' }, maxInFlight: 1, activePollIntervalMs: 10000, idlePollIntervalMs: 10000 }),
+    carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 500, fetchChunkTimeoutMs: 2000 }),
+    s3Uploader: new S3Uploader({ s3, bucket: validationBucket })
+  })
+
+  // Prepare the data for the test
+  const cars = [
+    await prepareCid({ dynamoClient, dynamoTable, expectedResult: 'failed' })
+  ]
+
+  // Configure nock to mock the response
+  const nockPickup = nock(ipfsApiUrl)
+  nockPickup
+    .post('/api/v0/id')// Alive
+    .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' }))
+    .post('/api/v0/repo/gc?silent=true')// Garbage collector
+    .optionally().reply(200, 'GC Success').persist()
+
+  cars.forEach((car, index) => {
+    nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin
+      .reply(200, () => {
+        return cars[index].carReadableStream // we would write to this stream, but we want the test to fail
+      })
+  })
+
+  // Send the SQS messages in queue
+  for (let i = 0; i < cars.length; i++) {
+    await sqs.send(new SendMessageCommand({
+      MessageBody: JSON.stringify({ cid: cars[i].cid, bucket, key: cars[i].key, origins: [], requestid: i }),
+      QueueUrl: queueUrl
+    }))
+  }
+
+  let msgCount = 0
+  let msgReleased = 0
+
+  const done = new Promise((resolve, reject) => {
+    const nope = (reason) => {
+      stopPickup(pickup)
+      t.fail(reason)
+      reject(new Error(reason))
+    }
+    pickup.on('message', () => msgCount++)
+    pickup.on('released', () => {
+      msgReleased++
+      t.is(msgCount, 1)
+      t.is(msgReleased, 1)
+      try {
+        stopPickup(pickup)
+        nockPickup.done()
+        resolve()
+      } catch (err) {
+        console.log('error', err)
+        reject(err)
+      }
+    })
+    pickup.on('error', () => nope('unexpected error event'))
+    pickup.on('timeoutReached', () => nope('unexpected timeoutReached event'))
+    pickup.on('deleted', () => nope('unexpected deleted event'))
+  })
+
+  await pickup.start()
+
+  return done
+})
+
+test('Fail 1 message that sends data but exceeds fetchChunkTimeoutMs', async t => {
+  t.timeout(1000 * 60)
+  const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context
+
+  const queueUrl = await createQueue()
+  const bucket = await createBucket()
+  const validationBucket = await createBucket()
+  const ipfsApiUrl = `https://${nanoid()}:6000`
+
+  const pickup = createPickup({
+    sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' }, maxInFlight: 1, activePollIntervalMs: 10000, idlePollIntervalMs: 10000 }),
+    carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 2000, fetchChunkTimeoutMs: 1000 }),
+    s3Uploader: new S3Uploader({ s3, bucket: validationBucket })
+  })
+
+  // Prepare the data for the test
+  const cars = [
+    await prepareCid({ dynamoClient, dynamoTable, expectedResult: 'failed' })
+  ]
+
+  // Configure nock to mock the response
+  const nockPickup = nock(ipfsApiUrl)
+  nockPickup
+    .post('/api/v0/id')// Alive
+    .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' }))
+    .post('/api/v0/repo/gc?silent=true')// Garbage collector
+    .optionally().reply(200, 'GC Success').persist()
+
+  cars.forEach((car, index) => {
+    nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin
+      .reply(200, () => {
+        return cars[index].carReadableStream // we would write to this stream, but we want the test to fail
+      })
+  })
+
+  // Send the SQS messages in queue
+  for (let i = 0; i < cars.length; i++) {
+    await sqs.send(new SendMessageCommand({
+      MessageBody: JSON.stringify({ cid: cars[i].cid, bucket, key: cars[i].key, origins: [], requestid: i }),
+      QueueUrl: queueUrl
+    }))
+  }
+
+  let msgCount = 0
+  let msgReleased = 0
+
+  const done = new Promise((resolve, reject) => {
+    const nope = (reason) => {
+      stopPickup(pickup)
+      t.fail(reason)
+      reject(new Error(reason))
+    }
+    pickup.on('message', () => msgCount++)
+    pickup.on('released', () => {
+      msgReleased++
+      t.is(msgCount, 1)
+      t.is(msgReleased, 1)
+      try {
+        stopPickup(pickup)
+        nockPickup.done()
+        resolve()
+      } catch (err) {
+        console.log('error', err)
+        reject(err)
+      }
+    })
+    pickup.on('error', () => nope('unexpected error event'))
+    pickup.on('timeoutReached', () => nope('unexpected timeoutReached event'))
+    pickup.on('deleted', () => nope('unexpected deleted event'))
+  })
+
+  await pickup.start()
+
+  return done
+})
+
+test('Fail 1 message that sends data but exceeds maxCarBytes', async t => {
+  t.timeout(1000 * 60)
+  const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context
+
+  const queueUrl = await createQueue()
+  const bucket = await createBucket()
+  const validationBucket = await createBucket()
+  const ipfsApiUrl = `https://${nanoid()}:6000`
+
+  const pickup = createPickup({
+    sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' }, maxInFlight: 1, activePollIntervalMs: 10000, idlePollIntervalMs: 10000 }),
+    carFetcher: new CarFetcher({ ipfsApiUrl, maxCarBytes: 1 }),
+    s3Uploader: new S3Uploader({ s3, bucket: validationBucket })
+  })
+
+  // Prepare the data for the test
+  const cars = [
+    await prepareCid({ dynamoClient, dynamoTable, expectedResult: 'failed' })
+  ]
+
+  // Configure nock to mock the response
+  const nockPickup = nock(ipfsApiUrl)
+  nockPickup
+    .post('/api/v0/id')// Alive
+    .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' }))
+    .post('/api/v0/repo/gc?silent=true')// Garbage collector
+    .optionally().reply(200, 'GC Success').persist()
+
+  cars.forEach((car, index) => {
+    nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin
+      .reply(200, () => {
+        return cars[index].car
+      })
+  })
+
+  // Send the SQS messages in queue
+  for (let i = 0; i < cars.length; i++) {
+    await sqs.send(new SendMessageCommand({
+      MessageBody: JSON.stringify({ cid: cars[i].cid, bucket, key: cars[i].key, origins: [], requestid: i }),
+      QueueUrl: queueUrl
+    }))
+  }
+
+  let msgCount = 0
+  let msgReleased = 0
+
+  const done = new Promise((resolve, reject) => {
+    const nope = (reason) => {
+      stopPickup(pickup)
+      t.fail(reason)
+      reject(new Error(reason))
+    }
+    pickup.on('message', () => msgCount++)
+    pickup.on('released', () => {
+      msgReleased++
+      t.is(msgCount, 1)
+      t.is(msgReleased, 1)
+      try {
+        stopPickup(pickup)
+        nockPickup.done()
+        resolve()
+      } catch (err) {
+        console.log('error', err)
+        reject(err)
+      }
+    })
-    pickup.on('error', reject)
-    pickup.on('timeoutReached', reject)
+    pickup.on('error', () => nope('unexpected error event'))
+    pickup.on('timeoutReached', () => nope('unexpected timeoutReached event'))
+    pickup.on('deleted', () => nope('unexpected deleted event'))
  })

  await pickup.start()
@@ -120,16 +336,19 @@ test('Process 3 messages concurrently and the last has a timeout', async t => {
  return done
})

-test('Process 1 message that fails and returns in the list', async t => {
+test('Process 3 messages concurrently and the last has a timeout', async t => {
  t.timeout(1000 * 60)
  const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context

  const queueUrl = await createQueue()
  const bucket = await createBucket()
+  const validationBucket = await createBucket()

  // Preapre the data for the test
  const cars = [
-    await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' })
+    await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }),
+    await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }),
+    await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 3000, expectedResult: 'failed' })
  ]

  const ipfsApiUrl = `https://${nanoid()}:6000`
@@ -139,15 +358,13 @@ test('Process 1 message that fails and returns in the list', async t => {
    .post('/api/v0/id')// Alive
    .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' }))
    .post('/api/v0/repo/gc?silent=true')// Garbage collector
-    .reply(200, 'GC Success')
-    .post('/api/v0/repo/gc?silent=true')// Garbage collector
-    .reply(200, 'GC Success')
+    .reply(200, 'GC Success').persist()

  cars.forEach((car, index) => {
    nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin
-      .reply((uri, requestBody) => [400, 'KO'])
-    nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin
-      .reply((uri, requestBody) => [200, cars[index].carReadableStream])
+      .reply(200, () => {
+        return cars[index].carReadableStream
+      })
  })

  // Send the SQS messages in queue
@@ -160,17 +377,15 @@ test('Process 1 message that fails and returns in the list', async t => {

  const pickup = createPickup({
    sqsPoller: createSqsPoller({
queueUrl, awsConfig: { region: 'us-east-1' } }), - carFetcher: new CarFetcher({ ipfsApiUrl, fetchChunkTimeoutMs: 5000 }), - s3Uploader: new S3Uploader({ s3, bucket }) + carFetcher: new CarFetcher({ ipfsApiUrl, fetchChunkTimeoutMs: 2000 }), + s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) }) // The number of the messages resolved, when is max close the test and finalize - let rx = 0 + let resolved = 0 const done = new Promise((resolve, reject) => { pickup.on('message', async msg => { - rx++ - if (rx !== 2) return const message = msg.body const index = Number(message.requestid) if (cars[index].expectedResult !== 'error') { @@ -183,17 +398,19 @@ test('Process 1 message that fails and returns in the list', async t => { } }) - pickup.on('deleted', async res => { - const msg = res.msg + pickup.on('handled', async msg => { try { - await verifyMessage({ msg, cars, dynamoClient, dynamoTable, t, bucket, s3 }) - await sleep(5) - const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) - t.is(resultMessages, undefined) + await verifyMessage({ msg, cars, t, bucket: validationBucket, s3 }) + resolved++ - nockPickup.done() - await stopPickup(pickup) - resolve() + if (resolved === cars.length) { + await sleep(5) + const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) + t.is(resultMessages, undefined) + nockPickup.done() + await stopPickup(pickup) + resolve() + } } catch (e) { reject(e) } diff --git a/stacks/BasicApiStack.ts b/stacks/BasicApiStack.ts index 10d5311..e6ab11e 100644 --- a/stacks/BasicApiStack.ts +++ b/stacks/BasicApiStack.ts @@ -14,7 +14,7 @@ export function BasicApiStack ({ queue: { deadLetterQueue: { queue: dlq.cdk.queue, - maxReceiveCount: 11 + maxReceiveCount: 3 } } } From 8ed4f000dc820834ca73fccc8297a86efc3f6f03 Mon Sep 17 00:00:00 2001 From: Oli Evans Date: Mon, 13 Mar 2023 22:02:19 +0000 Subject: [PATCH 09/14] chore: not testing message count License: MIT Signed-off-by: Oli Evans --- pickup/test/pickup.test.js | 9 --------- 1 file changed, 9 deletions(-) diff --git a/pickup/test/pickup.test.js b/pickup/test/pickup.test.js index 653c54d..18ab638 100644 --- a/pickup/test/pickup.test.js +++ b/pickup/test/pickup.test.js @@ -151,7 +151,6 @@ test('Fail 1 message that sends data but exceeds fetchTimeoutMs', async t => { })) } - let msgCount = 0 let msgReleased = 0 const done = new Promise((resolve, reject) => { @@ -160,10 +159,8 @@ test('Fail 1 message that sends data but exceeds fetchTimeoutMs', async t => { t.fail(reason) reject(new Error(reason)) } - pickup.on('message', () => msgCount++) pickup.on('released', () => { msgReleased++ - t.is(msgCount, 1) t.is(msgReleased, 1) try { stopPickup(pickup) @@ -227,7 +224,6 @@ test('Fail 1 message that sends data but exceeds fetchChunkTimeoutMs', async t = })) } - let msgCount = 0 let msgReleased = 0 const done = new Promise((resolve, reject) => { @@ -236,10 +232,8 @@ test('Fail 1 message that sends data but exceeds fetchChunkTimeoutMs', async t = t.fail(reason) reject(new Error(reason)) } - pickup.on('message', () => msgCount++) pickup.on('released', () => { msgReleased++ - t.is(msgCount, 1) t.is(msgReleased, 1) try { stopPickup(pickup) @@ -303,7 +297,6 @@ test('Fail 1 message that sends data but exceeds maxCarBytes', async t => { })) } - let msgCount = 0 let msgReleased = 0 const done = new Promise((resolve, reject) => { @@ -312,10 +305,8 @@ test('Fail 1 message that sends data but exceeds maxCarBytes', async t => { t.fail(reason) reject(new Error(reason)) } - 
pickup.on('message', () => msgCount++)
    pickup.on('released', () => {
      msgReleased++
-      t.is(msgCount, 1)
      t.is(msgReleased, 1)
      try {
        stopPickup(pickup)

From f08cf6b306ca20099a541ddec85f8d0e5c98ceef Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Tue, 14 Mar 2023 11:08:02 +0000
Subject: [PATCH 10/14] chore: only set optional env if set

License: MIT
Signed-off-by: Oli Evans
---
 stacks/PickupStack.ts | 27 ++++++++++++++++++++++-----
 1 file changed, 22 insertions(+), 5 deletions(-)

diff --git a/stacks/PickupStack.ts b/stacks/PickupStack.ts
index aacb7e3..1e214bd 100644
--- a/stacks/PickupStack.ts
+++ b/stacks/PickupStack.ts
@@ -70,12 +70,15 @@ export function PickupStack ({ app, stack }: StackContext): void {
      ephemeralStorageGiB: 64, // max 200
      environment: {
        SQS_QUEUE_URL: basicApi.queue.queueUrl,
-        IPFS_API_URL: 'http://127.0.0.1:5001',
        DYNAMO_TABLE_NAME: basicApi.dynamoDbTable.tableName,
-        BATCH_SIZE: process.env.BATCH_SIZE ?? '5',
-        TIMEOUT_FETCH: process.env.TIMEOUT_FETCH ?? '60',
-        MAX_RETRY: process.env.MAX_RETRY ?? '10',
-        VALIDATION_BUCKET: (validationBucket != null) ? validationBucket.bucketName : ''
+        VALIDATION_BUCKET: (validationBucket != null) ? validationBucket.bucketName : '',
+        ...optionalEnv([
+          'IPFS_API_URL',
+          'BATCH_SIZE',
+          'MAX_CAR_BYTES',
+          'FETCH_TIMEOUT_MS',
+          'FETCH_CHUNK_TIMEOUT_MS'
+        ])
      },
      queue: basicApi.queue.cdk.queue,
      enableExecuteCommand: true,
@@ -283,3 +286,17 @@ function createVPCGateways (vpc: ec2.IVpc): void {
    throw new Error('Can\'t add gateway to undefined VPC')
  }
}
+
+/**
+ * Create an env object to pass specified keys to an sst construct
+ * from keys that may be on the current process.env
+ */
+export function optionalEnv (keys: string[]): Record<string, string> {
+  const res: Record<string, string> = {}
+  for (const key of keys) {
+    const val = process.env[key]
+    if (val === undefined) continue
+    res[key] = val
+  }
+  return res
+}

From 5a83796458dd2962dbaae057f04ab26eea01bd59 Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Tue, 14 Mar 2023 11:26:07 +0000
Subject: [PATCH 11/14] chore: IPFS_API_URL is not required

License: MIT
Signed-off-by: Oli Evans
---
 pickup/lib/pickup.js | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js
index 0f3d3e8..7eb4f43 100644
--- a/pickup/lib/pickup.js
+++ b/pickup/lib/pickup.js
@@ -19,7 +19,6 @@ export function createPickupFromEnv (env = process.env) {
    VALIDATION_BUCKET
  } = env

-  if (!IPFS_API_URL) throw new Error('IPFS_API_URL not found in ENV')
  if (!SQS_QUEUE_URL) throw new Error('SQS_QUEUE_URL not found in ENV')
  if (!VALIDATION_BUCKET) throw new Error('VALIDATION_BUCKET not found in ENV')

From 4b6a00c8b0cdb7ea35486fa070a04851a6bc0173 Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Tue, 14 Mar 2023 11:42:35 +0000
Subject: [PATCH 12/14] chore: bump up fetchChunkTimeoutMs

2 mins still times out after node restarts

License: MIT
Signed-off-by: Oli Evans
---
 pickup/lib/ipfs.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pickup/lib/ipfs.js b/pickup/lib/ipfs.js
index 014f170..fe55049 100644
--- a/pickup/lib/ipfs.js
+++ b/pickup/lib/ipfs.js
@@ -146,7 +146,7 @@ export class CarFetcher {
    ipfsApiUrl = 'http://127.0.0.1:5001',
    maxCarBytes = 31 * (1024 ** 3), /* 31 GiB */
    fetchTimeoutMs = 4 * 60 * 60 * 1000, /* 4 hrs */
-    fetchChunkTimeoutMs = 2 * 60 * 1000 /* 2 mins */
+    fetchChunkTimeoutMs = 5 * 60 * 1000 /* 5 mins */
  }) {
    this.ipfsApiUrl = ipfsApiUrl
    this.maxCarBytes = maxCarBytes

From 9f7dc1bff165b780df835140a393eb68452b4809 Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Tue, 14 Mar 2023 12:54:51 +0000
Subject: [PATCH 13/14] chore: add origins to logs

License: MIT
Signed-off-by: Oli Evans
---
 pickup/lib/pickup.js | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js
index 7eb4f43..345106c 100644
--- a/pickup/lib/pickup.js
+++ b/pickup/lib/pickup.js
@@ -57,23 +57,24 @@ export function createPickup ({ sqsPoller, carFetcher, s3Uploader }) {
    const abortCtl = new AbortController()
    const upload = s3Uploader.createUploader({ cid, key })
    try {
+      logger.info({ cid, origins }, 'Fetching CAR')
      await carFetcher.connectTo(origins)
      const body = await carFetcher.fetch({ cid, origins, abortCtl })
      await upload(body)
-      logger.info({ cid }, 'OK. Car in S3')
+      logger.info({ cid, origins }, 'OK. Car in S3')
      await msg.del() // the message is handled, remove it from queue.
    } catch (err) {
      if (abortCtl.signal.reason === TOO_BIG) {
-        logger.error({ cid, err }, 'Failed to fetch CAR: Too big')
+        logger.error({ cid, origins, err }, 'Failed to fetch CAR: Too big')
        await msg.release()
      } else if (abortCtl.signal.reason === CHUNK_TOO_SLOW) {
-        logger.error({ cid, err }, 'Failed to fetch CAR: chunk too slow')
+        logger.error({ cid, origins, err }, 'Failed to fetch CAR: chunk too slow')
        await msg.release()
      } else if (abortCtl.signal.reason === FETCH_TOO_SLOW) {
-        logger.error({ cid, err }, 'Failed to fetch CAR: fetch too slow')
+        logger.error({ cid, origins, err }, 'Failed to fetch CAR: fetch too slow')
        await msg.release()
      } else {
-        logger.error({ cid, err }, 'Failed to fetch CAR')
+        logger.error({ cid, origins, err }, 'Failed to fetch CAR')
        if (!msg.isHandled) {
          await msg.release() // back to the queue, try again
        }

From ab611b35bd7ad1d5e40d27a866c7be01bb0e0357 Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Tue, 14 Mar 2023 15:03:35 +0000
Subject: [PATCH 14/14] feat: improved origins filtering

- grab p2p addrs from origins if we can.
- limit origins to 10, just so we don't accept unboundedly long origins lists

License: MIT
Signed-off-by: Oli Evans
---
 api/basic/helper/multiaddr.ts | 61 ++++++++++++++++++++++++++++-----
 api/test/multiaddrs.test.js   | 42 ++++++++++++++++++++++++
 2 files changed, 95 insertions(+), 8 deletions(-)
 create mode 100644 api/test/multiaddrs.test.js

diff --git a/api/basic/helper/multiaddr.ts b/api/basic/helper/multiaddr.ts
index 5100304..20568d4 100644
--- a/api/basic/helper/multiaddr.ts
+++ b/api/basic/helper/multiaddr.ts
@@ -5,16 +5,43 @@ import { Multiaddr } from 'multiaddr'
 * Convert a comma-separated list of origins into an array of
 * multiaddr strings that we could possibly connect to.
 *
- * Filters out:
- * - malformed multiaddrs and ones with transports we don't recognise
- * - private or reserved ip address that we wouldn't be able to connect to.
- */
+ * Picks the first 10 non-ip based multiaddrs,
+ * or ip based multiaddrs with a public (non-bogon) IP address
+ * or a /p2p/:peerId addr from a bogon ip based multiaddr.
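+ * Dedupes repeated addrs and caps how many are returned.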
+ **/
 export function findUsableMultiaddrs (input = ''): string[] {
   if (input === '' || input === null) return []
-  return input
-    .split(',')
-    .filter(isMultiaddr)
-    .filter(hasPublicIpAddress)
+  const specificAddrs: Set<string> = new Set()
+  const p2pAddrs: Set<string> = new Set()
+  for (const str of input.split(',')) {
+    const input = str.trim()
+    const ma = asMultiaddr(input)
+    if (ma === undefined) continue
+    // where we've got a ma with an ip we can't connect to,
+    // try to extract the /p2p/:peerId addr where available
+    if (hasBogonIpAddress(input)) {
+      const addr = getP2pAddr(input)
+      if (addr !== undefined) {
+        p2pAddrs.add(addr)
+      }
+    } else {
+      // either an ip based multiaddr with public ip, or a non-ip based multiaddr
+      specificAddrs.add(input)
+    }
+  }
+  return Array
+    .from(specificAddrs)
+    .concat(Array.from(p2pAddrs))
+    .slice(1, 10) // don't return an unbounded number of multiaddrs.
+}
+
+export function asMultiaddr (input = ''): Multiaddr | undefined {
+  if (input === '' || input === null) return undefined
+  try {
+    return new Multiaddr(input) // eslint-disable-line no-new
+  } catch (e) {
+    return undefined
+  }
+}

 export function isMultiaddr (input = ''): boolean {
@@ -37,3 +64,21 @@ export function hasPublicIpAddress (input = ''): boolean {
  // not a IP based multiaddr, so we allow it.
  return true
}
+
+export function hasBogonIpAddress (input = ''): boolean {
+  if (input === '' || input === null) return false
+  if (input.startsWith('/ip6/') || input.startsWith('/ip4/')) {
+    const ip = input.split('/').at(2)
+    if (ip === undefined) return false
+    return bogon(ip)
+  }
+  return false
+}
+
+export function getP2pAddr (input = ''): string | undefined {
+  if (input === '' || input === null) return undefined
+  const match = input.match(/\/p2p\/\w+$/)
+  if (match != null) {
+    return match.at(0)
+  }
+}
diff --git a/api/test/multiaddrs.test.js b/api/test/multiaddrs.test.js
new file mode 100644
index 0000000..8795c3e
--- /dev/null
+++ b/api/test/multiaddrs.test.js
@@ -0,0 +1,42 @@
+import test from 'ava'
+import { findUsableMultiaddrs } from '../basic/helper/multiaddr.js'
+
+const fixture = [
+  '/ip4/127.0.0.1/tcp/4001/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/127.0.0.1/udp/4001/quic-v1/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/127.0.0.1/udp/4001/quic-v1/webtransport/certhash/uEiAcyORkzbKPHpd2Rq8px1APBfdnTJ1jzH10u92mYJAOMA/certhash/uEiBFkYB7Q0cp49VFSMY9ae8ffHaRJf7N0WXCGBkGp4KCIQ/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/127.0.0.1/udp/4001/quic/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/192.168.1.113/tcp/4001/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/192.168.1.113/udp/4001/quic-v1/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/192.168.1.113/udp/4001/quic-v1/webtransport/certhash/uEiAcyORkzbKPHpd2Rq8px1APBfdnTJ1jzH10u92mYJAOMA/certhash/uEiBFkYB7Q0cp49VFSMY9ae8ffHaRJf7N0WXCGBkGp4KCIQ/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/192.168.1.113/udp/4001/quic/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/45.76.79.40/udp/4001/quic-v1/p2p/12D3KooWGFLFZ9uYAqD8WschDcPDT4PgmsGzgvwrTdDmV4LD5kSe/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/45.76.79.40/udp/4001/quic/p2p/12D3KooWGFLFZ9uYAqD8WschDcPDT4PgmsGzgvwrTdDmV4LD5kSe/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/5.161.44.89/tcp/4001/p2p/12D3KooWSvYbdaYZmZucbkEHKDDoHNqCtkEMWdSm1z4udww6fyUM/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/5.161.44.89/udp/4001/quic/p2p/12D3KooWSvYbdaYZmZucbkEHKDDoHNqCtkEMWdSm1z4udww6fyUM/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip6/2a01:4ff:f0:ab6::1/tcp/4001/p2p/12D3KooWSvYbdaYZmZucbkEHKDDoHNqCtkEMWdSm1z4udww6fyUM/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip6/2a01:4ff:f0:ab6::1/udp/4001/quic/p2p/12D3KooWSvYbdaYZmZucbkEHKDDoHNqCtkEMWdSm1z4udww6fyUM/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip6/::1/tcp/4001/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip6/::1/udp/4001/quic-v1/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip6/::1/udp/4001/quic-v1/webtransport/certhash/uEiAcyORkzbKPHpd2Rq8px1APBfdnTJ1jzH10u92mYJAOMA/certhash/uEiBFkYB7Q0cp49VFSMY9ae8ffHaRJf7N0WXCGBkGp4KCIQ/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip6/::1/udp/4001/quic/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN'
+]
+
+const expected = [
+  '/ip4/5.161.44.89/tcp/4001/p2p/12D3KooWSvYbdaYZmZucbkEHKDDoHNqCtkEMWdSm1z4udww6fyUM/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip4/5.161.44.89/udp/4001/quic/p2p/12D3KooWSvYbdaYZmZucbkEHKDDoHNqCtkEMWdSm1z4udww6fyUM/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip6/2a01:4ff:f0:ab6::1/tcp/4001/p2p/12D3KooWSvYbdaYZmZucbkEHKDDoHNqCtkEMWdSm1z4udww6fyUM/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/ip6/2a01:4ff:f0:ab6::1/udp/4001/quic/p2p/12D3KooWSvYbdaYZmZucbkEHKDDoHNqCtkEMWdSm1z4udww6fyUM/p2p-circuit/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN',
+  '/p2p/12D3KooWADjHf2kyANQodg9z5sSdX4bGEMbWg7ojwu6SCyDAMtzN'
+]
+
+test('findUsableMultiaddrs', t => {
+  const input = fixture.join(',')
+  const res = findUsableMultiaddrs(input)
+  t.is(res.length, 5)
+  let i = 0
+  for (const ma of expected) {
+    t.is(res[i], ma)
+    i++
+  }
+})