diff --git a/docker-compose.yaml b/docker-compose.yaml
index 9e6a6cc7..5ff1aa2e 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -145,7 +145,7 @@ services:
       - TVAL_AR_IO_SQLITE_BACKUP_S3_BUCKET_SECRET_KEY=${AR_IO_SQLITE_BACKUP_S3_BUCKET_SECRET_KEY:-}
 
   upload-service:
-    image: ghcr.io/ardriveapp/turbo-upload-service:${UPLOAD_SERVICE_IMAGE_TAG:-adae756f264362e14eef8f11d192a53c287587c6}
+    image: ghcr.io/ardriveapp/turbo-upload-service:${UPLOAD_SERVICE_IMAGE_TAG:-6299f5497f5de479a1fb17ec86b764a4bf018f7b}
     environment:
       ARWEAVE_GATEWAY: ${ARWEAVE_GATEWAY:-http://envoy:3000}
       PORT: ${UPLOAD_SERVICE_PORT:-5100}
@@ -184,7 +184,7 @@ services:
     profiles: ['bundler']
 
  fulfillment-service:
-    image: ghcr.io/ardriveapp/turbo-upload-service-fulfillment:${UPLOAD_SERVICE_IMAGE_TAG:-adae756f264362e14eef8f11d192a53c287587c6}
+    image: ghcr.io/ardriveapp/turbo-upload-service-fulfillment:${UPLOAD_SERVICE_IMAGE_TAG:-6299f5497f5de479a1fb17ec86b764a4bf018f7b}
     environment:
       ARWEAVE_GATEWAY: ${ARWEAVE_GATEWAY:-http://envoy:3000}
       NODE_ENV: ${NODE_ENV:-local}
@@ -236,7 +236,7 @@ services:
     profiles: ['bundler']
 
   localstack:
-    image: ghcr.io/ardriveapp/turbo-upload-service-localstack:${UPLOAD_SERVICE_IMAGE_TAG:-adae756f264362e14eef8f11d192a53c287587c6}
+    image: ghcr.io/ardriveapp/turbo-upload-service-localstack:${UPLOAD_SERVICE_IMAGE_TAG:-6299f5497f5de479a1fb17ec86b764a4bf018f7b}
     ports:
       - '4566' # don't expose localstack externally by default
       #- "127.0.0.1:4510-4559:4510-4559" # external services port range
diff --git a/src/data/ar-io-data-source.test.ts b/src/data/ar-io-data-source.test.ts
index 8dd5c07b..a633103c 100644
--- a/src/data/ar-io-data-source.test.ts
+++ b/src/data/ar-io-data-source.test.ts
@@ -19,7 +19,7 @@ import { strict as assert } from 'node:assert';
 import { afterEach, before, beforeEach, describe, it, mock } from 'node:test';
 import * as winston from 'winston';
 import axios from 'axios';
-import { ArIO, ArIOReadable } from '@ar.io/sdk';
+import { AoIORead, IO } from '@ar.io/sdk';
 import { Readable } from 'node:stream';
 import { RequestAttributes } from '../types.js';
 import { ArIODataSource } from './ar-io-data-source.js';
@@ -29,7 +29,7 @@ import { TestDestroyedReadable, axiosStreamData } from './test-utils.js';
 let log: winston.Logger;
 let dataSource: ArIODataSource;
 let requestAttributes: RequestAttributes;
-let mockedArIOInstance: ArIOReadable;
+let mockedArIOInstance: AoIORead;
 let mockedAxiosGet: any;
 
 before(async () => {
@@ -54,7 +54,7 @@ beforeEach(async () => {
     },
   });
 
-  mock.method(ArIO, 'init', () => mockedArIOInstance);
+  mock.method(IO, 'init', () => mockedArIOInstance);
 
   mock.method(axios, 'get', mockedAxiosGet);
 
@@ -64,7 +64,7 @@ beforeEach(async () => {
 
   dataSource = new ArIODataSource({
     log,
-    arIO: ArIO.init(),
+    arIO: IO.init(),
     nodeWallet: 'localNode',
   });
 
@@ -78,7 +78,7 @@ afterEach(async () => {
 describe('ArIODataSource', () => {
   describe('constructor', () => {
-    it('should fetch peers and upodate peer list ignoring the running node as a peer', async () => {
+    it('should fetch peers and update peer list ignoring the running node as a peer', async () => {
       assert.deepEqual(dataSource.peers, {
         peer1: 'http://peer1.com',
         peer2: 'https://peer2.com',
       });
@@ -115,6 +115,8 @@ describe('ArIODataSource', () => {
         },
       });
 
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
       let receivedData = '';
 
       for await (const chunk of data.stream) {
@@ -203,6 +205,8 @@ describe('ArIODataSource', () => {
       try {
         const data = await dataSource.getData({ id: 'id' });
 
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore
         let receivedData = '';
 
        for await (const chunk of data.stream) {
diff --git a/src/data/gateway-data-source.test.ts b/src/data/gateway-data-source.test.ts
index 901dfcb7..a7d053ed 100644
--- a/src/data/gateway-data-source.test.ts
+++ b/src/data/gateway-data-source.test.ts
@@ -160,6 +160,8 @@ describe('GatewayDataSource', () => {
       try {
         const data = await dataSource.getData({ id: 'id', requestAttributes });
 
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore
         let receivedData = '';
 
         for await (const chunk of data.stream) {
diff --git a/src/data/read-through-data-cache.test.ts b/src/data/read-through-data-cache.test.ts
index 171dced9..a08428ac 100644
--- a/src/data/read-through-data-cache.test.ts
+++ b/src/data/read-through-data-cache.test.ts
@@ -285,6 +285,8 @@ describe('ReadThroughDataCache', function () {
     });
 
     it('should increment getDataStreamErrorsTotal for broken cached data stream', async function () {
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
       let calledWithArgument: string;
       mock.method(mockContiguousDataIndex, 'getDataAttributes', () => {
         return Promise.resolve({
@@ -297,6 +299,8 @@ describe('ReadThroughDataCache', function () {
         });
       });
       mock.method(mockContiguousDataStore, 'get', (hash: string) => {
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore
         calledWithArgument = hash;
         return new TestDestroyedReadable();
       });
@@ -307,6 +311,8 @@ describe('ReadThroughDataCache', function () {
         requestAttributes,
       });
 
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
       let receivedData = '';
 
       for await (const chunk of result.stream) {
@@ -425,6 +431,8 @@ describe('ReadThroughDataCache', function () {
         requestAttributes,
       });
 
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
       let receivedData = '';
 
       for await (const chunk of result.stream) {
diff --git a/src/data/tx-chunks-data-source.test.ts b/src/data/tx-chunks-data-source.test.ts
index 5403e409..532f688f 100644
--- a/src/data/tx-chunks-data-source.test.ts
+++ b/src/data/tx-chunks-data-source.test.ts
@@ -17,7 +17,6 @@
  */
 import { strict as assert } from 'node:assert';
 import { afterEach, before, beforeEach, describe, it, mock } from 'node:test';
-import { Readable } from 'node:stream';
 import * as winston from 'winston';
 
 import {
@@ -135,7 +134,7 @@ describe('TxChunksDataSource', () => {
           requestAttributes,
         });
 
-        for await (const chunk of data.stream) {
+        for await (const _chunk of data.stream) {
           // do nothing
         }
       } catch (e) {
@@ -165,7 +164,7 @@ describe('TxChunksDataSource', () => {
           requestAttributes,
         });
 
-        for await (const chunk of data.stream) {
+        for await (const _chunk of data.stream) {
           // do nothing
         }
       } catch (e) {
diff --git a/src/routes/graphql/resolvers.test.ts b/src/routes/graphql/resolvers.test.ts
index 8fc7a936..60b75375 100644
--- a/src/routes/graphql/resolvers.test.ts
+++ b/src/routes/graphql/resolvers.test.ts
@@ -28,6 +28,7 @@ import {
   resolveTxQuantity,
   resolveTxRecipient,
 } from '../../../src/routes/graphql/resolvers.js';
+import { GqlTransaction } from '../../types.js';
 
 const GQL_TX = {
   id: 'LXCrfCRLHB7YyLGAeQoio00qb7LwT3UO3a-2TSDli8Q',
@@ -69,14 +70,14 @@ describe('getPageSize', () => {
 
 describe('resolveTxRecipient', () => {
   it('should return the recipient', () => {
-    const recipient = resolveTxRecipient(GQL_TX);
+    const recipient = resolveTxRecipient(GQL_TX as unknown as GqlTransaction);
     assert.equal(recipient, '6p817XK-yIX-hBCQ0qD5wbcP05WPQgPKFmwNYC2xtwM');
   });
 
   // arweave.net compatibility
   it('should return empty string if recipient is undefined', () => {
     const tx = { ...GQL_TX, recipient: undefined };
-    const recipient = resolveTxRecipient(tx);
+    const recipient = resolveTxRecipient(tx as unknown as GqlTransaction);
     assert.equal(recipient, '');
   });
 });
@@ -85,28 +86,28 @@ describe('resolveTxData', () => {
   it('should return dataSize and contentType', () => {
     // TODO find a tx with content type set
     const tx = { ...GQL_TX, contentType: 'text/plain' };
-    const dataResult = resolveTxData(tx);
+    const dataResult = resolveTxData(tx as unknown as GqlTransaction);
     assert.deepEqual(dataResult, { size: '0', type: 'text/plain' });
   });
 });
 
 describe('resolveTxQuantity', () => {
   it('should return quantity in AR and winstons', () => {
-    const quantity = resolveTxQuantity(GQL_TX);
+    const quantity = resolveTxQuantity(GQL_TX as unknown as GqlTransaction);
     assert.deepEqual(quantity, { ar: '0.000007896935', winston: '7896935' });
   });
 });
 
 describe('resolveTxFee', () => {
   it('should return quantity in AR and winstons', () => {
-    const fee = resolveTxFee(GQL_TX);
+    const fee = resolveTxFee(GQL_TX as unknown as GqlTransaction);
     assert.deepEqual(fee, { ar: '0.000000477648', winston: '477648' });
   });
 });
 
 describe('resolveTxOwner', () => {
   it('should return owner address and key', () => {
-    const owner = resolveTxOwner(GQL_TX);
+    const owner = resolveTxOwner(GQL_TX as unknown as GqlTransaction);
     assert.equal(owner.address, 'k3hNqeW_8_WDBz6hwUAsu6DQ47sGXZUP5Q8MJP8BdsE');
     assert.equal(
       owner.key,
diff --git a/test/end-to-end/bundler-sidecar.test.ts b/test/end-to-end/bundler-sidecar.test.ts
new file mode 100644
index 00000000..dc8fc338
--- /dev/null
+++ b/test/end-to-end/bundler-sidecar.test.ts
@@ -0,0 +1,169 @@
+/**
+ * AR.IO Gateway
+ * Copyright (C) 2022-2023 Permanent Data Solutions, Inc. All Rights Reserved.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+import { strict as assert } from 'node:assert';
+import { after, before, describe, it } from 'node:test';
+import { rimraf } from 'rimraf';
+import {
+  DockerComposeEnvironment,
+  PullPolicy,
+  StartedDockerComposeEnvironment,
+  Wait,
+} from 'testcontainers';
+import wait from 'wait';
+import axios from 'axios';
+import Sqlite, { Database } from 'better-sqlite3';
+import { fromB64Url, sha256B64Url, toB64Url } from '../../src/lib/encoding.js';
+import { Environment } from 'testcontainers/build/types.js';
+import { createData } from 'arbundles';
+import Arweave from 'arweave';
+import { ArweaveSigner } from 'arbundles/src/signing/index.js';
+import { JWKInterface } from 'arweave/node/lib/wallet.js';
+
+const projectRootPath = process.cwd();
+
+const cleanDb = () =>
+  rimraf(`${projectRootPath}/data/sqlite/*.db*`, { glob: true });
+const composeUp = async ({
+  START_HEIGHT = '1',
+  STOP_HEIGHT = '1',
+  ANS104_UNBUNDLE_FILTER = '{"always": true}',
+  ANS104_INDEX_FILTER = '{"always": true}',
+  ADMIN_API_KEY = 'secret',
+  BUNDLER_ARWEAVE_WALLET,
+  BUNDLER_ARWEAVE_ADDRESS,
+  AWS_S3_BUCKET = 'ar.io',
+  AWS_S3_PREFIX = 'data',
+  AWS_ACCESS_KEY_ID = 'test',
+  AWS_SECRET_ACCESS_KEY = 'test',
+  AWS_REGION = 'us-east-1',
+  AWS_ENDPOINT = 'http://localstack:4566',
+  ...ENVIRONMENT
+}: Environment = {}) => {
+  await cleanDb();
+
+  return new DockerComposeEnvironment(projectRootPath, 'docker-compose.yaml')
+    .withEnvironment({
+      START_HEIGHT,
+      STOP_HEIGHT,
+      ANS104_UNBUNDLE_FILTER,
+      ANS104_INDEX_FILTER,
+      ADMIN_API_KEY,
+      BUNDLER_ARWEAVE_WALLET,
+      BUNDLER_ARWEAVE_ADDRESS,
+      AWS_S3_BUCKET,
+      AWS_S3_PREFIX,
+      AWS_ACCESS_KEY_ID,
+      AWS_SECRET_ACCESS_KEY,
+      AWS_REGION,
+      AWS_ENDPOINT,
+      TESTCONTAINERS_HOST_OVERRIDE: 'localhost',
+      ...ENVIRONMENT,
+    })
+    .withBuild()
+    .withProfiles('bundler')
+    .withPullPolicy(PullPolicy.alwaysPull())
+    .withWaitStrategy(
+      'upload-service-1',
+      Wait.forLogMessage('Listening on port 5100'),
+    )
+    .withWaitStrategy(
+      'observer-1',
+      Wait.forLogMessage('Listening on port 5050'),
+    )
+    .withWaitStrategy('core-1', Wait.forLogMessage('Listening on port 4000'))
+    .up();
+};
+
+describe('Bundler Sidecar', () => {
+  let bundlesDb: Database;
+  let compose: StartedDockerComposeEnvironment;
+
+  const waitForIndexing = async () => {
+    const getAll = () =>
+      bundlesDb.prepare('SELECT * FROM new_data_items').all();
+
+    while (getAll().length === 0) {
+      console.log('Waiting for data items to be indexed...');
+      await wait(5000);
+    }
+  };
+
+  let jwk: JWKInterface;
+  before(async () => {
+    jwk = await Arweave.crypto.generateJWK();
+    compose = await composeUp({
+      BUNDLER_ARWEAVE_WALLET: JSON.stringify(jwk),
+      BUNDLER_ARWEAVE_ADDRESS: sha256B64Url(fromB64Url(jwk.n)),
+    });
+
+    bundlesDb = new Sqlite(`${projectRootPath}/data/sqlite/bundles.db`);
+  });
+
+  after(async () => {
+    await compose.down();
+    bundlesDb.close();
+  });
+
+  it('optimistically posts data item headers and uses a shared data source exposing data item payloads posted to the upload service', async () => {
+    const signer = new ArweaveSigner(jwk);
+    const dataItem = createData('test data', signer, {
+      tags: [{ name: 'Content-Type', value: 'text/plain' }],
+    });
+    await dataItem.sign(signer);
+
+    // post data to bundler
+    await axios({
+      method: 'post',
+      url: `http://localhost:${3000}/bundler/tx`,
+      headers: { 'Content-Type': 'application/octet-stream' },
+      data: dataItem.getRaw(),
+    });
+
+    // get data from gateway, should be instantly available
+    const res = await axios({
+      method: 'get',
+      url: `http://localhost:${3000}/${dataItem.id}`,
+      validateStatus: () => true,
+    });
+
+    assert.equal(res.data, 'test data');
+
+    await waitForIndexing();
+
+    // Data item headers should be optimistically indexed by core service
+    const stmt = bundlesDb.prepare('SELECT * FROM new_data_items');
+    const dataItems = stmt.all();
+
+    assert.equal(dataItems.length, 1);
+    const importedDataItem = dataItems[0];
+    assert.equal(toB64Url(importedDataItem.id), dataItem.id);
+    assert.equal(importedDataItem.parent_id, null);
+    assert.equal(importedDataItem.root_transaction_id, null);
+    assert.equal(importedDataItem.data_offset, null);
+    assert.equal(toB64Url(importedDataItem.signature), dataItem.signature);
+    assert.equal(toB64Url(importedDataItem.anchor), dataItem.anchor);
+    assert.equal(toB64Url(importedDataItem.target), dataItem.target);
+    assert.equal(
+      toB64Url(importedDataItem.owner_address),
+      sha256B64Url(fromB64Url(dataItem.owner)),
+    );
+    assert.equal(importedDataItem.data_size, 9);
+    assert.equal(importedDataItem.tag_count, 1);
+    assert.equal(importedDataItem.content_type, 'text/plain');
+  });
+});
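Note (not part of the diff): the new end-to-end test drives the full `bundler` compose profile through testcontainers, which takes a while to spin up. For a quick manual check of the same flow, the sketch below posts a signed ANS-104 data item to the sidecar route and immediately reads it back through the gateway. It mirrors the test body above and assumes a stack that is already running locally (for example via the compose file's `bundler` profile) with Envoy listening on port 3000, matching the compose defaults.

```typescript
// Manual sketch of the optimistic-availability flow exercised by the e2e
// test; assumes the gateway stack is up with the bundler profile enabled
// and Envoy reachable at http://localhost:3000.
import axios from 'axios';
import Arweave from 'arweave';
import { createData } from 'arbundles';
import { ArweaveSigner } from 'arbundles/src/signing/index.js';

async function main(): Promise<void> {
  // Any freshly generated JWK can sign the data item; the bundler's own
  // wallet is configured separately via BUNDLER_ARWEAVE_WALLET.
  const jwk = await Arweave.crypto.generateJWK();
  const signer = new ArweaveSigner(jwk);

  const dataItem = createData('test data', signer, {
    tags: [{ name: 'Content-Type', value: 'text/plain' }],
  });
  await dataItem.sign(signer);

  // Post the raw serialized data item to the bundler sidecar route.
  await axios.post('http://localhost:3000/bundler/tx', dataItem.getRaw(), {
    headers: { 'Content-Type': 'application/octet-stream' },
  });

  // The payload should be readable right away, before the item has been
  // bundled and settled on Arweave, because the gateway and the upload
  // service share a data source.
  const res = await axios.get(`http://localhost:3000/${dataItem.id}`, {
    validateStatus: () => true,
  });
  console.log(res.status, res.data); // expect 200 and 'test data'
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```

If the stack is healthy, the read-back can succeed even before the item's header row appears in `new_data_items`; the test's later assertions then confirm that the core service eventually indexes the header into the bundles database.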