Merge pull request #143 from ar-io/PE-6266-e2e-bundler-sidecar-test
test(bundler sidecar): init data post e2e test PE-6266
djwhitt committed Jun 18, 2024
2 parents f502095 + 78bb6b7 commit fa5f052
Showing 7 changed files with 200 additions and 17 deletions.
6 changes: 3 additions & 3 deletions docker-compose.yaml
@@ -145,7 +145,7 @@ services:
- TVAL_AR_IO_SQLITE_BACKUP_S3_BUCKET_SECRET_KEY=${AR_IO_SQLITE_BACKUP_S3_BUCKET_SECRET_KEY:-}

upload-service:
-image: ghcr.io/ardriveapp/turbo-upload-service:${UPLOAD_SERVICE_IMAGE_TAG:-adae756f264362e14eef8f11d192a53c287587c6}
+image: ghcr.io/ardriveapp/turbo-upload-service:${UPLOAD_SERVICE_IMAGE_TAG:-6299f5497f5de479a1fb17ec86b764a4bf018f7b}
environment:
ARWEAVE_GATEWAY: ${ARWEAVE_GATEWAY:-http://envoy:3000}
PORT: ${UPLOAD_SERVICE_PORT:-5100}
@@ -184,7 +184,7 @@ services:
profiles: ['bundler']

fulfillment-service:
-image: ghcr.io/ardriveapp/turbo-upload-service-fulfillment:${UPLOAD_SERVICE_IMAGE_TAG:-adae756f264362e14eef8f11d192a53c287587c6}
+image: ghcr.io/ardriveapp/turbo-upload-service-fulfillment:${UPLOAD_SERVICE_IMAGE_TAG:-6299f5497f5de479a1fb17ec86b764a4bf018f7b}
environment:
ARWEAVE_GATEWAY: ${ARWEAVE_GATEWAY:-http://envoy:3000}
NODE_ENV: ${NODE_ENV:-local}
@@ -236,7 +236,7 @@ services:
profiles: ['bundler']

localstack:
-image: ghcr.io/ardriveapp/turbo-upload-service-localstack:${UPLOAD_SERVICE_IMAGE_TAG:-adae756f264362e14eef8f11d192a53c287587c6}
+image: ghcr.io/ardriveapp/turbo-upload-service-localstack:${UPLOAD_SERVICE_IMAGE_TAG:-6299f5497f5de479a1fb17ec86b764a4bf018f7b}
ports:
- '4566' # don't expose localstack externally by default
#- "127.0.0.1:4510-4559:4510-4559" # external services port range
14 changes: 9 additions & 5 deletions src/data/ar-io-data-source.test.ts
@@ -19,7 +19,7 @@ import { strict as assert } from 'node:assert';
import { afterEach, before, beforeEach, describe, it, mock } from 'node:test';
import * as winston from 'winston';
import axios from 'axios';
-import { ArIO, ArIOReadable } from '@ar.io/sdk';
+import { AoIORead, IO } from '@ar.io/sdk';
import { Readable } from 'node:stream';
import { RequestAttributes } from '../types.js';
import { ArIODataSource } from './ar-io-data-source.js';
@@ -29,7 +29,7 @@ import { TestDestroyedReadable, axiosStreamData } from './test-utils.js';
let log: winston.Logger;
let dataSource: ArIODataSource;
let requestAttributes: RequestAttributes;
-let mockedArIOInstance: ArIOReadable;
+let mockedArIOInstance: AoIORead;
let mockedAxiosGet: any;

before(async () => {
@@ -54,7 +54,7 @@ beforeEach(async () => {
},
});

-mock.method(ArIO, 'init', () => mockedArIOInstance);
+mock.method(IO, 'init', () => mockedArIOInstance);

mock.method(axios, 'get', mockedAxiosGet);

@@ -64,7 +64,7 @@ beforeEach(async () => {

dataSource = new ArIODataSource({
log,
-arIO: ArIO.init(),
+arIO: IO.init(),
nodeWallet: 'localNode',
});

@@ -78,7 +78,7 @@ afterEach(async () => {

describe('ArIODataSource', () => {
describe('constructor', () => {
-it('should fetch peers and upodate peer list ignoring the running node as a peer', async () => {
+it('should fetch peers and update peer list ignoring the running node as a peer', async () => {
assert.deepEqual(dataSource.peers, {
peer1: 'http://peer1.com',
peer2: 'https://peer2.com',
@@ -115,6 +115,8 @@ describe('ArIODataSource', () => {
},
});

+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
let receivedData = '';

for await (const chunk of data.stream) {
@@ -203,6 +205,8 @@

try {
const data = await dataSource.getData({ id: 'id' });
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
let receivedData = '';

for await (const chunk of data.stream) {
2 changes: 2 additions & 0 deletions src/data/gateway-data-source.test.ts
@@ -160,6 +160,8 @@ describe('GatewayDataSource', () => {

try {
const data = await dataSource.getData({ id: 'id', requestAttributes });
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
let receivedData = '';

for await (const chunk of data.stream) {
8 changes: 8 additions & 0 deletions src/data/read-through-data-cache.test.ts
@@ -285,6 +285,8 @@ describe('ReadThroughDataCache', function () {
});

it('should increment getDataStreamErrorsTotal for broken cached data stream', async function () {
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
let calledWithArgument: string;
mock.method(mockContiguousDataIndex, 'getDataAttributes', () => {
return Promise.resolve({
@@ -297,6 +299,8 @@ });
});
});
mock.method(mockContiguousDataStore, 'get', (hash: string) => {
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
calledWithArgument = hash;
return new TestDestroyedReadable();
});
@@ -307,6 +311,8 @@
requestAttributes,
});

+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
let receivedData = '';

for await (const chunk of result.stream) {
@@ -425,6 +431,8 @@
requestAttributes,
});

+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
let receivedData = '';

for await (const chunk of result.stream) {
5 changes: 2 additions & 3 deletions src/data/tx-chunks-data-source.test.ts
@@ -17,7 +17,6 @@
*/
import { strict as assert } from 'node:assert';
import { afterEach, before, beforeEach, describe, it, mock } from 'node:test';
-import { Readable } from 'node:stream';
import * as winston from 'winston';

import {
@@ -135,7 +134,7 @@ describe('TxChunksDataSource', () => {
requestAttributes,
});

-for await (const chunk of data.stream) {
+for await (const _chunk of data.stream) {
// do nothing
}
} catch (e) {
@@ -165,7 +164,7 @@
requestAttributes,
});

-for await (const chunk of data.stream) {
+for await (const _chunk of data.stream) {
// do nothing
}
} catch (e) {
13 changes: 7 additions & 6 deletions src/routes/graphql/resolvers.test.ts
@@ -28,6 +28,7 @@ import {
resolveTxQuantity,
resolveTxRecipient,
} from '../../../src/routes/graphql/resolvers.js';
import { GqlTransaction } from '../../types.js';

const GQL_TX = {
id: 'LXCrfCRLHB7YyLGAeQoio00qb7LwT3UO3a-2TSDli8Q',
@@ -69,14 +70,14 @@

describe('resolveTxRecipient', () => {
it('should return the recipient', () => {
-const recipient = resolveTxRecipient(GQL_TX);
+const recipient = resolveTxRecipient(GQL_TX as unknown as GqlTransaction);
assert.equal(recipient, '6p817XK-yIX-hBCQ0qD5wbcP05WPQgPKFmwNYC2xtwM');
});

// arweave.net compatibility
it('should return empty string if recipient is undefined', () => {
const tx = { ...GQL_TX, recipient: undefined };
-const recipient = resolveTxRecipient(tx);
+const recipient = resolveTxRecipient(tx as unknown as GqlTransaction);
assert.equal(recipient, '');
});
});
@@ -85,28 +86,28 @@ describe('resolveTxData', () => {
it('should return dataSize and contentType', () => {
// TODO find a tx with content type set
const tx = { ...GQL_TX, contentType: 'text/plain' };
-const dataResult = resolveTxData(tx);
+const dataResult = resolveTxData(tx as unknown as GqlTransaction);
assert.deepEqual(dataResult, { size: '0', type: 'text/plain' });
});
});

describe('resolveTxQuantity', () => {
it('should return quantity in AR and winstons', () => {
-const quantity = resolveTxQuantity(GQL_TX);
+const quantity = resolveTxQuantity(GQL_TX as unknown as GqlTransaction);
assert.deepEqual(quantity, { ar: '0.000007896935', winston: '7896935' });
});
});

describe('resolveTxFee', () => {
it('should return quantity in AR and winstons', () => {
-const fee = resolveTxFee(GQL_TX);
+const fee = resolveTxFee(GQL_TX as unknown as GqlTransaction);
assert.deepEqual(fee, { ar: '0.000000477648', winston: '477648' });
});
});

describe('resolveTxOwner', () => {
it('should return owner address and key', () => {
-const owner = resolveTxOwner(GQL_TX);
+const owner = resolveTxOwner(GQL_TX as unknown as GqlTransaction);
assert.equal(owner.address, 'k3hNqeW_8_WDBz6hwUAsu6DQ47sGXZUP5Q8MJP8BdsE');
assert.equal(
owner.key,
169 changes: 169 additions & 0 deletions test/end-to-end/bundler-sidecar.test.ts
@@ -0,0 +1,169 @@
/**
* AR.IO Gateway
* Copyright (C) 2022-2023 Permanent Data Solutions, Inc. All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { strict as assert } from 'node:assert';
import { after, before, describe, it } from 'node:test';
import { rimraf } from 'rimraf';
import {
DockerComposeEnvironment,
PullPolicy,
StartedDockerComposeEnvironment,
Wait,
} from 'testcontainers';
import wait from 'wait';
import axios from 'axios';
import Sqlite, { Database } from 'better-sqlite3';
import { fromB64Url, sha256B64Url, toB64Url } from '../../src/lib/encoding.js';
import { Environment } from 'testcontainers/build/types.js';
import { createData } from 'arbundles';
import Arweave from 'arweave';
import { ArweaveSigner } from 'arbundles/src/signing/index.js';
import { JWKInterface } from 'arweave/node/lib/wallet.js';

const projectRootPath = process.cwd();

const cleanDb = () =>
rimraf(`${projectRootPath}/data/sqlite/*.db*`, { glob: true });
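
// Bring up the gateway stack from docker-compose.yaml with the 'bundler'
// profile enabled (upload service, fulfillment service, localstack),
// applying test-friendly environment overrides and waiting for the core,
// observer, and upload services to report ready.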
const composeUp = async ({
START_HEIGHT = '1',
STOP_HEIGHT = '1',
ANS104_UNBUNDLE_FILTER = '{"always": true}',
ANS104_INDEX_FILTER = '{"always": true}',
ADMIN_API_KEY = 'secret',
BUNDLER_ARWEAVE_WALLET,
BUNDLER_ARWEAVE_ADDRESS,
AWS_S3_BUCKET = 'ar.io',
AWS_S3_PREFIX = 'data',
AWS_ACCESS_KEY_ID = 'test',
AWS_SECRET_ACCESS_KEY = 'test',
AWS_REGION = 'us-east-1',
AWS_ENDPOINT = 'http://localstack:4566',
...ENVIRONMENT
}: Environment = {}) => {
await cleanDb();

return new DockerComposeEnvironment(projectRootPath, 'docker-compose.yaml')
.withEnvironment({
START_HEIGHT,
STOP_HEIGHT,
ANS104_UNBUNDLE_FILTER,
ANS104_INDEX_FILTER,
ADMIN_API_KEY,
BUNDLER_ARWEAVE_WALLET,
BUNDLER_ARWEAVE_ADDRESS,
AWS_S3_BUCKET,
AWS_S3_PREFIX,
AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY,
AWS_REGION,
AWS_ENDPOINT,
TESTCONTAINERS_HOST_OVERRIDE: 'localhost',
...ENVIRONMENT,
})
.withBuild()
.withProfiles('bundler')
.withPullPolicy(PullPolicy.alwaysPull())
.withWaitStrategy(
'upload-service-1',
Wait.forLogMessage('Listening on port 5100'),
)
.withWaitStrategy(
'observer-1',
Wait.forLogMessage('Listening on port 5050'),
)
.withWaitStrategy('core-1', Wait.forLogMessage('Listening on port 4000'))
.up();
};

describe('Bundler Sidecar', () => {
let bundlesDb: Database;
let compose: StartedDockerComposeEnvironment;

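// Poll the gateway's bundles database until the posted data item's header
// appears in new_data_items; indexing happens asynchronously, so this may
// take several 5-second rounds.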
const waitForIndexing = async () => {
const getAll = () =>
bundlesDb.prepare('SELECT * FROM new_data_items').all();

while (getAll().length === 0) {
console.log('Waiting for data items to be indexed...');
await wait(5000);
}
};

let jwk: JWKInterface;
before(async () => {
jwk = await Arweave.crypto.generateJWK();
compose = await composeUp({
BUNDLER_ARWEAVE_WALLET: JSON.stringify(jwk),
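// An Arweave wallet address is the base64url-encoded SHA-256 hash of the
// wallet's public key modulus (jwk.n).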
BUNDLER_ARWEAVE_ADDRESS: sha256B64Url(fromB64Url(jwk.n)),
});

bundlesDb = new Sqlite(`${projectRootPath}/data/sqlite/bundles.db`);
});

after(async () => {
await compose.down();
bundlesDb.close();
});

it('optimistically posts data item headers and uses a shared data source exposing data item payloads posted to the upload service', async () => {
const signer = new ArweaveSigner(jwk);
const dataItem = createData('test data', signer, {
tags: [{ name: 'Content-Type', value: 'text/plain' }],
});
await dataItem.sign(signer);

// post data to bundler
await axios({
method: 'post',
url: `http://localhost:${3000}/bundler/tx`,
headers: { 'Content-Type': 'application/octet-stream' },
data: dataItem.getRaw(),
});

// get data from gateway, should be instantly available
const res = await axios({
method: 'get',
url: `http://localhost:${3000}/${dataItem.id}`,
validateStatus: () => true,
});

assert.equal(res.data, 'test data');

await waitForIndexing();

// Data item headers should be optimistically indexed by core service
const stmt = bundlesDb.prepare('SELECT * FROM new_data_items');
const dataItems = stmt.all();

assert.equal(dataItems.length, 1);
const importedDataItem = dataItems[0];
assert.equal(toB64Url(importedDataItem.id), dataItem.id);
assert.equal(importedDataItem.parent_id, null);
assert.equal(importedDataItem.root_transaction_id, null);
assert.equal(importedDataItem.data_offset, null);
assert.equal(toB64Url(importedDataItem.signature), dataItem.signature);
assert.equal(toB64Url(importedDataItem.anchor), dataItem.anchor);
assert.equal(toB64Url(importedDataItem.target), dataItem.target);
assert.equal(
toB64Url(importedDataItem.owner_address),
sha256B64Url(fromB64Url(dataItem.owner)),
);
assert.equal(importedDataItem.data_size, 9);
assert.equal(importedDataItem.tag_count, 1);
assert.equal(importedDataItem.content_type, 'text/plain');
});
});
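
For reference, a minimal sketch of the client-side flow this test exercises, distilled from the test body above. It assumes a gateway is already running locally on port 3000 with the bundler profile enabled; the endpoints, ports, and libraries are taken directly from the test.

import axios from 'axios';
import Arweave from 'arweave';
import { createData } from 'arbundles';
import { ArweaveSigner } from 'arbundles/src/signing/index.js';

// Sign an ANS-104 data item with a throwaway wallet.
const jwk = await Arweave.crypto.generateJWK();
const signer = new ArweaveSigner(jwk);
const item = createData('hello world', signer, {
  tags: [{ name: 'Content-Type', value: 'text/plain' }],
});
await item.sign(signer);

// POST the raw data item to the gateway's bundler proxy endpoint.
await axios.post('http://localhost:3000/bundler/tx', item.getRaw(), {
  headers: { 'Content-Type': 'application/octet-stream' },
});

// The gateway and upload service share a data source, so the payload is
// readable immediately, before the item is bundled and posted to Arweave.
const { data } = await axios.get(`http://localhost:3000/${item.id}`);
console.log(data); // 'hello world'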
