Skip to content

Commit

Permalink
Merge pull request #1 from AcalaNetwork/add-whole-stack
Browse files Browse the repository at this point in the history
add euphrates analytics stack
  • Loading branch information
shunjizhan authored Oct 31, 2023
2 parents 7511ef4 + 5a51ad6 commit 40917d9
Show file tree
Hide file tree
Showing 28 changed files with 3,764 additions and 1 deletion.
4 changes: 4 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
Dockerfile
.dockerignore
.git
node_modules
39 changes: 39 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# CI workflow: build the data-porter image and push it to Docker Hub.
# Triggers on pushes to master and on v* release tags.
name: Build Docker Images

on:
  push:
    branches:
      - master
    tags:
      - 'v*'

jobs:
  build-image:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v3

      # Credentials come from repository secrets.
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      # Derive image tags: semver for v* tags, the commit sha always,
      # and `latest` only on the default branch.
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: acala/data-porter
          tags: |
            type=semver,pattern={{raw}}
            type=raw,value={{sha}}
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Build and push
        uses: docker/build-push-action@v3
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
14 changes: 14 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Runtime image for the data-porter scripts (run directly via ts-node, no build step).
FROM node:16-alpine

WORKDIR /app

# Install dependencies first so this layer is cached unless package.json changes.
COPY ./data-porter/package.json ./package.json
RUN yarn install --immutable

# Copy the application source after the dependency layer.
COPY ./data-porter .

# NOTE(review): nothing in the visible sources listens on a port — confirm 3000 is actually used.
EXPOSE 3000

# Drop root privileges for runtime.
USER node

# Invoked as `docker run <image> <yarn-script>`, e.g. `update:dune`.
ENTRYPOINT ["yarn"]
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
# euphrates-analytics
# Analytics
Tools for Acala Analytics, currently for Euphrates only
3 changes: 3 additions & 0 deletions data-porter/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
PASSWORD_PROD=
PASSWORD_DEV=
API_KEY=
64 changes: 64 additions & 0 deletions data-porter/.eslintrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
// ESLint configuration for the data-porter TypeScript sources.
// Uses the TS parser + import ordering plugins; most opinionated TS rules
// are relaxed below, with a short reason on each.
module.exports = {
  root: true,
  parser: '@typescript-eslint/parser',
  plugins: ['@typescript-eslint', 'import', 'sort-imports-es6-autofix'],
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:import/recommended',
    'plugin:import/typescript'
  ],
  rules: {
    indent: [2, 2, { SwitchCase: 1 }],
    quotes: [2, 'single'],
    semi: [1, 'always'],
    'no-trailing-spaces': [2],
    'quote-props': [2, 'as-needed'],
    'eol-last': [2, 'always'],
    'object-curly-spacing': [2, 'always'],
    'comma-dangle': [2, {
      arrays: 'always-multiline',
      objects: 'always-multiline',
      imports: 'always-multiline',
      exports: 'always-multiline',
      functions: 'only-multiline',
    }],

    /* ---------- turn off ---------- */
    '@typescript-eslint/no-extra-semi': 0,
    '@typescript-eslint/no-use-before-define': 0,
    '@typescript-eslint/explicit-member-accessibility': 0,
    '@typescript-eslint/naming-convention': 0,
    '@typescript-eslint/no-explicit-any': 0, // any is sometimes unavoidable
    '@typescript-eslint/consistent-type-definitions': 0, // can use Type and Interface
    '@typescript-eslint/explicit-function-return-type': 0, // type inference on return type is useful
    '@typescript-eslint/no-parameter-properties': 0,
    '@typescript-eslint/typedef': 0,
    'no-unused-expressions': 0, // short circuit if
    'max-lines': 0,
    '@typescript-eslint/no-empty-function': 'off',
    '@typescript-eslint/explicit-module-boundary-types': 'off',
    'sort-imports-es6-autofix/sort-imports-es6': 'warn',
    '@typescript-eslint/ban-ts-comment': 'off',
    'no-useless-escape': 'off',
    '@typescript-eslint/no-non-null-asserted-optional-chain': 'off',
    'import/no-named-as-default-member': 'off',
    'import/no-named-as-default': 'off',
    '@typescript-eslint/no-non-null-assertion': 'off',
    // underscore-prefixed identifiers are deliberately-unused placeholders
    '@typescript-eslint/no-unused-vars': [
      'warn',
      {
        argsIgnorePattern: '^_',
        varsIgnorePattern: '^_',
        caughtErrorsIgnorePattern: '^_'
      }
    ]
  },
  settings: {
    // resolve imports using the project's tsconfig paths
    'import/resolver': {
      typescript: {
        project: 'tsconfig.json'
      }
    }
  }
};
8 changes: 8 additions & 0 deletions data-porter/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Acala Analytics Scripts
First, fill in the values in `.env.example`, then rename it: `mv .env.example .env`

pull data from postgres and upload to dune
```
yarn update:dune
```

32 changes: 32 additions & 0 deletions data-porter/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
{
"name": "acala-data-porter",
"version": "1.0.0",
"main": "src/index.ts",
"license": "MIT",
"scripts": {
"update:dune": "ts-node src/index.ts"
},
"dependencies": {
"axios": "^1.5.1",
"dotenv": "^16.3.1",
"envalid": "^8.0.0",
"papaparse": "^5.4.1",
"pg": "^8.11.3",
"lodash": "^4.17.21"
},
"devDependencies": {
"@types/axios": "^0.14.0",
"@types/lodash": "^4.14.200",
"@types/node": "^20.8.4",
"@types/papaparse": "^5",
"@types/pg": "^8.10.4",
"@typescript-eslint/eslint-plugin": "^6.7.5",
"@typescript-eslint/parser": "^6.7.5",
"eslint": "^8.51.0",
"eslint-import-resolver-typescript": "^3.5.5",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-sort-imports-es6-autofix": "^0.6.0",
"ts-node": "^10.9.1",
"typescript": "^5.2.2"
}
}
3 changes: 3 additions & 0 deletions data-porter/src/actions/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Barrel file: re-exports the three pipeline stages (pull → transform → upload).
export * from './pull';
export * from './transform';
export * from './upload';
72 changes: 72 additions & 0 deletions data-porter/src/actions/pull.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import { Client, ClientConfig } from 'pg';
import dotenv from 'dotenv';

dotenv.config();

/** A single Euphrates transaction, as selected from postgres. */
export interface Tx {
  timestamp: string;
  pool_id: string;
  amount: string;
  from: string;
  type: string;
}

/** A raw DB row: the Tx columns plus whatever extra columns the table carries. */
export interface Row extends Tx {
  [key: string]: any;
}

/** Which schema/tables to pull, and the filenames they map to. */
interface QueryTarget {
  schema: string,
  tables: string[],
  // NOTE(review): filenames is not consumed anywhere in this file — confirm intended use.
  filenames: string[],
}
// Connection settings (host/port/user/password/...) plus the query target.
type QueryParams = ClientConfig & QueryTarget;

// Returns the name of every table under the given schema.
const getAllTables = async (client: Client, schema: string) => {
  console.log(`querying all tables under schema ${schema} ...`);

  const result = await client.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = $1
  `, [schema]);

  const names: string[] = [];
  for (const { table_name } of result.rows) {
    names.push(table_name);
  }
  return names;
};

/**
 * Connects to postgres and dumps every row from the requested tables.
 *
 * @param host/port/database/user/password - standard pg connection settings.
 * @param schema - schema to read from.
 * @param tables - tables to dump; when omitted, every table in the schema.
 * @returns one array of rows per table, in the same order as `tables`.
 * @throws rethrows any connect/query error after logging it.
 */
export const pullDataFromDb = async ({
  host,
  port,
  database,
  user,
  password,
  schema,
  tables,
}: QueryParams): Promise<Row[][]> => {
  // property shorthand instead of the repetitive `host: host, ...`
  const client = new Client({ host, port, database, user, password });

  const res: Row[][] = [];
  try {
    await client.connect();

    const tableNames = tables ?? await getAllTables(client, schema);

    for (const table of tableNames) {
      const data = await client.query(`SELECT * FROM "${schema}"."${table}"`);
      res.push(data.rows);
    }
  } catch (err) {
    // Bug fix: previously the error was swallowed and an empty array returned,
    // so the caller's `const [rawData] = ...` got undefined and crashed later
    // with an unrelated error. Log for context, then fail fast.
    console.error('Error fetching data:', err);
    throw err;
  } finally {
    // always release the connection, success or failure
    await client.end();
  }

  return res;
};
33 changes: 33 additions & 0 deletions data-porter/src/actions/transform.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import { Row, Tx } from './pull';
import { flow } from 'lodash';
import { unparse } from 'papaparse';

// Projects each row down to just the given columns.
// Improvement: the original spread-inside-reduce rebuilt the accumulator object
// for every column (accidental O(k²) per row); Object.fromEntries builds it once.
const pickColumns = (columns: string[]) => (csvData: Row[]) => csvData.map(d =>
  Object.fromEntries(columns.map(col => [col, d[col]])) as Tx,
);

// this shape is compatible with dune
// Formats a timestamp string as `YYYY-MM-DD HH:mm:ss`, always in UTC.
// Bug fix: the previous version used the local-time getters (getFullYear,
// getHours, ...), so the uploaded timestamps shifted with the machine's
// timezone. DB timestamps are presumably UTC (TODO confirm against the
// subql schema), so format via toISOString for a deterministic result.
// NOTE: an unparseable input now throws (toISOString on an Invalid Date)
// instead of silently producing `NaN-NaN-NaN ...`.
const formatDate = (input: string): string => {
  const date = new Date(input);
  return date.toISOString().slice(0, 19).replace('T', ' ');
};

// Normalizes each row's timestamp into the simplified dune-compatible shape.
const toSimpleTimestamp = (csvData: Tx[]) => {
  return csvData.map<Tx>(row => {
    const timestamp = formatDate(row.timestamp);
    return { ...row, timestamp };
  });
};

/**
 * Full CSV pipeline: keep only the Tx columns, normalize timestamps,
 * then serialize to a CSV string via papaparse.
 * (Explicit composition — behaviorally identical to the lodash `flow` chain.)
 */
export const transformCSV = (rows: Row[]): string => {
  const picked = pickColumns(['timestamp', 'pool_id', 'amount', 'from', 'type'])(rows);
  const normalized = toSimpleTimestamp(picked);
  return unparse(normalized);
};
39 changes: 39 additions & 0 deletions data-porter/src/actions/upload.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import { DUNE_URL } from '../consts';
import axios from 'axios';

/** Arguments for a single Dune CSV upload request. */
interface UploadParams {
  data: string,         // CSV payload to upload
  description: string,  // table description shown on Dune
  tableName: string,    // target Dune table name
  apiKey: string,       // sent as the X-Dune-Api-Key header
}

/**
 * Uploads a CSV string to Dune's table-upload endpoint.
 *
 * @returns the response body from Dune on success.
 * @throws Error when Dune reports failure. Note: axios itself rejects on
 *   non-2xx statuses, so the explicit status check is a belt-and-braces guard.
 */
export const uploadToDune = async ({
  data,
  apiKey,
  tableName,
  description,
}: UploadParams) => {
  console.log(`uploading data to dune table ${tableName} ...`);

  const headers = {
    'X-Dune-Api-Key': apiKey,
  };

  const payload = {
    table_name: tableName,
    description,
    is_private: false,
    data,
  };

  const res = await axios.post(DUNE_URL, payload, { headers });

  if (res.status !== 200 || res.data.success !== true) {
    // Bug fix: JSON.stringify(res) throws a TypeError on axios responses
    // (circular references via res.request); serialize only the useful parts.
    throw new Error(
      `upload data to Dune failed: status=${res.status}, body=${JSON.stringify(res.data)}`
    );
  }

  console.log('upload finished!');

  return res.data;
};
15 changes: 15 additions & 0 deletions data-porter/src/consts.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
// Dune's CSV table-upload endpoint (API v1).
export const DUNE_URL = 'https://api.dune.com/api/v1/table/upload/csv';

// Production subql postgres — the `-ro` host and `postgres_ro` user suggest a
// read-only replica/account. Passwords are deliberately absent here; they are
// supplied via environment variables at runtime.
export const DB_CONFIG_PROD = {
  host: 'evm-subql-cluster.cluster-ro-cwi35kgo8jvg.ap-southeast-1.rds.amazonaws.com',
  port: 5432,
  database: 'postgres',
  user: 'postgres_ro',
};

// Dev subql postgres.
export const DB_CONFIG_DEV = {
  host: 'subql-evm.cluster-cspmstlhvanj.ap-southeast-1.rds.amazonaws.com',
  port: 5432,
  database: 'postgres',
  user: 'postgres',
};
41 changes: 41 additions & 0 deletions data-porter/src/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
import { cleanEnv, str } from 'envalid';
import dotenv from 'dotenv';

import { DB_CONFIG_DEV } from './consts';
import {
pullDataFromDb,
transformCSV,
uploadToDune,
} from './actions';

dotenv.config();

// Validate required secrets up front so a missing .env fails fast with a
// clear envalid error instead of a confusing runtime failure.
const env = cleanEnv(process.env, {
  PASSWORD_DEV: str(),
  API_KEY: str(),
});

/** Pulls euphrates stake txs from the dev DB and uploads them to Dune. */
const main = async () => {
  console.log('fetching data from db ...');

  const [rawData] = await pullDataFromDb({
    ...DB_CONFIG_DEV,
    schema: 'euphrates-2',
    tables: ['stake_txes'],
    filenames: ['euphrates_stake.csv'],
    password: env.PASSWORD_DEV,
  });

  console.log('data fetching finished!');

  const data = transformCSV(rawData);

  await uploadToDune({
    data,
    apiKey: env.API_KEY,
    tableName: 'euphrates_stake',
    description: 'euphrates_stake',
  });
};

// Bug fix: a bare `main();` is a floating promise — a rejection was unhandled
// and the process could exit without a clear failure signal. Log the error
// and set a non-zero exit code so CI/cron callers see the failure.
main().catch(err => {
  console.error(err);
  process.exitCode = 1;
});
Loading

0 comments on commit 40917d9

Please sign in to comment.