diff --git a/.eslintrc.json b/.eslintrc.json
index 9916294..561c99c 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -3,7 +3,7 @@
   "parser": "babel-eslint",
   "plugins": ["flowtype", "standard"],
   "rules": {
-    "camelcase": "error",
+    "camelcase": "off",
     "flowtype/generic-spacing": "off",
     "no-throw-literal": "error",
     "no-var": "error",
diff --git a/.flowconfig b/.flowconfig
index 85771d0..044d40c 100644
--- a/.flowconfig
+++ b/.flowconfig
@@ -18,6 +18,8 @@
 /lib/.*.js.flow
 
+.*/node_modules/oboe/test/json/incomplete.json
+
 [include]
 
 [libs]
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..d9ac6d9
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,14 @@
+### CHANGELOG
+
+Does this branch warrant an entry to the CHANGELOG?
+
+- [ ] Yes
+- [ ] No
+
+### Dependencies
+
+  none
+
+### Description
+
+  none
diff --git a/.github/workflows/asana-attachment.yml b/.github/workflows/asana-attachment.yml
new file mode 100644
index 0000000..50aa506
--- /dev/null
+++ b/.github/workflows/asana-attachment.yml
@@ -0,0 +1 @@
+# removed
\ No newline at end of file
diff --git a/.github/workflows/asana-comment.yml b/.github/workflows/asana-comment.yml
new file mode 100644
index 0000000..50aa506
--- /dev/null
+++ b/.github/workflows/asana-comment.yml
@@ -0,0 +1 @@
+# removed
\ No newline at end of file
diff --git a/.github/workflows/pr-checks.yml b/.github/workflows/pr-checks.yml
new file mode 100644
index 0000000..08adff0
--- /dev/null
+++ b/.github/workflows/pr-checks.yml
@@ -0,0 +1,9 @@
+name: PR Checks
+on: [pull_request]
+jobs:
+  block-wip-pr:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2.0.0
+      - name: Block WIP PR
+        uses: samholmes/block-wip-pr-action@v1.2.0
diff --git a/.github/workflows/pr-rebase.yml b/.github/workflows/pr-rebase.yml
new file mode 100644
index 0000000..b24687d
--- /dev/null
+++ b/.github/workflows/pr-rebase.yml
@@ -0,0 +1,28 @@
+name: PR Rebase
+on:
+  issue_comment:
+    types: [created]
+jobs:
+  rebase:
+    name: Rebase
+    if: >-
+      github.event.issue.pull_request != '' &&
+      (
+        contains(github.event.comment.body, '/autosquash') ||
+        contains(github.event.comment.body, '/fixup') ||
+        contains(github.event.comment.body, '/rebase')
+      )
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout the latest code
+        uses: actions/checkout@v3
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+          fetch-depth: 0 # otherwise, you will fail to push refs to dest repo
+      - name: Automatic Rebase
+        uses: EdgeApp/rebase@changelog-resolver
+        with:
+          autosquash: ${{ true }}
+          changelogResolver: ${{ true }}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.yarnrc b/.yarnrc
new file mode 100644
index 0000000..d52997a
--- /dev/null
+++ b/.yarnrc
@@ -0,0 +1 @@
+--ignore-scripts true
diff --git a/config.json.sample b/config.json.sample
index 54b75d9..bd8dd15 100644
--- a/config.json.sample
+++ b/config.json.sample
@@ -7,6 +7,7 @@
   "coinMarketCapAPiKey": "xxx",
   "shapeShiftApiKey": "xxx",
   "shapeShiftToken": "xxx",
+  "banxaToken": "xxx",
   "libertyXApiKey": "xxx",
   "changellyApiKey": "xxx",
   "changenowApiKey": "xxx",
@@ -32,6 +33,16 @@
   "wyre": {
     "periscopeClientKey": "xxx"
   },
+  "bity": {
+    "clientId": "",
+    "clientSecret": ""
+  },
+  "paytrieCredentials": {
+    "apiKey": "xxx",
+    "secretToken": "xxx"
+  },
+  "switchainApiKey": "",
+  "transak_api_secret": "xxx",
   "coinMarketCapExcludeLookup": ["USD", "EUR", "GBP"],
   "coinApiExcludeLookup": ["USD", "EUR", "GBP"]
 }
diff --git a/package.json b/package.json
index bf2467e..bc46a9f 100644
--- a/package.json
+++ b/package.json
@@ -20,12 +20,14 @@
   "dependencies": {
     "api-changelly": "git://github.com/changelly/api-changelly.git#8e350f3",
     "await-sleep": "0.0.1",
+    "axios": "^0.19.2",
     "biggystring": "^3.0.1",
     "changelly_api": "git://github.com/changelly/api-changelly.git#8e350f3",
     "csvtojson": "^2.0.10",
     "json-format": "^1.0.1",
     "jsonfile": "^4.0.0",
     "node-fetch": "^1.7.3",
+    "sleep": "^6.3.0",
     "sprintf-js": "^1.1.1",
     "web3": "^1.0.0-beta.52"
   },
diff --git a/src/banxa.js b/src/banxa.js
index ae8ebdf..12a3af8 100644
--- a/src/banxa.js
+++ b/src/banxa.js
@@ -1,12 +1,29 @@
 // @flow
 import type { StandardTx, SwapFuncParams } from './checkSwapService.js'
 const js = require('jsonfile')
-const fs = require('fs')
+const fetch = require('node-fetch')
 const { checkSwapService } = require('./checkSwapService.js')
-const csv = require('csvtojson')
+const crypto = require('crypto')
+const sleep = require('sleep')
+const confFileName = './config.json'
+const config = js.readFileSync(confFileName)
 
 const BANXA_CACHE = './cache/banRaw.json'
-const BANXA_FOLDER = './cache/banxa'
+
+const MONTH_MAP = {
+  'Jan': '01',
+  'Feb': '02',
+  'Mar': '03',
+  'Apr': '04',
+  'May': '05',
+  'Jun': '06',
+  'Jul': '07',
+  'Aug': '08',
+  'Sep': '09',
+  'Oct': '10',
+  'Nov': '11',
+  'Dec': '12'
+}
 
 async function doBanxa (swapFuncParams: SwapFuncParams) {
   return checkSwapService(fetchBanxa,
@@ -16,50 +33,144 @@
   )
 }
 
+async function callBanxaAPI (queryDate, pageLimit, page) {
+  const nonce = Math.floor(new Date() / 1000)
+
+  const apiQuery = `/api/orders?start_date=${queryDate}&end_date=${queryDate}&per_page=${pageLimit}&page=${page}`
+
+  const text = `GET\n${apiQuery}\n${nonce}`
+  const secret = config.banxaToken
+  const key = 'EDGE'
+  const hmac = crypto.createHmac('sha256', secret)
+    .update(text)
+    .digest('hex')
+  const authHeader = key + ':' + hmac + ':' + nonce
+
+  const headers = {
+    'Authorization': 'Bearer ' + authHeader,
+    'Content-Type': 'application/json'
+  }
+
+  return fetch(`https://edge.banxa.com${apiQuery}`, {headers: headers})
+}
+
+function processOrders (orders, ssFormatTxs) {
+  for (const order of orders) {
+    if (order.status === 'complete') {
+      // Reformat the date from DD-MMM-YYYY HH:MM:SS to YYYY-MM-DDTHH:MM:SS
+      const origDateTime = order.created_at
+      const dateTimeParts = origDateTime.split(' ')
+      const dateParts = dateTimeParts[0].split('-')
+      const month = MONTH_MAP[dateParts[1]]
+      const reformattedDate = `${dateParts[2]}-${month}-${dateParts[0]}T${dateTimeParts[1]}Z`
+
+      // Flip the amounts if the order is a SELL
+      let inputAmount = order.fiat_amount
+      let inputCurrency = order.fiat_code
+      let outputAmount = order.coin_amount
+      let outputCurrency = order.coin_code
+      if (order.order_type === 'CRYPTO-SELL') {
+        inputAmount = order.coin_amount
+        inputCurrency = order.coin_code
+        outputAmount = order.fiat_amount
+        outputCurrency = order.fiat_code
+      }
+
+      const ssTx: StandardTx = {
+        status: 'complete',
+        inputTXID: order.ref.toString(),
+        inputAddress: '',
+        inputCurrency: inputCurrency,
+        inputAmount: inputAmount,
+        outputAddress: order.wallet_address,
+        outputCurrency: outputCurrency,
+        outputAmount: outputAmount,
+        timestamp: new Date(reformattedDate).getTime() / 1000
+      }
+      ssFormatTxs.push(ssTx)
+    }
+  }
+}
+
 async function fetchBanxa (swapFuncParams: SwapFuncParams) {
   if (!swapFuncParams.useCache) {
-    console.log('Fetching Banxa from CSV...')
+    console.log('Fetching Banxa from API...')
   }
-  let diskCache = { txs: [] }
+  let diskCache = { last_date: '2019-08-26', txs: [] }
   try {
     diskCache = js.readFileSync(BANXA_CACHE)
   } catch (e) {}
-
-  const transactionMap = {}
   const ssFormatTxs: Array<StandardTx> = []
-  const files = await fs.readdirSync(BANXA_FOLDER)
+  const cachedLastDate = new Date(diskCache.last_date)
 
-  for (const fileName of files) {
-    const filePath = `./cache/banxa/${fileName}`
-    const csvData = await csv().fromFile(filePath)
-    for (const order of csvData) {
-      const date = new Date(order['UTC Time'])
-      const timestamp = date.getTime() / 1000
-      const uniqueIdentifier = order['Order Id']
-      const ssTx: StandardTx = {
-        status: 'complete',
-        inputTXID: uniqueIdentifier,
-        inputAddress: '',
-        inputCurrency: order['Source Currency'],
-        inputAmount: parseFloat(order['Source Amount']),
-        outputAddress: '',
-        outputCurrency: order['Target Currency'],
-        outputAmount: order['Target Amount'],
-        timestamp: timestamp
+  // Go back a week just to make sure you capture any late completing orders
+  const startQueryDate = new Date(cachedLastDate.getTime() - 7 * 86400000)
+
+  const now = new Date()
+  const today = new Date(now.toISOString().split('T')[0]).getTime()
+
+  let queryDate = startQueryDate.toISOString().split('T')[0]
+
+  if (!swapFuncParams.useCache) {
+    console.log(`BANXA: Loading orders starting from ${queryDate}`)
+    // Loop through the days
+    while (startQueryDate.getTime() !== today) {
+      let page = 1
+      const pageLimit = 100
+      queryDate = startQueryDate.toISOString().split('T')[0]
+      // Move last date on 1 day
+      startQueryDate.setTime(startQueryDate.getTime() + 86400000)
+      let attempt = 0
+
+      // Loop through the pages for this day
+      while (1) {
+        let orders = []
+
+        let apiResponse
+        while (attempt < 3) {
+          console.log(
+            `BANXA: Calling API with date ${queryDate}, result size ${pageLimit} and offset ${page} for attempt ${attempt}`
+          )
+          apiResponse = await callBanxaAPI(queryDate, pageLimit, page)
+          const status = await apiResponse.status
+          // Handle the situation where the API is rate limiting the requests
+          if (status !== 200) {
+            console.log(
+              `BANXA: Response code ${status}. Retrying after 2 second sleep...`
+            )
+            sleep.sleep(2)
+            attempt++
+          } else {
+            break
+          }
+        }
+        if (attempt === 3) break
+
+        if (apiResponse) {
+          const ordersData = await apiResponse.json()
+
+          if (ordersData && ordersData.data && ordersData.data.orders.length) {
+            orders = ordersData.data.orders
+          } else break
+
+          processOrders(orders, ssFormatTxs)
+
+          if (orders.length < pageLimit) break
+          page++
+        }
+      }
+      if (attempt === 3) {
+        console.log(`BANXA: Unable to process date ${queryDate}`)
+        break
       }
-      // console.log('ssTx: ', ssTx)
-      transactionMap[uniqueIdentifier] = ssTx
     }
   }
-  for (const id in transactionMap) {
-    ssFormatTxs.push(transactionMap[id])
-    ssFormatTxs.sort((a, b) => a.timestamp - b.timestamp)
-  }
-  // console.log('ssFormatTxs is: ', ssFormatTxs)
+  diskCache.last_date = queryDate > cachedLastDate ? queryDate : cachedLastDate
+  console.log(`lastDate ${queryDate}`)
   const out = {
     diskCache,
     newTransactions: ssFormatTxs
diff --git a/src/bity.js b/src/bity.js
index 4dd1742..a9cca57 100644
--- a/src/bity.js
+++ b/src/bity.js
@@ -1,11 +1,16 @@
 // @flow
 import type { StandardTx, SwapFuncParams } from './checkSwapService.js'
-const fs = require('fs')
+const bns = require('biggystring')
 const { checkSwapService } = require('./checkSwapService.js')
 const js = require('jsonfile')
+const fetch = require('node-fetch')
+const confFileName = './config.json'
+const config = js.readFileSync(confFileName)
 
 const BITY_CACHE = './cache/bityRaw.json'
-const BITY_FOLDER = './cache/bity'
+const BITY_TOKEN_URL = 'https://connect.bity.com/oauth2/token'
+const BITY_API_URL = 'https://reporting.api.bity.com/exchange/v1/summary/monthly/'
+const PAGE_SIZE = 100
 
 async function doBity (swapFuncParams: SwapFuncParams) {
   return checkSwapService(fetchBity,
@@ -15,57 +20,109 @@
   )
 }
 
+let queryYear = '2020'
+let queryMonth = '1'
+const todayMonth = bns.add(new Date().getMonth().toString(), '1')
+const todayYear = new Date().getFullYear().toString()
+
 async function fetchBity (swapFuncParams: SwapFuncParams) {
   if (!swapFuncParams.useCache) {
     console.log('Fetching Bity from JSON...')
   }
-  const diskCache = { txs: [] }
+  let diskCache = { txs: [], offset: {lastCheckedMonth: queryMonth, lastCheckedYear: queryYear} }
+  try {
+    const diskCacheOnDisk = js.readFileSync(BITY_CACHE)
+    diskCache = {...diskCache, ...diskCacheOnDisk}
+    // Get most recent query from cache and subtract a month
+    queryMonth = diskCache.offset.lastCheckedMonth
+    queryYear = diskCache.offset.lastCheckedYear
+  } catch (e) {}
+
+  // Get auth token
+  const credentials = {
+    'grant_type': 'client_credentials',
+    scope: 'https://auth.bity.com/scopes/reporting.exchange',
+    client_id: config.bity.clientId,
+    client_secret: config.bity.clientSecret
+  }
+
+  const tokenParams = Object.keys(credentials).map((key) => {
+    return encodeURIComponent(key) + '=' + encodeURIComponent(credentials[key])
+  }).join('&')
+
+  const newTransactions = []
 
-  const transactionMap = {}
-  const ssFormatTxs: Array<StandardTx> = []
+  try {
+    const tokenResponse = await fetch(BITY_TOKEN_URL, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/x-www-form-urlencoded'
+      },
+      body: tokenParams
+    })
+    const tokenReply = await tokenResponse.json()
+    const authToken = tokenReply.access_token
 
-  const files = await fs.readdirSync(BITY_FOLDER)
-  // console.log('files: ', files)
+    // Query monthly orders
 
-  for (const fileName of files) {
-    const filePath = `./cache/bity/${fileName}`
-    // console.log('filePath is: ', filePath)
-    const jsonData = js.readFileSync(filePath)
+    let keepQuerying = true
+    let page = 1
 
-    for (const order of jsonData.orders) {
-      if (!order.input.amount || !order.output.amount || !order.timestamp_executed) {
-        continue
+    while (keepQuerying) {
+      const monthlyResponse = await fetch(`${BITY_API_URL}${queryYear}-${queryMonth}/orders?page=${page}`,
+        {
+          method: 'GET',
+          headers: { Authorization: `Bearer ${authToken}` }
+        }
+      )
+      let monthlyTxs = []
+      if (monthlyResponse.ok) {
+        monthlyTxs = await monthlyResponse.json().catch(e => [])
       }
-      const date = new Date(order.timestamp_executed + 'Z')
-      const timestamp = date.getTime() / 1000
-      const uniqueIdentifier = `${order.id}`
-      const inputAmount = Number(order.input.amount)
-      const outputAmount = Number(order.output.amount)
-      const ssTx: StandardTx = {
-        status: 'complete',
-        inputTXID: uniqueIdentifier,
-        inputAddress: '',
-        inputCurrency: order.input.currency,
-        inputAmount,
-        outputAddress: '',
-        outputCurrency: order.output.currency,
-        outputAmount: outputAmount.toString(),
-        timestamp: timestamp
+
+      for (const tx of monthlyTxs) {
+        const ssTx: StandardTx = {
+          status: 'complete',
+          inputTXID: tx.id,
+          inputAddress: '',
+          inputCurrency: tx.input.currency.toUpperCase(),
+          inputAmount: parseFloat(tx.input.amount),
+          outputAddress: '',
+          outputCurrency: tx.output.currency.toUpperCase(),
+          outputAmount: tx.output.amount.toString(),
+          timestamp: Date.parse(tx.timestamp_executed.concat('Z')) / 1000
+        }
+        newTransactions.push(ssTx)
+      }
+
+      if (monthlyTxs.length < PAGE_SIZE && queryMonth === todayMonth && queryYear === todayYear) {
+        if (queryMonth === '1') {
+          diskCache.offset.lastCheckedMonth = '12'
+          diskCache.offset.lastCheckedYear = bns.sub(queryYear, '1')
+        } else {
+          diskCache.offset.lastCheckedMonth = bns.sub(queryMonth, '1')
+          diskCache.offset.lastCheckedYear = queryYear
+        }
+        keepQuerying = false
+      } else if (monthlyTxs.length === PAGE_SIZE) {
+        page += 1
+      } else {
+        page = 1
+        if (bns.lt(queryMonth, '12')) {
+          queryMonth = bns.add(queryMonth, '1')
+        } else {
+          queryMonth = '1'
+          queryYear = bns.add(queryYear, '1')
+        }
       }
-      // console.log('ssTx: ', ssTx)
-      transactionMap[uniqueIdentifier] = ssTx
     }
+  } catch (e) {
+    console.log(e)
   }
-  for (const id in transactionMap) {
-    ssFormatTxs.push(transactionMap[id])
-    ssFormatTxs.sort((a, b) => a.timestamp - b.timestamp)
-  }
-
-  // console.log('ssFormatTxs is: ', ssFormatTxs)
 
   const out = {
     diskCache,
-    newTransactions: ssFormatTxs
+    newTransactions
   }
   return out
 }
diff --git a/src/checkSwapService.js b/src/checkSwapService.js
index a4c9a7a..c858be6 100644
--- a/src/checkSwapService.js
+++ b/src/checkSwapService.js
@@ -97,24 +97,34 @@ async function queryCoinApiForUsdRate (currencyCode: string, date: string) {
   }
 }
 
-async function queryCoinMarketCapForUsdRate (currencyCode: string, date: string) {
+async function queryCoinMarketCapForUsdRate (
+  currencyCode: string,
+  date: string
+) {
   const currentTimestamp = Date.now()
   const targetDate = new Date(date)
   const targetTimestamp = targetDate.getTime()
   // if less than 90 days old (cmc API restriction)
-  const soonerThan90Days = currentTimestamp - targetTimestamp < 89 * 86400 * 1000
+  const soonerThan90Days =
+    currentTimestamp - targetTimestamp < 89 * 86400 * 1000
   const isApiKeyConfigured = config.coinMarketCapAPiKey
-  const isCurrencyExcluded = coinMarketCapExcludeLookup.find(c => c === currencyCode.toUpperCase())
+  const isCurrencyExcluded = coinMarketCapExcludeLookup.find(
+    (c) => c === currencyCode.toUpperCase()
+  )
 
   if (currencyCode === 'USDT20' || currencyCode === 'USDTERC20') {
     currencyCode = 'USDT'
   }
 
-  if (
-    soonerThan90Days &&
-    isApiKeyConfigured &&
-    !isCurrencyExcluded
-  ) {
+  if (currencyCode === 'BCHABC') {
+    currencyCode = 'BCH'
+  }
+
+  if (currencyCode === 'BCHSV') {
+    currencyCode = 'BSV'
+  }
+
+  if (soonerThan90Days && isApiKeyConfigured && !isCurrencyExcluded) {
     const url = `https://pro-api.coinmarketcap.com/v1/cryptocurrency/quotes/historical?symbol=${currencyCode}&time_end=${date}&count=1`
 
     let response
@@ -129,8 +139,17 @@
     try {
      response = await fetch(url, fetchOptions)
       const jsonObj = await response.json()
-      if (!jsonObj || !jsonObj.data || !jsonObj.data.quotes || !jsonObj.data.quotes[0] || !jsonObj.data.quotes[0].quote || !jsonObj.data.quotes[0].quote.USD) {
-        console.log(`No rate from CMC: ${currencyCode} date:${date} response.status:${response.status}`)
+      if (
+        !jsonObj ||
+        !jsonObj.data ||
+        !jsonObj.data.quotes ||
+        !jsonObj.data.quotes[0] ||
+        !jsonObj.data.quotes[0].quote ||
+        !jsonObj.data.quotes[0].quote.USD
+      ) {
+        console.log(
+          `No rate from CMC: ${currencyCode} date:${date} response.status:${response.status}`
+        )
         return ''
       }
       return jsonObj.data.quotes[0].quote.USD.price.toString()
@@ -142,6 +161,49 @@
     return ''
   }
 }
 
+async function queryEdgeRatesForUsdRate (currencyCode: string, date: string) {
+  const targetDate = new Date(date)
+
+  if (currencyCode === 'USDT20' || currencyCode === 'USDTERC20') {
+    currencyCode = 'USDT'
+  }
+
+  if (currencyCode === 'BCHABC') {
+    currencyCode = 'BCH'
+  }
+
+  if (currencyCode === 'BCHSV') {
+    currencyCode = 'BSV'
+  }
+
+  {
+    const url = `https://rates1.edge.app/v1/exchangeRate?currency_pair=${currencyCode}_USD&date=${targetDate.toISOString()}`
+
+    let response
+    const fetchOptions = {
+      method: 'GET',
+      json: true
+    }
+
+    try {
+      response = await fetch(url, fetchOptions)
+      const jsonObj = await response.json()
+      if (
+        !jsonObj ||
+        !jsonObj.exchangeRate
+      ) {
+        console.log(
+          `No rate from rates.edge: ${url} response.status:${response.status}`
+        )
+        return ''
+      }
+      return jsonObj.exchangeRate
+    } catch (e) {
+      console.log(`No rates.edge ${url}`, e)
+      return ''
+    }
+  }
+}
 
 async function checkSwapService (
   theFetch: Function,
@@ -384,11 +446,14 @@ async function getHistoricalUsdRate (currencyCode: string, date: string) {
   const now = Date.now()
   let usdRate = queryRatePairs(currencyCode, date)
   if (usdRate !== COINAPI_RATE_PAIR_ERROR) {
+    if (!usdRate) {
+      usdRate = await queryEdgeRatesForUsdRate(currencyCode, date)
+    }
     if (!usdRate) {
       usdRate = await queryCoinMarketCapForUsdRate(currencyCode, date)
-      if (!usdRate) {
-        usdRate = await queryCoinApiForUsdRate(currencyCode, date)
-      }
+    }
+    if (!usdRate) {
+      usdRate = await queryCoinApiForUsdRate(currencyCode, date)
     }
 
     if (!usdRate && USD_COINS[currencyCode]) {
diff --git a/src/paytrie.js b/src/paytrie.js
new file mode 100644
index 0000000..6e940d8
--- /dev/null
+++ b/src/paytrie.js
@@ -0,0 +1,65 @@
+// @flow
+import type {StandardTx, SwapFuncParams} from './checkSwapService.js'
+const js = require('jsonfile')
+const fetch = require('node-fetch')
+const confFileName = './config.json'
+const config = js.readFileSync(confFileName)
+const {checkSwapService} = require('./checkSwapService.js')
+
+const CACHE_FILE = './cache/ptRaw.json'
+
+async function doPaytrie (swapFuncParams: SwapFuncParams) {
+  return checkSwapService(fetchPaytrie,
+    CACHE_FILE,
+    'PT',
+    swapFuncParams
+  )
+}
+
+async function fetchPaytrie (swapFuncParams: SwapFuncParams) {
+  if (!swapFuncParams.useCache) console.log('Fetching Paytrie...')
+  let diskCache = {offset: '2020-01-01', txs: []}
+  try {
+    diskCache = js.readFileSync(CACHE_FILE)
+  } catch (e) {
+  }
+  const ssFormatTxs: Array<StandardTx> = []
+  const startDate = diskCache.offset || '2020-01-01'
+  const endDate = new Date().toISOString().slice(0, 10)
+
+  while (1 && !swapFuncParams.useCache) {
+    const apiResponse = await fetch(`https://api1.paytrie.com/getEdgeTransactions?startDate=${startDate}&endDate=${endDate}`, {
+      headers: {
+        'x-api-key': config.paytrieCredentials.apiKey,
+        'Authorization': 'Bearer ' + config.paytrieCredentials.secretToken
+      },
+      method: 'post'
+    }).catch(err => console.error(err))
+
+    const orders = await apiResponse.json()
+
+    if (orders && orders.length > 1) {
+      for (const order of orders) {
+        const ssTx: StandardTx = {
+          status: 'complete',
+          inputTXID: order.inputTXID,
+          inputAddress: order.inputAddress,
+          inputCurrency: order.inputCurrency,
+          inputAmount: order.inputAmount,
+          outputAddress: order.outputAddress,
+          outputCurrency: order.outputCurrency,
+          outputAmount: order.outputAmount.toString(),
+          timestamp: new Date(order.timestamp).getTime() / 1000
+        }
+        ssFormatTxs.push(ssTx)
+      }
+    } else return {}
+    break
+  }
+
+  diskCache.offset = new Date(new Date(endDate).getTime() - (14 * 24 * 60 * 60 * 1000)).toISOString().slice(0, 10)
+  const out = {diskCache, newTransactions: ssFormatTxs}
+  return out
+}
+
+module.exports = {doPaytrie}
diff --git a/src/reporter.js b/src/reporter.js
index 1e137d4..d69ef40 100644
--- a/src/reporter.js
+++ b/src/reporter.js
@@ -10,6 +10,7 @@ const { doFox } = require('./fox.js')
 const { doFaast } = require('./faast.js')
 const { doCoinswitch } = require('./coinswitch.js')
 const { doMoonpay } = require('./moonpay.js')
+const { doTransak } = require('./transak.js')
 const { doWyre } = require('./wyre.js')
 const { doBog } = require('./bitsOfGold.js')
 const { doGodex } = require('./godex.js')
@@ -17,6 +18,8 @@ const { doSafello } = require('./safello.js')
 const { doSimplex } = require('./simplex.js')
 const { doBanxa } = require('./banxa.js')
 const { doBity } = require('./bity.js')
+const { doSwitchain } = require('./switchain.js')
+const { doPaytrie } = require('./paytrie.js')
 const { bns } = require('biggystring')
 const config = require('../config.json')
 const { sprintf } = require('sprintf-js')
@@ -26,6 +29,10 @@ async function main (swapFuncParams: SwapFuncParams) {
     console.error('doChangenow failed')
     return {}
   })
+  const rTnk = await doTransak(swapFuncParams).catch(e => {
+    console.error('doTransak failed')
+    return {}
+  })
   const rCha = await doChangelly(swapFuncParams).catch(e => {
     console.error('doChangelly failed')
     return {}
   })
@@ -95,6 +102,16 @@
     return {}
   })
 
+  const rSwi = await doSwitchain(swapFuncParams).catch(e => {
+    console.error('doSwitchain failed')
+    return {}
+  })
+
+  const rPt = await doPaytrie(swapFuncParams).catch(e => {
+    console.error('doPaytrie failed', e)
+    return {}
+  })
+
   printTxDataMap('CHN', rChn)
   printTxDataMap('CHA', rCha)
   printTxDataMap('FAA', rFaa)
@@ -106,12 +123,15 @@
   printTxDataMap('CS', rCs)
   printTxDataMap('GDX', rGdx)
   printTxDataMap('MNP', rMnp)
+  printTxDataMap('TNK', rTnk)
   printTxDataMap('WYR', rWyr)
   printTxDataMap('SAF', rSaf)
   printTxDataMap('BOG', rBog)
   printTxDataMap('SIM', rSim)
   printTxDataMap('BAN', rBan)
   printTxDataMap('BITY', rBity)
+  printTxDataMap('SWI', rSwi)
+  printTxDataMap('PT', rPt)
 
   console.log(new Date(Date.now()))
 }
@@ -269,6 +289,9 @@ async function report (argv: Array<string>) {
     config.godex && config.godex.apiKey
       ? await doSummaryFunction(doGodex)
       : {}
+  const swResults =
+    config.switchainApiKey ? await doSummaryFunction(doSwitchain)
+      : {}
 
   // non-swap (crypto-to-fiat and vice-versa)
   const lxResults = config.libertyXApiKey
@@ -281,6 +304,9 @@
   const mnpResults = config.moonpayApiKey
     ? await doSummaryFunction(doMoonpay)
     : {}
+  const tnkResults = config.transak_api_secret
+    ? await doSummaryFunction(doTransak)
+    : {}
   const wyrResults = config.wyre && config.wyre.periscopeClientKey
     ? await doSummaryFunction(doWyre)
     : {}
@@ -290,6 +316,10 @@
   const bogResults = config.bog && config.bog.apiKey
     ? await doSummaryFunction(doBog)
     : {}
+  const ptResults =
+    config.paytrieCredentials && config.paytrieCredentials.apiKey
+      ? await doSummaryFunction(doPaytrie)
+      : {}
   const simResults = await doSummaryFunction(doSimplex)
   const banResults = await doSummaryFunction(doBanxa)
@@ -303,6 +333,7 @@
   combineResults(results, foxResults)
   combineResults(results, csResults)
   combineResults(results, gxResults)
+  combineResults(results, swResults)
 
   console.log('\n***** Change NOW Daily *****')
   printTxDataMap('CHN', cnResults.daily)
@@ -344,6 +375,11 @@
   console.log('\n***** GoDex Monthly *****')
   printTxDataMap('GX', gxResults.monthly)
 
+  console.log('\n***** Switchain Daily *****')
+  printTxDataMap('SWI', swResults.daily)
+  console.log('\n***** Switchain Monthly *****')
+  printTxDataMap('SWI', swResults.monthly)
+
   console.log('\n***** Libertyx Daily *****')
   printTxDataMap('LBX', lxResults.daily)
   console.log('\n***** Libertyx Monthly *****')
@@ -359,6 +395,11 @@
   console.log('\n***** Moonpay Daily *****')
   printTxDataMap('MNP', mnpResults.daily)
 
+  console.log('\n***** Transak Monthly *****')
+  printTxDataMap('TNK', tnkResults.monthly)
+  console.log('\n***** Transak Daily *****')
+  printTxDataMap('TNK', tnkResults.daily)
+
   console.log('\n***** Wyre Monthly *****')
   printTxDataMap('WYR', wyrResults.monthly)
   console.log('\n***** Wyre Daily *****')
@@ -389,6 +430,11 @@
   console.log('\n***** Bity Daily *****')
   printTxDataMap('BITY', bityResults.daily)
 
+  console.log('\n***** Paytrie Monthly *****')
+  printTxDataMap('PT', ptResults.monthly)
+  console.log('\n***** Paytrie Daily *****')
+  printTxDataMap('PT', ptResults.daily)
+
   console.log('\n***** Swap Totals Monthly*****')
   printTxDataMap('TTS', results.monthly)
   console.log('\n***** Swap Totals Daily *****')
@@ -399,12 +445,14 @@
   combineResults(fiatResults, lxResults)
   combineResults(fiatResults, btResults)
   combineResults(fiatResults, mnpResults)
+  combineResults(fiatResults, tnkResults)
   combineResults(fiatResults, wyrResults)
   combineResults(fiatResults, safResults)
   combineResults(fiatResults, bogResults)
   combineResults(fiatResults, simResults)
   combineResults(fiatResults, banResults)
   combineResults(fiatResults, bityResults)
+  combineResults(fiatResults, ptResults)
 
   console.log('\n***** Fiat Totals Monthly *****')
   printTxDataMap('TTF', fiatResults.monthly)
diff --git a/src/safello.js b/src/safello.js
index 411154f..21457fe 100644
--- a/src/safello.js
+++ b/src/safello.js
@@ -30,9 +30,9 @@ async function fetchSafello (swapFuncParams: SwapFuncParams) {
   const ssFormatTxs: Array<StandardTx> = []
 
   let offset = 0
-  const url = `https://app.safello.com/v1/partner/get-orders?offset=${offset}`
 
   while (1 && !swapFuncParams.useCache) {
+    const url = `https://app.safello.com/v1/partner/get-orders?offset=${offset}`
     const response = await fetch(url, {
       method: 'GET',
       headers: {
diff --git a/src/simplex.js b/src/simplex.js
index 11cef76..cc00915 100644
--- a/src/simplex.js
+++ b/src/simplex.js
@@ -1,12 +1,13 @@
 // @flow
 import type { StandardTx, SwapFuncParams } from './checkSwapService.js'
 const js = require('jsonfile')
-const fs = require('fs')
+const confFileName = './config.json'
+const CONFIG = js.readFileSync(confFileName)
 const { checkSwapService } = require('./checkSwapService.js')
-const csv = require('csvtojson')
+const axios = require('axios')
 
 const SIMPLEX_CACHE = './cache/simRaw.json'
-const SIMPLEX_FOLDER = './cache/simplex'
+const API_START_DATE = new Date('2020-08-10T00:00:00.000Z').getTime() / 1000
 
 async function doSimplex (swapFuncParams: SwapFuncParams) {
   return checkSwapService(fetchSimplex,
@@ -18,47 +19,101 @@
 async function fetchSimplex (swapFuncParams: SwapFuncParams) {
   if (!swapFuncParams.useCache) {
-    console.log('Fetching Simplex from CSV...')
+    console.log('Fetching Simplex...')
   }
-  let diskCache = { txs: [] }
+  let diskCache = { txs: [], lastTxTimestamp: 0 }
   const transactionMap = {}
   const ssFormatTxs: Array<StandardTx> = []
   try {
     diskCache = js.readFileSync(SIMPLEX_CACHE)
+    // console.log('diskCache: ', diskCache)
   } catch (e) {}
-  const files = await fs.readdirSync(SIMPLEX_FOLDER)
-  // console.log('files: ', files)
+  // flag for fresh vs already-populated cache
+  const initialLastTxTimestamp = diskCache.lastTxTimestamp || 0
+  let maxTimestamp = diskCache.lastTxTimestamp || 0
+  let continueFromSyntax = ''
+  let has_more_pages = false
+  let next_page_cursor = ''
+  let retry = 4
 
-  for (const fileName of files) {
-    const filePath = `./cache/simplex/${fileName}`
-    // console.log('filePath is: ', filePath)
-    const csvData = await csv().fromFile(filePath)
+  try {
+    while (1 && !swapFuncParams.useCache) {
+      // console.log('----------------')
+      // console.log('initiallastTxTimestamp: ', initiallastTxTimestamp)
+      // console.log('lastTxTimestamp: ', lastTxTimestamp)
+      // console.log('maxTimestamp: ', maxTimestamp)
+      // console.log('minTimestamp: ', minTimestamp)
+      if (next_page_cursor) continueFromSyntax = `continue_from=${next_page_cursor}&`
+      const url = `https://turnkey.api.simplex.com/transactions?${continueFromSyntax}limit=1000&starting_at=${initialLastTxTimestamp}`
+      console.log('url: ', url)
+      const csvData = await axios({
+        url,
+        headers: {
+          'X-API-KEY': CONFIG.simplex.apiKey
+        }
+      }).catch(e => {
+        if (!--retry) {
+          throw e
+        }
+        return null
+      })
 
-    for (const order of csvData) {
-      if (!order.total_amount_usd || !order.total_amount_crypto) {
+      if (!csvData) {
         continue
       }
-      const date = new Date(order.processed_at_utc + ':00.000Z')
-      const timestamp = date.getTime() / 1000
-      const uniqueIdentifier = `${timestamp}-${order.total_amount_crypto.replace('.', '')}`
-      const ssTx: StandardTx = {
-        status: 'complete',
-        inputTXID: uniqueIdentifier,
-        inputAddress: '',
-        inputCurrency: order.currency,
-        inputAmount: parseFloat(order.total_amount_usd.replace('$', '').replace(',', '')),
-        outputAddress: '',
-        outputCurrency: order.crypto_currency,
-        outputAmount: order.total_amount_crypto,
-        timestamp: timestamp
+
+      has_more_pages = csvData.data.has_more_pages
+      next_page_cursor = csvData.data.next_page_cursor
+
+      const responseTxs = csvData.data.data
+
+      for (const order of responseTxs) {
+        if (!order.fiat_total_amount || !order.amount_crypto) {
+          continue
+        }
+        const timestamp = order.created_at
+        if (timestamp < API_START_DATE) {
+          continue
+        }
+
+        const uniqueIdentifier = order.order_id
+        const ssTx: StandardTx = {
+          status: 'complete',
+          inputTXID: uniqueIdentifier,
+          inputAddress: '',
+          inputCurrency: 'USD',
+          inputAmount: parseFloat(order.amount_usd),
+          outputAddress: '',
+          outputCurrency: order.crypto_currency,
+          outputAmount: order.amount_crypto,
+          timestamp: timestamp
+        }
+
+        if (timestamp > maxTimestamp) maxTimestamp = timestamp
+
+        transactionMap[uniqueIdentifier] = ssTx
+
+        // if transaction is before the cutoff timestamp
+        // then stop the loop
+
+        if (timestamp < initialLastTxTimestamp) {
+          has_more_pages = false
+        }
+      }
+      if (has_more_pages === false) {
+        console.log('responseTxs.length: ', responseTxs.length)
+        // set the lastTxTimestamp for the cache to one week before latest tx
+        diskCache.lastTxTimestamp = maxTimestamp - 60 * 60 * 24 * 7
+        break
       }
-      // console.log('ssTx: ', ssTx)
-      transactionMap[uniqueIdentifier] = ssTx
     }
+  } catch (error) {
+    console.log('error: ', error)
   }
+
   for (const id in transactionMap) {
     ssFormatTxs.push(transactionMap[id])
     ssFormatTxs.sort((a, b) => a.timestamp - b.timestamp)
diff --git a/src/switchain.js b/src/switchain.js
new file mode 100644
index 0000000..85b251c
--- /dev/null
+++ b/src/switchain.js
@@ -0,0 +1,84 @@
+// @flow
+
+import type { StandardTx, SwapFuncParams } from './checkSwapService.js'
+const js = require('jsonfile')
+const fetch = require('node-fetch')
+const SS_QUERY_PAGES = 3
+const confFileName = './config.json'
+const config = js.readFileSync(confFileName)
+const { checkSwapService } = require('./checkSwapService.js')
+
+const FILE_CACHE = './cache/switchainRaw.json'
+const PAGE_LIMIT = 100
+
+async function doSwitchain (swapFuncParams: SwapFuncParams) {
+  return checkSwapService(fetchSwitchain, FILE_CACHE, 'SWI', swapFuncParams)
+}
+
+async function fetchSwitchain (swapFuncParams: SwapFuncParams) {
+  if (!swapFuncParams.useCache) {
+    console.log('Fetching Switchain...')
+  }
+
+  let diskCache = { txs: [], queryAll: true }
+  try {
+    diskCache = js.readFileSync(FILE_CACHE)
+  } catch (e) {}
+
+  const newTransactions = []
+  let page = 1
+
+  while (1 && !swapFuncParams.useCache) {
+    try {
+      const request = `https://api.switchain.com/rest/v1/ordersinfo?limit=${PAGE_LIMIT}&page=${page}`
+      const options = {
+        method: 'GET',
+        headers: {
+          'authorization': `Bearer ${config.switchainApiKey}`
+        }
+      }
+      const response = await fetch(request, options)
+      const result = await response.json()
+      const txs = result.orders
+      for (const tx of txs) {
+        if (tx.status === 'confirmed' && tx.appId === config.switchainApiKey) {
+          const date = new Date(tx.createdAt)
+          const timestamp = date.getTime() / 1000
+          const pair = tx.pair.split('-')
+
+          const ssTx: StandardTx = {
+            status: 'complete',
+            inputTXID: tx.depositTxId,
+            inputAddress: tx.depositAddress,
+            inputCurrency: pair[0].toUpperCase(),
+            inputAmount: parseFloat(tx.amountFrom),
+            outputAddress: tx.withdrawAddress,
+            outputCurrency: pair[1].toUpperCase(),
+            outputAmount: tx.rate,
+            timestamp
+          }
+          newTransactions.push(ssTx)
+        }
+      }
+
+      if (txs.length < PAGE_LIMIT) {
+        break
+      }
+    } catch (e) {
+      break
+    }
+    page++
+    if (page > SS_QUERY_PAGES && !diskCache.queryAll) {
+      break
+    }
+  }
+
+  diskCache.queryAll = false
+  const out = {
+    diskCache,
+    newTransactions
+  }
+  return out
+}
+
+module.exports = { doSwitchain }
diff --git a/src/transak.js b/src/transak.js
new file mode 100644
index 0000000..46090e3
--- /dev/null
+++ b/src/transak.js
@@ -0,0 +1,65 @@
+// @flow
+import type {StandardTx, SwapFuncParams} from './checkSwapService.js'
+const js = require('jsonfile')
+const fetch = require('node-fetch')
+const confFileName = './config.json'
+const config = js.readFileSync(confFileName)
+const {checkSwapService} = require('./checkSwapService.js')
+
+const CACHE_FILE = './cache/tnkRaw.json'
+const pageLimit = 100
+
+async function doTransak (swapFuncParams: SwapFuncParams) {
+  return checkSwapService(fetchTransak,
+    CACHE_FILE,
+    'TNK',
+    swapFuncParams
+  )
+}
+
+async function fetchTransak (swapFuncParams: SwapFuncParams) {
+  if (!swapFuncParams.useCache) console.log('Fetching Transak...')
+  let diskCache = {offset: 0, txs: []}
+  try {
+    diskCache = js.readFileSync(CACHE_FILE)
+  } catch (e) {
+  }
+  let offset = diskCache.offset >= 0 ? diskCache.offset : 0
+  const ssFormatTxs: Array<StandardTx> = []
+
+  while (1 && !swapFuncParams.useCache) {
+    let orders = []
+
+    const apiResponse = await fetch(`https://api.transak.com/api/v1/partners/orders/?partnerAPISecret=${config.transak_api_secret}&limit=${pageLimit}&skip=${offset}`)
+    const ordersData = await apiResponse.json()
+
+    if (ordersData && ordersData.response && ordersData.response.length) orders = ordersData.response
+    else return {}
+
+    for (const order of orders) {
+      if (order.status === 'COMPLETED') {
+        const ssTx: StandardTx = {
+          status: 'complete',
+          inputTXID: order.id,
+          inputAddress: order.fromWalletAddress,
+          inputCurrency: order.fiatCurrency,
+          inputAmount: order.fiatAmount,
+          outputAddress: order.walletAddress,
+          outputCurrency: order.cryptocurrency,
+          outputAmount: order.cryptoAmount.toString(),
+          timestamp: new Date(order.completedAt).getTime() / 1000
+        }
+        ssFormatTxs.push(ssTx)
+      }
+    }
+
+    if (orders.length < pageLimit) break
+    offset += pageLimit
+  }
+
+  diskCache.offset = offset - 500
+  const out = {diskCache, newTransactions: ssFormatTxs}
+  return out
+}
+
+module.exports = {doTransak}
diff --git a/src/wyre.js b/src/wyre.js
index 7e265e0..fc49120 100644
--- a/src/wyre.js
+++ b/src/wyre.js
@@ -32,9 +32,10 @@ function parseTxStr (txStr) {
     completedAt: txItems[4],
     sourceAmount: txItems[5],
     sourceCurrency: txItems[6],
-    destAmount: txItems[7],
-    destCurrency: txItems[8],
-    failureReason: txItems[9]
+    usdFeeEquiv: txItems[7],
+    destAmount: txItems[8],
+    destCurrency: txItems[9],
+    usdEquiv: txItems[10]
   }
 }
diff --git a/yarn.lock b/yarn.lock
index 0269603..c6d5b80 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -221,6 +221,13 @@ aws4@^1.8.0:
   resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f"
   integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==
 
+axios@^0.19.2:
+  version "0.19.2"
+  resolved "https://registry.yarnpkg.com/axios/-/axios-0.19.2.tgz#3ea36c5d8818d0d5f8a8a97a6d36b86cdc00cb27"
+  integrity sha512-fjgm5MvRHLhx+osE2xoekY70AhARk3a6hkN+3Io1jc00jtquGvxYlKlsFUhmUET0V5te6CcZI7lcv2Ym61mjHA==
+  dependencies:
+    follow-redirects "1.5.10"
+
 babel-code-frame@^6.22.0, babel-code-frame@^6.26.0:
   version "6.26.0"
   resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b"
@@ -849,6 +856,13 @@ debug@2.6.9, debug@^2.2.0, debug@^2.6.8, debug@^2.6.9:
   dependencies:
     ms "2.0.0"
 
+debug@=3.1.0:
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
+  integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==
+  dependencies:
+    ms "2.0.0"
+
 decode-uri-component@^0.2.0:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545"
@@ -1564,6 +1578,13 @@ flow-remove-types@^1.2.3:
     babylon "^6.15.0"
     vlq "^0.2.1"
 
+follow-redirects@1.5.10:
+  version "1.5.10"
+  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.10.tgz#7b7a9f9aea2fdff36786a94ff643ed07f4ff5e2a"
+  integrity sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==
+  dependencies:
+    debug "=3.1.0"
+
 for-each@^0.3.3:
   version "0.3.3"
   resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e"
@@ -2318,20 +2339,10 @@ locate-path@^2.0.0:
     p-locate "^2.0.0"
     path-exists "^3.0.0"
 
-lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.4:
-  version "4.17.11"
-  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
-  integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
-
-lodash@^4.17.3:
-  version "4.17.15"
-  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
-  integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
-
-lodash@^4.3.0:
-  version "4.17.5"
-  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.5.tgz#99a92d65c0272debe8c96b6057bc8fbfa3bed511"
-  integrity sha512-svL3uiZf1RwhH+cWrfZn3A4+U58wbP0tGVTLQPbjplZxZ8ROD9VLuNgsRniTlLe7OlSqR79RUehXgpBW/s0IQw==
+lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.3.0:
+  version "4.17.19"
+  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
+  integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
 
 loose-envify@^1.0.0:
   version "1.4.0"
@@ -2522,6 +2533,11 @@ nan@^2.14.0:
   resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c"
   integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==
 
+nan@^2.14.1:
+  version "2.14.1"
+  resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.1.tgz#d7be34dfa3105b91494c3147089315eff8874b01"
+  integrity sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw==
+
 nano-json-stream-parser@^0.1.2:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/nano-json-stream-parser/-/nano-json-stream-parser-0.1.2.tgz#0cc8f6d0e2b622b479c40d499c46d64b755c6f5f"
@@ -3269,6 +3285,13 @@ simple-get@^2.7.0:
     once "^1.3.1"
     simple-concat "^1.0.0"
 
+sleep@^6.3.0:
+  version "6.3.0"
+  resolved "https://registry.yarnpkg.com/sleep/-/sleep-6.3.0.tgz#c524e0e6d8d2e45d3f14e0ba5650fbe45f2ae876"
+  integrity sha512-+WgYl951qdUlb1iS97UvQ01pkauoBK9ML9I/CMPg41v0Ze4EyMlTgFTDDo32iYj98IYqxIjDMRd+L71lawFfpQ==
+  dependencies:
+    nan "^2.14.1"
+
 slice-ansi@1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-1.0.0.tgz#044f1a49d8842ff307aad6b505ed178bd950134d"