Merge pull request #35 from vinkoS993/master
PR for Indexing service (sdk & CLI)
Showing 12 changed files with 904 additions and 68 deletions.
@@ -0,0 +1,17 @@
import { Command } from 'commander';
import { deployIndexer } from './indexing.service';

export function createIndexingCommands(cli: Command) {
  const indexing = cli
    .command('indexing')
    .description('Commands for deployment of indexers on Apillon platform');

  indexing
    .command('deploy')
    .description('Deploy an indexer')
    .argument('<path>', 'path to indexer root folder')
    .requiredOption('-i, --indexer-uuid <uuid>', 'UUID of indexer')
    .action(async function (path: string) {
      await deployIndexer(path, this.optsWithGlobals());
    });
}
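With these commands registered, the deploy flow can be invoked from the CLI as, for example (assuming the published binary is named apillon and API credentials are supplied via the existing global options or environment variables):

apillon indexing deploy ./my-squid --indexer-uuid <indexer uuid>

The path argument points at the indexer project root (the directory containing squid.yaml), and --indexer-uuid identifies the target indexer on the Apillon platform.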
@@ -0,0 +1,21 @@
import { Indexing } from '@apillon/sdk';
import { GlobalOptions } from '../../lib/types';
import { withErrorHandler } from '../../lib/utils';

export async function deployIndexer(
  path: string,
  optsWithGlobals: GlobalOptions,
) {
  await withErrorHandler(async () => {
    console.log(`Deploying indexer: ${path}`);
    const res = await new Indexing(optsWithGlobals)
      .indexer(optsWithGlobals.indexerUuid)
      .deployIndexer(path);

    if (res) {
      console.log(
        `Indexer deployment successfully started! Check Apillon console for status.`,
      );
    }
  });
}
@@ -0,0 +1,82 @@
import axios from 'axios';
import fs from 'fs';
import { ApillonModel } from '../../lib/apillon';
import { ApillonApi } from '../../lib/apillon-api';
import { ApillonLogger } from '../../lib/apillon-logger';
import { IDeployIndexer } from '../../types/indexer';
import { compressIndexerSourceCode } from '../../util/indexer-utils';
import { LogLevel } from '../../docs-index';

export class Indexer extends ApillonModel {
  /**
   * User assigned name of the indexer.
   */
  public name: string = null;

  /**
   * User assigned description of the indexer.
   */
  public description: string = null;

  /**
   * Constructor which should only be called via Indexing class.
   * @param uuid Unique identifier of the indexer.
   * @param data Data to populate the indexer with.
   */
  constructor(uuid: string, data?: Partial<Indexer>) {
    super(uuid);
    this.API_PREFIX = `/indexing/indexers/${uuid}`;
    this.populate(data);
  }

  override async get(): Promise<this> {
    throw new Error('Method not supported.');
  }

  /**
   * Prepare indexer source code, upload it to s3 and deploy the indexer.
   * @param path Path to the indexer source code directory.
   */
  public async deployIndexer(path: string): Promise<any> {
    // Check that the directory exists and contains squid.yaml
    if (!fs.existsSync(path)) {
      return console.error('Invalid path');
    }
    if (!fs.existsSync(`${path}/squid.yaml`)) {
      return console.error('squid.yaml not found in directory');
    }

    // Create tar.gz file
    const numOfFiles = await compressIndexerSourceCode(
      path,
      `${path}/builds/${this.uuid}.tar.gz`,
    );

    if (numOfFiles === 0) {
      return console.error('Source directory is empty');
    }
    ApillonLogger.log(`Compressed ${numOfFiles} files. Uploading to s3...`);

    // Get s3 URL for upload
    const url = await ApillonApi.get<string>(`${this.API_PREFIX}/upload-url`);

    // Upload tar.gz to s3
    const content = fs.readFileSync(`${path}/builds/${this.uuid}.tar.gz`);
    await axios.put(url, content, {
      headers: { 'Content-Type': 'application/gzip' },
    });

    ApillonLogger.log('Upload complete. Deploying indexer...');

    // Call deploy API
    const deployResponse = await ApillonApi.post<IDeployIndexer>(
      `${this.API_PREFIX}/deploy`,
    );
    if (deployResponse.deployment.failed != 'NO') {
      ApillonLogger.log(deployResponse.deployment, LogLevel.ERROR);
      return console.error('Indexer deployment failed!');
    }

    return deployResponse;
  }
}
@@ -0,0 +1,12 @@
import { ApillonModule } from '../../lib/apillon';
import { Indexer } from './indexer';

export class Indexing extends ApillonModule {
  /**
   * @param uuid Unique indexer identifier.
   * @returns An instance of Indexer class.
   */
  public indexer(uuid: string): Indexer {
    return new Indexer(uuid);
  }
}
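For orientation, a minimal usage sketch of the new SDK surface (the key/secret configuration fields and the paths below are illustrative assumptions; the actual wiring mirrors the e2e test further down):

import { Indexing } from '@apillon/sdk';

const indexing = new Indexing({
  key: 'your-api-key', // assumed credential fields, as used by the other SDK modules
  secret: 'your-api-secret',
});

// Compress, upload and deploy the indexer source located at ./my-squid
// for the indexer identified by the given UUID.
const result = await indexing
  .indexer('your-indexer-uuid')
  .deployIndexer('./my-squid');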
@@ -0,0 +1,35 @@
import { Indexing } from '../modules/indexing/indexing';
import { getConfig, getIndexerUUID } from './helpers/helper';

describe('Indexing tests', () => {
  let indexing: Indexing = undefined;
  let indexer_uuid: string = undefined;

  beforeAll(async () => {
    indexing = new Indexing(getConfig());
    indexer_uuid = getIndexerUUID();
  });

  test('Deploy an indexer', async () => {
    const response = await indexing
      .indexer(indexer_uuid)
      .deployIndexer('D:\\Sqd\\moonbeam-squid');

    expect(response).toBeDefined();
    expect(response.lastDeploymentId).toBeTruthy();
    expect(response.status).toBe(5);
    expect(response.deployment).toBeDefined();
  });

  test('Deploy an indexer with invalid path, should return error', async () => {
    const logSpy = jest.spyOn(global.console, 'error');
    await indexing.indexer(indexer_uuid).deployIndexer('some invalid path');
    expect(logSpy).toHaveBeenCalled();
  });

  test('Deploy an indexer with valid path but invalid content, should return error', async () => {
    const logSpy = jest.spyOn(global.console, 'error');
    await indexing.indexer(indexer_uuid).deployIndexer('D:\\Sqd');
    expect(logSpy).toHaveBeenCalled();
  });
});
@@ -0,0 +1,63 @@
export interface IUrlForIndexerSourceCodeUpload {
  url: string;
}

export enum DeploymentType {
  DEPLOY = 'DEPLOY',
  DEPLOY_HARD_RESET = 'DEPLOY_HARD_RESET',
  RESTART = 'RESTART',
  HIBERNATE = 'HIBERNATE',
  DELETE = 'DELETE',
  SCALE = 'SCALE',
  SET_TAG = 'SET_TAG',
  REMOVE_TAG = 'REMOVE_TAG',
}
export enum DeploymentStatus {
  UNPACKING = 'UNPACKING',
  IMAGE_BUILDING = 'IMAGE_BUILDING',
  RESETTING = 'RESETTING',
  ADDING_INGRESS = 'ADDING_INGRESS',
  REMOVING_INGRESS = 'REMOVING_INGRESS',
  SQUID_SYNCING = 'SQUID_SYNCING',
  SQUID_DELETING = 'SQUID_DELETING',
  ADDONS_SYNCING = 'ADDONS_SYNCING',
  ADDONS_DELETING = 'ADDONS_DELETING',
  OK = 'OK',
  DEPLOYING = 'DEPLOYING',
}
export enum DeploymentFailed {
  NO = 'NO',
  UNEXPECTED = 'UNEXPECTED',
  PERMISSIONS = 'PERMISSIONS',
  REQUIREMENTS = 'REQUIREMENTS',
  REQUIRED_SOURCE_FILE_MISSED = 'REQUIRED_SOURCE_FILE_MISSED',
  REQUIRED_SOURCE_FILE_INVALID = 'REQUIRED_SOURCE_FILE_INVALID',
  SOURCE_FILES_BUILD_FAILED = 'SOURCE_FILES_BUILD_FAILED',
}

export interface IDeployIndexer {
  /**
   * Indexer unique identifier.
   */
  indexer_uuid: string;
  /**
   * Indexer status (1 = draft, 5 = active)
   */
  status: number;
  /**
   * Indexer last deployment id - this is the deployment id from the sqd.
   */
  lastDeploymentId: number;
  /**
   * Indexer sqd deployment details.
   */
  deployment: {
    /**
     * Deployment id
     */
    id: number;
    type: DeploymentType;
    status: DeploymentStatus;
    failed: DeploymentFailed;
  };
}
@@ -0,0 +1,118 @@
import fs from 'node:fs';
import path from 'node:path';
import { globSync } from 'glob';
import ignore from 'ignore';
import targz from 'targz';

export function createSquidIgnore(squidDir: string) {
  const ig = ignore().add(
    // default ignore patterns
    ['node_modules', '.git'],
  );

  const ignoreFilePaths = globSync(['.squidignore', '**/.squidignore'], {
    cwd: squidDir,
    nodir: true,
    posix: true,
  });

  if (!ignoreFilePaths.length) {
    return ig.add([
      // squid uploaded archives directory
      '/builds',
      // squid built files
      '/lib',
      // IDE files
      '.idea',
      '.vscode',
    ]);
  }

  for (const ignoreFilePath of ignoreFilePaths) {
    const raw = fs
      .readFileSync(path.resolve(squidDir, ignoreFilePath))
      .toString();

    const ignoreDir = path.dirname(ignoreFilePath);
    const patterns = getIgnorePatterns(ignoreDir, raw);

    ig.add(patterns);
  }

  return ig;
}

export function getIgnorePatterns(ignoreDir: string, raw: string) {
  const lines = raw.split('\n');

  const patterns: string[] = [];
  for (let line of lines) {
    line = line.trim();

    if (line.length === 0) continue;
    if (line.startsWith('#')) continue;

    let pattern =
      line.startsWith('/') || line.startsWith('*/') || line.startsWith('**/')
        ? line
        : `**/${line}`;
    pattern =
      ignoreDir === '.'
        ? pattern
        : `${toRootPattern(ignoreDir)}${toRootPattern(pattern)}`;

    patterns.push(pattern);
  }

  return patterns;
}

function toRootPattern(pattern: string) {
  return pattern.startsWith('/') ? pattern : `/${pattern}`;
}

export function compressIndexerSourceCode(
  srcDir: string,
  destDir: string,
): Promise<any> {
  const squidIgnore = createSquidIgnore(srcDir);
  let filesCount = 0;

  fs.mkdirSync(path.dirname(destDir), { recursive: true });

  return new Promise((resolve, reject) => {
    targz.compress(
      {
        src: srcDir,
        dest: destDir,
        tar: {
          ignore: (name) => {
            const relativePath = path.relative(
              path.resolve(srcDir),
              path.resolve(name),
            );

            if (squidIgnore.ignores(relativePath)) {
              console.log('ignoring ' + relativePath);
              return true;
            } else {
              console.log('adding ' + relativePath);
              filesCount++;
              return false;
            }
          },
        },
      },
      function (err) {
        if (err) {
          console.error(err);
          reject(
            `Compression failed. ${err.message ? 'Error: ' + err.message : ''}`,
          );
        } else {
          resolve(filesCount);
        }
      },
    );
  });
}
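To make the pattern normalization above concrete, here is a small hypothetical example of what getIgnorePatterns produces for a nested .squidignore file (the file location and contents are made up for illustration):

import { getIgnorePatterns } from './indexer-utils';

// Hypothetical <squidDir>/subdir/.squidignore contents
const raw = ['# build output', 'lib', '/dist'].join('\n');

// Comment lines are skipped, relative entries get a '**/' prefix,
// and every pattern is then rooted under the ignore file's directory.
console.log(getIgnorePatterns('subdir', raw));
// -> [ '/subdir/**/lib', '/subdir/dist' ]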