Skip to content

Commit

Permalink
feat: allow specifying the list of tests (#23)
Browse files Browse the repository at this point in the history
  • Loading branch information
derevnjuk authored Mar 30, 2023
1 parent 0ab0958 commit 7406ee5
Show file tree
Hide file tree
Showing 6 changed files with 365 additions and 29 deletions.
56 changes: 56 additions & 0 deletions README.md

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,9 @@ inputs:
description: 'Scan Name'
default: 'GitHub Scan'
required: false
tests:
description: 'A list of tests which you want to run during a scan.'
required: false

outputs:
url:
Expand Down
120 changes: 120 additions & 0 deletions src/config.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import { Discovery, validateDiscovery } from './discovery';
import { TestType, validateTests } from './tests';
import { URL } from 'url';

/**
 * A single request-level exclusion rule: requests matching the given
 * URL patterns and/or HTTP methods are skipped during a scan.
 */
export interface RequestExclusion {
  // URL patterns to exclude — NOTE(review): pattern syntax (glob vs regex) not visible here; confirm against the API docs.
  patterns?: string[];
  // HTTP methods to exclude — presumably method names such as GET/POST; verify against the API.
  methods?: string[];
}

/**
 * Parts of the target to exclude from scanning.
 */
export interface Exclusions {
  // Parameter names to skip during the scan.
  params?: string[];
  // Request-level exclusion rules (see RequestExclusion).
  requests?: RequestExclusion[];
}

/**
 * Scan configuration assembled from the action's inputs and validated
 * by validateConfig before being submitted.
 */
export interface Config {
  // Human-readable scan name.
  name: string;
  // Discovery types for the scan; validated by validateDiscovery.
  discoveryTypes: Discovery[];
  // Optional exclusion rules (params/requests).
  exclusions?: Exclusions;
  // NOTE(review): semantics of `module` not visible here — callers pass 'dast' as a default; confirm allowed values.
  module?: string;
  // Entry-point URLs; required iff discoveryTypes includes "crawler".
  crawlerUrls?: string[];
  // Uploaded file reference; required iff discoveryTypes includes "oas" or "archive".
  fileId?: string;
  // Host filter list — presumably restricts which hosts are scanned; verify against the API.
  hostsFilter?: string[];
  // Tests to run; validated by validateTests.
  tests?: TestType[];
}

// URL schemes rejected by isValidUrl. Each value keeps the trailing
// colon so it compares directly against the WHATWG URL `protocol`
// property (e.g. new URL('ftp://x').protocol === 'ftp:').
const invalidUrlProtocols: ReadonlySet<string> = new Set<string>([
  'javascript:',
  'file:',
  'data:',
  'mailto:',
  'ftp:',
  'blob:',
  'about:',
  'ssh:',
  'tel:',
  'view-source:',
  'ws:',
  'wss:'
]);

/**
 * Checks whether a string parses as a URL whose scheme is not on the
 * deny-list above. Unparseable strings are reported as invalid.
 */
export const isValidUrl = (url: string) => {
  let parsed: URL;

  try {
    parsed = new URL(url);
  } catch {
    // Not a well-formed URL at all.
    return false;
  }

  return !invalidUrlProtocols.has(parsed.protocol);
};

/**
 * Ensures crawler URLs and the "crawler" discovery type are configured
 * consistently: supplying crawler URLs requires the crawler discovery
 * type, and selecting the crawler discovery type requires a non-empty
 * list of crawler URLs.
 *
 * @param crawlerUrls entry-point URLs, if any were supplied
 * @param discoveryTypes the discovery types configured for the scan
 * @throws Error when the combination is inconsistent or the URL list is empty.
 */
function validateCrawlerUrls(
  crawlerUrls: string[] | undefined,
  discoveryTypes: Discovery[]
) {
  if (crawlerUrls) {
    if (!discoveryTypes.includes(Discovery.CRAWLER)) {
      // Fixed message grammar: "a crawler URLs" -> "crawler URLs".
      throw new Error(
        `Invalid discovery. When specifying crawler URLs, the discovery type must be "crawler". The current discovery types are: ${discoveryTypes.join(
          ', '
        )}`
      );
    }

    if (!crawlerUrls.length) {
      throw new Error('No crawler URLs configured.');
    }
  } else if (discoveryTypes.includes(Discovery.CRAWLER)) {
    // Fixed message grammar: dropped the dangling "either" — there is
    // only one discovery type in question here.
    throw new Error(
      'Invalid discovery. When setting the discovery type to "crawler", the crawler URLs must be provided.'
    );
  }
}

/**
 * Ensures a file ID and the file-based discovery types ("oas",
 * "archive") are configured consistently: a file ID requires one of
 * those discovery types, and either of those discovery types requires
 * a file ID.
 *
 * @param fileId the uploaded file reference, if any
 * @param discoveryTypes the discovery types configured for the scan
 * @throws Error when the combination is inconsistent.
 */
function validateFileId(
  fileId: string | undefined,
  discoveryTypes: Discovery[]
) {
  const usesFileDiscovery =
    discoveryTypes.includes(Discovery.OAS) ||
    discoveryTypes.includes(Discovery.ARCHIVE);

  if (fileId && !usesFileDiscovery) {
    throw new Error(
      `Invalid discovery. When specifying a file ID, the discovery type must be either "oas" or "archive". The current discovery types are: ${discoveryTypes.join(
        ', '
      )}`
    );
  }

  if (!fileId && usesFileDiscovery) {
    throw new Error(
      `Invalid discovery. When setting a discovery type to either "oas" or "archive", the file ID must be provided.`
    );
  }
}

/**
 * Validates a scan configuration before submission: discovery types,
 * file ID / crawler URL consistency, and (when present) the test list.
 *
 * @throws Error from any of the underlying validators.
 */
export const validateConfig = (config: Config) => {
  validateDiscovery(config.discoveryTypes);
  validateFileId(config.fileId, config.discoveryTypes);
  validateCrawlerUrls(config.crawlerUrls, config.discoveryTypes);

  if (config.tests) {
    validateTests(config.tests);
  }
};
49 changes: 49 additions & 0 deletions src/discovery.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
/**
 * Supported discovery types for a scan target.
 * NOTE(review): the string values appear to be submitted to the API
 * as-is (see the scan config payload) — confirm before renaming.
 */
export enum Discovery {
  ARCHIVE = 'archive',
  CRAWLER = 'crawler',
  OAS = 'oas'
}

/**
 * Validates the discovery type list: every entry must be a known
 * Discovery value, entries must be unique, and multi-type lists must
 * not contain a disallowed combination.
 *
 * @throws Error on unknown types, duplicates, or disallowed combinations.
 */
export const validateDiscovery = (discoveryTypes: Discovery[]) => {
  for (const type of discoveryTypes) {
    if (!isValidDiscovery(type)) {
      throw new Error('Unknown discovery type supplied.');
    }
  }

  const unique = new Set<Discovery>(discoveryTypes);

  if (unique.size !== discoveryTypes.length) {
    throw new Error('Discovery contains duplicate values.');
  }

  // Combination rules only matter when more than one type is present.
  if (unique.size !== 1) {
    disallowDiscoveryCombination(unique);
  }
};

const isValidDiscovery = (x: Discovery) => Object.values(Discovery).includes(x);

const disallowDiscoveryCombination = (discoveryTypes: Set<Discovery>): void => {
const disallowedCombinations = getDisallowedDiscoveryCombination([
...discoveryTypes
]);

if (disallowedCombinations.length) {
const [firstInvalidCombination]: [Discovery, readonly Discovery[]][] =
disallowedCombinations;

throw new Error(
`The discovery list cannot include both ${
firstInvalidCombination?.[0]
} and any of ${firstInvalidCombination?.[1].join(', ')} simultaneously.`
);
}
};

// Maps a discovery type to the other types it must not be combined
// with: currently "oas" cannot be mixed with "crawler" or "archive".
const disallowedDiscoveryCombinations = new Map([
  [Discovery.OAS, [Discovery.CRAWLER, Discovery.ARCHIVE]]
]);

// Returns the (base, incompatible-with) rules whose base type appears
// in the supplied discovery list.
const getDisallowedDiscoveryCombination = (discoveryTypes: Discovery[]) =>
  Array.from(disallowedDiscoveryCombinations).filter(([base]) =>
    discoveryTypes.includes(base)
  );
53 changes: 24 additions & 29 deletions src/index.ts
Original file line number Diff line number Diff line change
@@ -1,25 +1,8 @@
import * as core from '@actions/core';
import { TestType } from './tests';
import { Discovery } from './discovery';
import { Config, RequestExclusion, validateConfig } from './config';
import { HttpClient } from '@actions/http-client';

interface RequestExclusion {
patterns: string[];
methods: string[];
}

interface Exclusions {
params?: string[];
requests?: RequestExclusion[];
}

interface NewScan {
name: string;
discoveryTypes: string[];
exclusions?: Exclusions;
module?: string;
crawlerUrls?: string[];
fileId?: string;
hostsFilter?: string[];
}
import * as core from '@actions/core';

interface Scan {
id: string;
Expand All @@ -44,7 +27,8 @@ const fileId = core.getInput('file_id');
const crawlerUrls = getArray('crawler_urls');
const excludedParams = getArray('exclude_params');
const excludedEntryPoints = getArray<RequestExclusion>('exclude_entry_points');
const discoveryTypesIn = getArray('discovery_types');
const tests = getArray<TestType>('tests');
const discoveryTypesIn = getArray<Discovery>('discovery_types');
const module_in = core.getInput('module');
const hostsFilter = getArray('hosts_filter');
const type = core.getInput('type');
Expand Down Expand Up @@ -81,11 +65,11 @@ const retest = async (uuid: string, scanName?: string) => {
}
};

const create = async (scan: NewScan) => {
const create = async (config: Config) => {
try {
const response = await client.postJson<Scan>(
`${baseUrl}/api/v1/scans`,
scan
config
);

if (response.statusCode < 300 && response.result) {
Expand All @@ -110,7 +94,8 @@ if (restartScanID) {
discoveryTypesIn ||
module_in ||
hostsFilter ||
type
type ||
tests
)
) {
retest(restartScanID, name);
Expand All @@ -122,19 +107,29 @@ if (restartScanID) {
} else {
const module = module_in || 'dast';
const discoveryTypes = !discoveryTypesIn?.length
? ['archive']
? [Discovery.ARCHIVE]
: discoveryTypesIn;

create({
const uniqueTests = tests ? [...new Set(tests)] : undefined;
const config: Config = {
name,
discoveryTypes,
module,
crawlerUrls,
fileId,
hostsFilter,
tests: uniqueTests,
exclusions: {
requests: excludedEntryPoints,
params: excludedParams
}
});
};

try {
validateConfig(config);
} catch (e: any) {
core.setFailed(e.message);
throw e;
}

create(config);
}
113 changes: 113 additions & 0 deletions src/tests.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
import * as core from '@actions/core';

/**
 * All test identifiers accepted by the scan engine.
 * NOTE(review): the string values appear to be the wire format sent to
 * the API (see the scan config payload) — confirm before changing any
 * value. Member names are alphabetical; a few values differ from their
 * member name (e.g. CVE -> 'cve_test', S3_TAKEOVER -> 'amazon_s3_takeover').
 */
export enum TestType {
  ANGULAR_CSTI = 'angular_csti',
  BACKUP_LOCATIONS = 'backup_locations',
  BROKEN_SAML_AUTH = 'broken_saml_auth',
  BRUTE_FORCE_LOGIN = 'brute_force_login',
  BUSINESS_CONSTRAINT_BYPASS = 'business_constraint_bypass',
  COMMON_FILES = 'common_files',
  COOKIE_SECURITY = 'cookie_security',
  CSRF = 'csrf',
  CVE = 'cve_test',
  DATE_MANIPULATION = 'date_manipulation',
  DEFAULT_LOGIN_LOCATION = 'default_login_location',
  DIRECTORY_LISTING = 'directory_listing',
  DOM_XSS = 'dom_xss',
  EMAIL_INJECTION = 'email_injection',
  EXCESSIVE_DATA_EXPOSURE = 'excessive_data_exposure',
  EXPOSED_COUCH_DB_APIS = 'exposed_couch_db_apis',
  FILE_UPLOAD = 'file_upload',
  FULL_PATH_DISCLOSURE = 'full_path_disclosure',
  GRAPHQL_INTROSPECTION = 'graphql_introspection',
  HEADER_SECURITY = 'header_security',
  HRS = 'hrs',
  HTML_INJECTION = 'html_injection',
  HTTP_METHOD_FUZZING = 'http_method_fuzzing',
  HTTP_RESPONSE_SPLITTING = 'http_response_splitting',
  ID_ENUMERATION = 'id_enumeration',
  IMPROPER_ASSET_MANAGEMENT = 'improper_asset_management',
  INSECURE_TLS_CONFIGURATION = 'insecure_tls_configuration',
  JWT = 'jwt',
  LDAPI = 'ldapi',
  LFI = 'lfi',
  LRRL = 'lrrl',
  MASS_ASSIGNMENT = 'mass_assignment',
  NOSQL = 'nosql',
  OPEN_BUCKETS = 'open_buckets',
  OPEN_DATABASE = 'open_database',
  OSI = 'osi',
  PROTO_POLLUTION = 'proto_pollution',
  RETIRE_JS = 'retire_js',
  RFI = 'rfi',
  S3_TAKEOVER = 'amazon_s3_takeover',
  SECRET_TOKENS = 'secret_tokens',
  SERVER_SIDE_JS_INJECTION = 'server_side_js_injection',
  SQLI = 'sqli',
  SSRF = 'ssrf',
  SSTI = 'ssti',
  UNVALIDATED_REDIRECT = 'unvalidated_redirect',
  VERSION_CONTROL_SYSTEMS = 'version_control_systems',
  WEBDAV = 'webdav',
  WORDPRESS = 'wordpress',
  XPATHI = 'xpathi',
  XSS = 'xss',
  XXE = 'xxe'
}

// Tests considered expensive to run; validateTests emits a workflow
// warning (not an error) when any of these are requested.
export const expensiveTests: readonly TestType[] = [
  TestType.BUSINESS_CONSTRAINT_BYPASS,
  TestType.CVE,
  TestType.DATE_MANIPULATION,
  TestType.EXCESSIVE_DATA_EXPOSURE,
  TestType.ID_ENUMERATION,
  TestType.LRRL,
  TestType.MASS_ASSIGNMENT,
  TestType.RETIRE_JS,
  // not implemented yet by the engine
  TestType.ANGULAR_CSTI,
  TestType.BACKUP_LOCATIONS,
  TestType.EXPOSED_COUCH_DB_APIS,
  TestType.HTTP_RESPONSE_SPLITTING,
  TestType.HRS
];

export const exclusiveTests: readonly TestType[] = [TestType.LRRL];

// A test name is valid when it matches one of the declared TestType members.
export const isValidTest = (test: TestType) =>
  Object.values(TestType).some(value => value === test);

// True when at least one of the requested tests is on the expensive list.
export const hasExpensiveTests = (tests: TestType[]) => {
  const expensive = new Set<TestType>(expensiveTests);

  return tests.some(test => expensive.has(test));
};

// True when an exclusive test is combined with any other test.
// A single exclusive test on its own is allowed.
export const hasExclusiveTests = (tests: TestType[]) => {
  if (tests.length === 1) {
    return false;
  }

  return tests.some(test => exclusiveTests.includes(test));
};

export const validateTests = (uniqueTests: TestType[]): void => {
const invalidTests = uniqueTests.filter(x => !isValidTest(x));

if (invalidTests.length) {
throw new Error(
`${invalidTests.join(
', '
)} tests are invalid. Please re-configure the scan.`
);
}

if (hasExclusiveTests(uniqueTests)) {
const chosenTests = uniqueTests.filter(x => exclusiveTests.includes(x));
throw new Error(
`${chosenTests.join(
', '
)} tests are mutually exclusive with other tests. Please re-configure the scan.`
);
}

if (hasExpensiveTests(uniqueTests)) {
const chosenTests = uniqueTests.filter(x => expensiveTests.includes(x));
const warningMessage = `${chosenTests.join(
', '
)} tests are expensive. Please use them with caution.`;
core.warning(warningMessage);
}
};

0 comments on commit 7406ee5

Please sign in to comment.