Skip to content

Commit

Permalink
RIPE NCC has merged 4449509c1
Browse files Browse the repository at this point in the history
* Add control to verify the duration of testing changes [0f27560fe]
* SonarQube feedback [d638a3d96]
* Incorporate SonarQube feedback [b6935586a]
* Finish added updatedAt + rename types with VRPs from ROA* to ApiRoaPrefix* [13ac67c5c]
* Add updatedAt to ROA-prefixes in API [27748c78b]
* Sonar nits [7ed56db9b]
* Add two test cases and document strange behaviour [e63ad9722]
* Refactor test style in BgpRisEntryRepositoryBeanTest [2dba60b08]
* clearer log message on unusable dir [41ff9925a]
* chore(deps): update dependency org.sonarqube:org.sonarqube.gradle.plugin to v4.4.1.3373 [8e46ac777]
  • Loading branch information
RPKI Team at RIPE NCC committed Apr 10, 2024
1 parent 28aeef1 commit 579f41a
Show file tree
Hide file tree
Showing 26 changed files with 485 additions and 216 deletions.
11 changes: 11 additions & 0 deletions .gitlab-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,17 @@ sonarqube:
- if: $CI_COMMIT_BRANCH == "main"
- if: $CI_COMMIT_BRANCH == "next"

# Post-merge QA control: runs scripts/gitlab-deploy-check to verify that the
# merged changes spent long enough on the staging environment before main.
control/run-on-staging:
  image: node:21-alpine  # the check script is plain Node; no npm install step
  stage: qa
  script:
    - ./scripts/gitlab-deploy-check
  # Advisory control: a failing check does not block the pipeline.
  allow_failure: true
  rules:
    # Only meaningful on main, where merge commits land; never run elsewhere.
    - if: $CI_COMMIT_BRANCH == "main"
      when: always
    - when: never

######## Deploy stage ########
.delivr-deploy: &delivr-deploy
stage: deploy
Expand Down
2 changes: 1 addition & 1 deletion buildSrc/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,5 @@ dependencies {
exclude group: 'org.eclipse.jgit', module: 'org.eclipse.jgit'
}
implementation 'org.eclipse.jgit:org.eclipse.jgit:5.13.3.202401111512-r'
implementation 'org.sonarqube:org.sonarqube.gradle.plugin:4.2.1.3168'
implementation 'org.sonarqube:org.sonarqube.gradle.plugin:4.4.1.3373'
}
2 changes: 1 addition & 1 deletion dependencies.gradle
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
ext {
rpki_commons_version = '1.37'
rpki_commons_version = '1.39.1'
spring_boot_version = '2.7.18'
}
217 changes: 217 additions & 0 deletions scripts/gitlab-deploy-check
Original file line number Diff line number Diff line change
@@ -0,0 +1,217 @@
#!/usr/bin/env node

const GitLab = ({apiUrl, apiToken, projectId}) => {
const repeat = (x) => (n) => [...new Array(n)].map(() => x);

const fetchFromGitLab = async (path, params) => {
const query = Object.entries({"per_page": 100, ...params})
.filter(([_, value]) => value != null)
.map(([name, value]) => `${name}=${encodeURIComponent(value)}`)
.reduce((acc, x) => `${acc}${acc != "" ? "&" : ""}${x}`, "");
const response = await fetch(
`${apiUrl}/${path}?${query}`,
{
headers: {
"PRIVATE-TOKEN": apiToken,
"accept": "application/json",
}
}
);
if (!response.ok) {
switch (response.status) {
case 401:
throw "The configured access token is invalid. A project access token with scope read_api and role reporter is required.";
case 403:
throw "The configured access token has insufficient permissions. The token must have scope read_api and role reporter.";
default:
throw `Failed to GET ${apiUrl}${path}?${query}, server returned: ${response.status}`;
}
}
return response.json();
};

const fetchAll = (f) => async function* (params={}, {concurrency, maxPages}={concurrency: 3, maxPages: 100}) {
const fetcher = (page) => f({...params, page});
let page = 0;
while (page < maxPages) {
const pages = Math.min(concurrency, maxPages - page);
const results = await Promise.all(repeat(fetcher)(pages).map((f) => f(page++)));
for (const result of results) {
if (result.length === 0) {
break;
}
yield* result;
}
}
};

return {
queries: {
commit: (sha) => fetchFromGitLab(`/projects/${projectId}/repository/commits/${sha}`),
deployments: (params={}) => fetchFromGitLab(`/projects/${projectId}/deployments`, params),
merge_requests: (params={}) => fetchFromGitLab(`/projects/${projectId}/merge_requests`, params),
},

combinators: {
findFirst: (f) => (params={}, fetchConfig) => async (p) => {
for await (const result of fetchAll(f)(params, fetchConfig)) {
if (p(result)) {
return result;
}
}
},
findAll: (f) => (params={}, fetchConfig) => async (p=() => true) => {
let acc = [];
for await (const result of fetchAll(f)(params, fetchConfig)) {
if (p(result)) {
acc.push(result);
}
}
return acc;
},
},
};
};

// Small helpers over ISO-8601 timestamp strings (anything Date.parse accepts).
const Calendar = {
    // True when timestamp x is strictly later than y.
    isAfter: (x, y) => {
        const xd = Date.parse(x);
        const yd = Date.parse(y);
        return xd > yd;
    },
    // The later of the two timestamps (y wins ties).
    max: (x, y) => Calendar.isAfter(x, y) ? x : y,
    // Comparator factory for Array.prototype.sort, ordering by key(x) ascending.
    compare: (key=(x)=>x) => (x, y) =>
        Calendar.isAfter(key(x), key(y)) ? 1 : Calendar.isAfter(key(y), key(x)) ? -1 : 0,
    // Render a timestamp as "YYYY-MM-DD HH:MM:SS UTC".
    // BUG FIX: the date part previously used local-time getters (getFullYear /
    // getMonth / getDate) while the time part used UTC getters, so the printed
    // date could be off by a day depending on the host timezone even though the
    // output is labelled "UTC"; the hours were also not zero-padded. All
    // components now use UTC getters and are padded.
    showTimestamp: (x) => {
        const d = new Date(Date.parse(x));
        const pad = (n) => String(n).padStart(2, "0");
        return `${d.getUTCFullYear()}-${pad(d.getUTCMonth() + 1)}-${pad(d.getUTCDate())} ${pad(d.getUTCHours())}:${pad(d.getUTCMinutes())}:${pad(d.getUTCSeconds())} UTC`
    }
}

// Report a fatal error on stderr and terminate with a failing exit code.
const die = (message) => {
    console.error(message);
    process.exit(1);
};

// Read environment variable `name`, falling back to `fallback`; terminates the
// process (via die) when neither yields a value. Note the `||`: an environment
// variable set to the empty string is treated as unset.
const env = (name, fallback) => {
    const value = process.env[name] || fallback;
    if (value == null) {
        die(`no value set for $${name}`);
    }
    return value;
};

// Assemble the runtime configuration from CI/CD environment variables.
// Exits the process (env -> die) when a required variable is missing.
// stagingDuration is in seconds; it stays a string when overridden via
// $DEPLOY_CHECK_DURATION and a number otherwise — consumers rely on coercion.
const context = () => ({
    apiToken: env("DEPLOY_CHECK_ACCESS_TOKEN"),
    apiUrl: env("CI_API_V4_URL"),
    projectId: env("CI_PROJECT_ID"),
    commitSha: env("CI_COMMIT_SHA"),
    stagingEnv: env("DEPLOY_CHECK_STAGING_ENV", "prepdev"),
    stagingDuration: env("DEPLOY_CHECK_DURATION", 12*60*60),
});

// Format a duration in seconds as e.g. "1d 2h 3m 4s", omitting zero-valued
// components; a zero (or sub-second) duration renders as "zero seconds".
const showDuration = (duration) => {
    const days = Math.floor(duration / 86400);
    const hours = Math.floor((duration % 86400) / 3600);
    const minutes = Math.floor((duration % 86400 % 3600) / 60);
    const seconds = Math.floor(duration % 86400 % 3600 % 60);
    const rendered = [[days, "d"], [hours, "h"], [minutes, "m"], [seconds, "s"]]
        .filter(([count]) => count > 0)
        .map(([count, suffix]) => `${count}${suffix}`)
        .join(" ");
    return rendered.length > 0 ? rendered : "zero seconds";
};

// Compute how long (in seconds) the merge request's changes were deployed on
// staging. `timeline` is expected to be deployments ordered oldest-first,
// ending at the first deployment that finished after the MR was merged.
// Returns {merge_request, last_commit}: total seconds any ref belonging to
// the MR was live, and seconds the MR's final commit (mergeRequest.sha) was.
const calculateDeploymentDurations = (timeline, mergeRequest) => {
    // A deployment belongs to this MR when it deployed the MR head sha, the
    // source branch, or the transient merge-request merge ref.
    const matchesMR = ({ref, sha}) => {
        return sha === mergeRequest.sha
            || ref === mergeRequest.source_branch
            || ref === `refs/merge-requests/${mergeRequest.iid}/merge`;
    };

    // Map each adjacent pair (xs[i], xs[i+1]) through f; the last element is
    // paired with null. Empty input is returned unchanged.
    const mapPairs = (f) => (xs) => {
        if (xs.length === 0) {
            return xs;
        }
        const {result, last} = xs.reduce(({result, last}, x) => ({
            result: last != null ? [...result, f (last, x)] : result,
            last: x,
        }), {result: [], last: null});
        return [...result, f(last, null)];
    };

    // A deployment counts as "live" from the moment its job finished until the
    // next deployment's job started; the final one counts until the MR's
    // merged_at timestamp (no replacement -> fall back to merged_at).
    const refsWithDuration = mapPairs(
        (deployment, replacement) => {
            const start = Date.parse(deployment.deployable.finished_at);
            const end = Date.parse(replacement?.deployable.started_at || mergeRequest.merged_at);
            // [duration-in-whole-seconds, summary-of-the-deployment]
            return [Math.floor((end - start) / 1000), {
                iid: deployment.iid,
                ref: deployment.ref,
                sha: deployment.sha,
                started_at: deployment.deployable.started_at,
                finished_at: deployment.deployable.finished_at,
            }];
        }
    );

    const durations = refsWithDuration(timeline);
    return {
        // Total live time over every deployment matching this MR (any ref).
        merge_request: durations
            .filter(([_, refs]) => matchesMR(refs))
            .reduce((acc, [duration]) => acc + duration, 0),
        // Live time of deployments of the MR's final commit specifically.
        last_commit: durations
            .filter(([duration, refs]) => refs.sha === mergeRequest.sha)
            .reduce((acc, [duration]) => acc + duration, 0),
    };
};

// Entry point: verify that the merge request whose merge commit is
// ctx.commitSha ran on the staging environment for more than
// ctx.stagingDuration seconds. Throws (a string) when no MR is found or when
// the control fails; prints a human-readable summary either way.
const main = async (ctx) => {
    const gitlab = GitLab(ctx);

    // Locate the merged MR that produced the commit being checked.
    const mergeRequest = await gitlab.combinators.findFirst(gitlab.queries.merge_requests)({state: "merged"})(
        ({merge_commit_sha}) => merge_commit_sha === ctx.commitSha
    );
    if (mergeRequest == null) {
        throw `No merge request found with merge commit ${ctx.commitSha}`;
    }

    // mergeRequest.sha is the head commit of the source branch at merge time.
    const lastCommit = await gitlab.queries.commit(mergeRequest.sha);

    // All successful staging deployments that finished after the later of the
    // last commit's and the MR's creation time.
    const deployments = await gitlab.combinators.findAll(gitlab.queries.deployments)({
        environment: ctx.stagingEnv,
        finished_after: Calendar.max(lastCommit.created_at, mergeRequest.created_at),
        order_by: "finished_at",
        status: "success",
    })();
    // Sort oldest-first, drop duplicate ids (paginated fetches can overlap),
    // then cut the timeline after the first deployment that finished past the
    // merge time — later deployments cannot contain this MR's changes.
    const timeline = [...deployments].sort(Calendar.compare((x) => x.finished_at))
        .reduce((acc, x) => acc.find(({id}) => id === x.id) != null ? acc : [...acc, x], [])
        .reduce(({result, pastMerge}, x) =>
            pastMerge ? { result, pastMerge } : {
                result: [...result, x],
                pastMerge: Calendar.isAfter(x.finished_at, mergeRequest.merged_at)
            },
            {result: [], pastMerge: false}
        ).result;

    const durations = calculateDeploymentDurations(timeline, mergeRequest);
    // NOTE(review): stagingDuration may be a string (from the environment);
    // `>` coerces it to a number, which is relied on here.
    const passesThreshold = durations.merge_request > ctx.stagingDuration;

    // Human-readable report for the CI job log.
    process.stdout.write(`
[#${mergeRequest.iid}] ${mergeRequest.title}
Branch ${mergeRequest.source_branch} at ${lastCommit.id} was merged into ${mergeRequest.target_branch} at ${Calendar.showTimestamp(mergeRequest.merged_at)}.
The changes in #${mergeRequest.iid} ran on ${ctx.stagingEnv} for ${showDuration(durations.merge_request)}. The last commit ran for ${showDuration(durations.last_commit)}.
${passesThreshold ? "✅" : "❌"} Minimum required staging period of ${showDuration (ctx.stagingDuration)} is ${passesThreshold ? "met" : "not met"}.
`);

    if (!passesThreshold) {
        throw "Merge request failed the staging deployment control.";
    }
};

// Run the control; any rejection (thrown string or Error) is reported via die,
// which exits non-zero so the CI job fails.
main(context()).catch(die);
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import javax.persistence.Column;
import javax.persistence.Embeddable;
import java.math.BigInteger;
import java.time.Instant;
import java.util.List;
import java.util.stream.Collectors;

Expand All @@ -36,6 +37,10 @@ public class RoaConfigurationPrefix {
@Column(name = "maximum_length", nullable = true)
private Integer maximumLength;

@Getter
@Column(name = "updated_at", insertable = false, updatable = false)
private Instant updatedAt;

protected RoaConfigurationPrefix() {
// JPA uses this
}
Expand Down Expand Up @@ -69,7 +74,7 @@ public int getMaximumLength() {
}

public RoaConfigurationPrefixData toData() {
return new RoaConfigurationPrefixData(getAsn(), getPrefix(), getMaximumLength());
return new RoaConfigurationPrefixData(getAsn(), getPrefix(), getMaximumLength(), getUpdatedAt());
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,16 @@
import lombok.Data;
import lombok.NoArgsConstructor;

/**
* An intent for a Validated Roa Payload.
*
* This is <emph>one</emph> asn-prefix pair with an optional maxlength. This is <b>not</b> a ROA since a ROA is
* 1:n mapping from ASN to prefixes and maxlengths.
*/
@NoArgsConstructor
@AllArgsConstructor
@Data
public class ROA {
public class ApiRoaPrefix {
private String asn;
private String prefix;
// external API (and portal) use maximalLength (sic)
Expand All @@ -17,6 +23,7 @@ public class ROA {

@Override
public String toString() {
// The term 'ROA' is kept to have a consistent API.
return "ROA{" +
"asn='" + asn + '\'' +
", prefix='" + prefix + '\'' +
Expand Down
31 changes: 31 additions & 0 deletions src/main/java/net/ripe/rpki/rest/pojo/ApiRoaPrefixExtended.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
package net.ripe.rpki.rest.pojo;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;

import javax.annotation.Nullable;
import java.time.Instant;

/**
 * An {@link ApiRoaPrefix} augmented with validation statistics and the time the
 * prefix was last updated, for the REST API.
 *
 * <p>The extra JSON fields are underscore-prefixed (e.g. {@code _updatedAt});
 * {@code _updatedAt} is omitted from the serialized JSON when {@code null}.
 */
@EqualsAndHashCode(callSuper = true)
public class ApiRoaPrefixExtended extends ApiRoaPrefix {
    // Presumably the count of announcements this prefix validates — TODO confirm semantics.
    @JsonProperty("_numberOfValidsCaused")
    @Getter @Setter private int numberOfValidsCaused;

    // Presumably the count of announcements this prefix invalidates — TODO confirm semantics.
    @JsonProperty("_numberOfInvalidsCaused")
    @Getter @Setter private int numberOfInvalidsCaused;

    // Last modification time; may be null, in which case it is left out of the JSON.
    @JsonProperty("_updatedAt")
    @JsonInclude(value = JsonInclude.Include.NON_NULL)
    @Getter @Setter private Instant updatedAt;

    // NOTE(review): suppresses Sonar java:S117 (local/parameter naming) — presumably
    // because `maximalLength` (sic) mirrors the external API spelling; confirm.
    @SuppressWarnings("java:S117")
    public ApiRoaPrefixExtended(String asn, String prefix, int maximalLength, int numberOfValidsCaused, int numberOfInvalidsCaused, @Nullable Instant updatedAt) {
        super(asn, prefix, maximalLength);
        this.numberOfValidsCaused = numberOfValidsCaused;
        this.numberOfInvalidsCaused = numberOfInvalidsCaused;
        this.updatedAt = updatedAt;
    }
}
4 changes: 2 additions & 2 deletions src/main/java/net/ripe/rpki/rest/pojo/PublishSet.java
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
public class PublishSet {

private String ifMatch;
private List<ROA> added = Collections.emptyList();
private List<ROA> deleted = Collections.emptyList();
private List<ApiRoaPrefix> added = Collections.emptyList();
private List<ApiRoaPrefix> deleted = Collections.emptyList();

}
20 changes: 0 additions & 20 deletions src/main/java/net/ripe/rpki/rest/pojo/ROAExtended.java

This file was deleted.

Loading

0 comments on commit 579f41a

Please sign in to comment.