From 079a5044245484143c52bfcc3aed8a535f5b23ec Mon Sep 17 00:00:00 2001 From: barreiro Date: Thu, 14 Nov 2024 03:51:05 +0000 Subject: [PATCH] Quarkus logging --- .../tools/horreum/api/data/PersistentLog.java | 14 --- .../action/GitHubIssueCommentAction.java | 10 +- .../action/GitHubIssueCreateAction.java | 9 +- .../horreum/action/GitHubPluginBase.java | 6 +- .../tools/horreum/action/HttpAction.java | 7 +- .../action/SlackChannelMessageAction.java | 28 ++--- .../tools/horreum/action/SlackPluginBase.java | 8 +- .../horreum/bus/BlockingTaskDispatcher.java | 16 ++- .../changedetection/FixedThresholdModel.java | 18 ++- .../changedetection/HunterEDivisiveModel.java | 50 ++++----- ...elativeDifferenceChangeDetectionModel.java | 33 +++--- .../datastore/CollectorApiDatastore.java | 19 ++-- .../datastore/ElasticsearchDatastore.java | 27 ++--- .../RelativeDifferenceExperimentModel.java | 2 +- .../tools/horreum/hibernate/IntArrayType.java | 4 +- .../horreum/hibernate/JsonBinaryType.java | 6 +- .../tools/horreum/hibernate/JsonbSetType.java | 4 +- .../horreum/mapper/ValidationErrorMapper.java | 7 +- .../horreum/notification/EmailPlugin.java | 16 +-- .../server/ApiKeyIdentityProvider.java | 2 +- .../ConstraintViolationExceptionMapper.java | 5 +- .../server/JDBCConnectionInterceptor.java | 2 +- .../tools/horreum/server/RoleManager.java | 6 +- .../horreum/server/SecurityBootstrap.java | 10 +- .../tools/horreum/svc/ActionServiceImpl.java | 16 ++- .../horreum/svc/AlertingServiceImpl.java | 73 ++++++------- .../tools/horreum/svc/BannerServiceImpl.java | 5 +- .../tools/horreum/svc/ConfigServiceImpl.java | 14 +-- .../tools/horreum/svc/DatasetServiceImpl.java | 21 ++-- .../horreum/svc/ExperimentServiceImpl.java | 7 +- .../tools/horreum/svc/LogServiceImpl.java | 12 +- .../horreum/svc/NotificationServiceImpl.java | 22 ++-- .../tools/horreum/svc/ReportServiceImpl.java | 41 ++++--- .../tools/horreum/svc/RunServiceImpl.java | 103 +++++++++--------- .../tools/horreum/svc/SchemaServiceImpl.java | 29 +++-- .../tools/horreum/svc/ServiceMediator.java | 8 +- .../tools/horreum/svc/SqlServiceImpl.java | 2 - .../horreum/svc/SubscriptionServiceImpl.java | 6 +- .../tools/horreum/svc/TestServiceImpl.java | 17 ++- .../tools/horreum/svc/UIServiceImpl.java | 2 +- .../tools/horreum/svc/UserServiceImpl.java | 29 +++-- .../io/hyperfoil/tools/horreum/svc/Util.java | 41 ++++--- .../horreum/svc/user/DatabaseUserBackend.java | 22 ++-- .../horreum/svc/user/KeycloakUserBackend.java | 83 +++++++------- .../horreum/svc/AlertingServiceTest.java | 2 - .../horreum/svc/BaseServiceNoRestTest.java | 2 +- .../tools/horreum/svc/BaseServiceTest.java | 20 ++-- .../tools/horreum/svc/BasicAuthTest.java | 2 +- .../tools/horreum/svc/DatasetServiceTest.java | 9 +- .../horreum/svc/SchemaServiceNoRestTest.java | 22 ++-- .../tools/horreum/svc/SlackDummyService.java | 20 ++-- .../horreum/svc/TestServiceNoRestTest.java | 2 +- ...mKeycloakTestResourceLifecycleManager.java | 2 +- .../tools/horreum/test/TestUtil.java | 7 +- .../tools/horreum/it/ItResource.java | 13 +-- .../HorreumDevServicesProcessor.java | 14 +-- .../common/resources/HorreumResource.java | 2 +- 57 files changed, 444 insertions(+), 535 deletions(-) diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/PersistentLog.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/PersistentLog.java index 4ac45a810..dd71d44be 100644 --- a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/PersistentLog.java +++ 
b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/PersistentLog.java @@ -6,7 +6,6 @@ import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; import org.eclipse.microprofile.openapi.annotations.media.Schema; -import org.jboss.logging.Logger; import com.fasterxml.jackson.annotation.JsonProperty; @@ -38,17 +37,4 @@ public PersistentLog(int level, String message) { this.timestamp = Instant.now(); } - public static Logger.Level logLevel(int level) { - switch (level) { - case DEBUG: - return Logger.Level.DEBUG; - case INFO: - return Logger.Level.INFO; - case WARN: - return Logger.Level.WARN; - case ERROR: - default: - return Logger.Level.ERROR; - } - } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubIssueCommentAction.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubIssueCommentAction.java index 01f8091ee..c2d588512 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubIssueCommentAction.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubIssueCommentAction.java @@ -70,15 +70,13 @@ public Uni<String> execute(JsonNode config, JsonNode secrets, Object payload) { return post(path, secrets, JsonNodeFactory.instance.objectNode().put("body", comment)) .onItem().transformToUni(response -> { if (response.statusCode() < 400) { - return Uni.createFrom() - .item(String.format("Successfully(%d) added comment to %s", response.statusCode(), path)); + return Uni.createFrom().item("Successfully(" + response.statusCode() + ") added comment to " + path); } else if (response.statusCode() == 403 && response.getHeader("Retry-After") != null) { return retry(response, config, secrets, payload); - } else { - return Uni.createFrom().failure(new RuntimeException( - String.format("Failed to add comment to %s, response %d: %s", - path, response.statusCode(), response.bodyAsString()))); } + String message = "Failed to add comment to " + path + ", response " + response.statusCode() + ":\n" + + response.bodyAsString(); + return Uni.createFrom().failure(new RuntimeException(message)); }).onFailure().transform(t -> new RuntimeException("Failed to add comment to " + path + ": " + t.getMessage())); } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubIssueCreateAction.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubIssueCreateAction.java index 1efef864d..1453a41a8 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubIssueCreateAction.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubIssueCreateAction.java @@ -47,14 +47,13 @@ public Uni<String> execute(JsonNode config, JsonNode secrets, Object payload) { .put("title", title).put("body", body).set("labels", JsonNodeFactory.instance.arrayNode().add("horreum"))) .onItem().transformToUni(response -> { if (response.statusCode() < 400) { - return Uni.createFrom() - .item(String.format("Successfully(%d) created issue in %s", response.statusCode(), path)); + return Uni.createFrom().item("Successfully(" + response.statusCode() + ") created issue in " + path); } else if (response.statusCode() == 403 && response.getHeader("Retry-After") != null) { return retry(response, config, secrets, payload); } else { - return Uni.createFrom().failure(new RuntimeException( - String.format("Failed to create issue in %s, response %d: %s", - path, response.statusCode(), response.bodyAsString()))); + String message = "Failed to create issue in " + path + ", 
response" + response.statusCode() + ":\n" + + response.bodyAsString(); + return Uni.createFrom().failure(new RuntimeException(message)); } }).onFailure() .transform(t -> new RuntimeException("Failed to create issue in " + path + ": " + t.getMessage())); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubPluginBase.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubPluginBase.java index d6ff79611..46cd60ce0 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubPluginBase.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/GitHubPluginBase.java @@ -5,10 +5,9 @@ import jakarta.enterprise.inject.Instance; import jakarta.inject.Inject; -import org.jboss.logging.Logger; - import com.fasterxml.jackson.databind.JsonNode; +import io.quarkus.logging.Log; import io.smallrye.mutiny.Uni; import io.vertx.core.http.HttpMethod; import io.vertx.core.http.RequestOptions; @@ -17,7 +16,6 @@ import io.vertx.mutiny.ext.web.client.HttpResponse; public abstract class GitHubPluginBase { - protected static final Logger log = Logger.getLogger(GitHubPluginBase.class); @Inject Vertx vertx; @@ -62,7 +60,7 @@ protected Uni> post(String path, JsonNode secrets, JsonNode protected Uni retry(HttpResponse response, JsonNode config, JsonNode secrets, Object payload) { int retryAfter = Integer.parseInt(response.getHeader("Retry-After")); - log.warnf("Exceeded Github request limits, retrying after %d seconds", retryAfter); + Log.warnf("Exceeded Github request limits, retrying after %d seconds", retryAfter); return Uni.createFrom() .emitter(em -> vertx.setTimer(TimeUnit.SECONDS.toMillis(retryAfter), id -> execute(config, secrets, payload) .subscribe().with(em::complete, em::fail))); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/HttpAction.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/HttpAction.java index 95c0d103d..c77b87673 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/HttpAction.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/HttpAction.java @@ -8,13 +8,13 @@ import jakarta.inject.Inject; import org.eclipse.microprofile.config.inject.ConfigProperty; -import org.jboss.logging.Logger; import com.fasterxml.jackson.databind.JsonNode; import io.hyperfoil.tools.horreum.entity.data.AllowedSiteDAO; import io.hyperfoil.tools.horreum.svc.ServiceException; import io.hyperfoil.tools.horreum.svc.Util; +import io.quarkus.logging.Log; import io.smallrye.mutiny.Uni; import io.vertx.core.http.HttpMethod; import io.vertx.core.http.HttpVersion; @@ -26,7 +26,6 @@ @ApplicationScoped public class HttpAction implements ActionPlugin { - private static final Logger log = Logger.getLogger(HttpAction.class); public static final String TYPE_HTTP = "http"; @@ -89,13 +88,13 @@ public Uni execute(JsonNode config, JsonNode secrets, Object payload) { .setPort(url.getPort() >= 0 ? 
url.getPort() : url.getDefaultPort()) .setURI(url.getFile()) .setSsl("https".equalsIgnoreCase(url.getProtocol())); - log.infof("Sending event to %s", url); + Log.infof("Sending event to %s", url); return http1xClient.request(HttpMethod.POST, options) .putHeader("Content-Type", "application/json") .sendBuffer(Buffer.buffer(body.toString())) .onItem().transform(response -> { if (response.statusCode() < 400) { - return String.format("Successfully(%d) notified hook: %s", response.statusCode(), url); + return "Successfully(" + response.statusCode() + ") notified hook: " + url; } else { throw new IllegalArgumentException("Failed to POST " + url + ", response " + response.statusCode() + ": " + response.bodyAsString()); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/SlackChannelMessageAction.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/SlackChannelMessageAction.java index a962a37ce..e3a6fe79f 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/SlackChannelMessageAction.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/SlackChannelMessageAction.java @@ -6,19 +6,18 @@ import org.apache.http.HttpStatus; import org.eclipse.microprofile.config.ConfigProvider; -import org.jboss.logging.Logger; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.hyperfoil.tools.horreum.svc.Util; +import io.quarkus.logging.Log; import io.smallrye.mutiny.Uni; import io.vertx.core.json.JsonObject; @ApplicationScoped public class SlackChannelMessageAction extends SlackPluginBase implements ActionPlugin { - private static final Logger log = Logger.getLogger(SlackChannelMessageAction.class); public static final String TYPE_SLACK_MESSAGE = "slack-channel-message"; @Override @@ -28,7 +27,7 @@ public String type() { @Override public void validate(JsonNode config, JsonNode secrets) { - log.tracef("Validating config %s, secrets %s", config, secrets); + Log.tracef("Validating config %s, secrets %s", config, secrets); requireProperties(secrets, "token"); requireProperties(config, "formatter", "channel"); } @@ -64,29 +63,26 @@ public Uni execute(JsonNode config, JsonNode secrets, Object payload) { text.put("text", comment); blocks.add(section); - log.debugf("Slack URL %s, token %s, body %s", url, token, body); + Log.debugf("Slack URL %s, token %s, body %s", url, token, body); return post(url, secrets, body) .onItem().transformToUni(response -> { if (response.statusCode() < 400) { JsonObject status = response.bodyAsJsonObject(); if (!status.getBoolean("ok")) { - return Uni.createFrom().failure( - new RuntimeException( - String.format("Failed to post to channel %s, response %s", channel, - status.getString("error")))); + return Uni.createFrom().failure(new RuntimeException( + "Failed to post to channel " + channel + ", response " + status.getString("error"))); } - return Uni.createFrom() - .item(String.format("Successfully(%d) posted to channel %s", response.statusCode(), channel)); + return Uni.createFrom().item("Successfully(" + response.statusCode() + ") posted to channel " + channel); } else if (response.statusCode() == HttpStatus.SC_TOO_MANY_REQUESTS && response.getHeader("Retry-After") != null) { - log.debugf("Slack POST needs retry: %s (%s)", response.toString(), response.bodyAsString()); + Log.debugf("Slack POST needs retry: %s (%s)", response.toString(), response.bodyAsString()); return retry(response, config, 
secrets, payload); - } else { - log.debugf("Slack POST failed: %s (%s)", response.statusCode(), response.bodyAsString()); - return Uni.createFrom().failure(new RuntimeException( - String.format("Failed to post to channel %s, response %d: %s", - channel, response.statusCode(), response.bodyAsString()))); } + Log.debugf("Slack POST failed: %s (%s)", response.statusCode(), response.bodyAsString()); + + String message = "Failed to post to channel " + channel + ", response " + response.statusCode() + ":\n" + + response.bodyAsString(); + return Uni.createFrom().failure(new RuntimeException(message)); }).onFailure() .transform(t -> new RuntimeException("Failed to post message to " + channel + ": " + t.getMessage())); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/SlackPluginBase.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/SlackPluginBase.java index 935a127b1..f0c32e5d0 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/SlackPluginBase.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/action/SlackPluginBase.java @@ -7,10 +7,9 @@ import jakarta.enterprise.inject.Instance; import jakarta.inject.Inject; -import org.jboss.logging.Logger; - import com.fasterxml.jackson.databind.JsonNode; +import io.quarkus.logging.Log; import io.smallrye.mutiny.Uni; import io.vertx.core.http.HttpMethod; import io.vertx.core.http.RequestOptions; @@ -19,7 +18,6 @@ import io.vertx.mutiny.ext.web.client.HttpResponse; public abstract class SlackPluginBase { - protected static final Logger log = Logger.getLogger(SlackPluginBase.class); @Inject Vertx vertx; @@ -48,7 +46,7 @@ protected Uni<HttpResponse<Buffer>> post(String path, JsonNode secrets, JsonNode if (token == null || token.isBlank()) { throw new IllegalArgumentException("Missing access token!"); } - log.debugf("POST %s (%s): %s", path, token, payload.toString()); + Log.debugf("POST %s (%s): %s", path, token, payload.toString()); URL url; try { url = new URL(path); @@ -72,7 +70,7 @@ protected Uni<HttpResponse<Buffer>> post(String path, JsonNode secrets, JsonNode protected Uni<String> retry(HttpResponse<Buffer> response, JsonNode config, JsonNode secrets, Object payload) { int retryAfter = Integer.parseInt(response.getHeader("Retry-After")); - log.warnf("Exceeded server request limits, retrying after %d seconds", retryAfter); + Log.warnf("Exceeded server request limits, retrying after %d seconds", retryAfter); return Uni.createFrom() .emitter(em -> vertx.setTimer(TimeUnit.SECONDS.toMillis(retryAfter), id -> execute(config, secrets, payload) .subscribe().with(em::complete, em::fail))); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/bus/BlockingTaskDispatcher.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/bus/BlockingTaskDispatcher.java index c03102464..2fe16af5e 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/bus/BlockingTaskDispatcher.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/bus/BlockingTaskDispatcher.java @@ -9,16 +9,15 @@ import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; -import org.jboss.logging.Logger; - import io.hyperfoil.tools.horreum.svc.Util; +import io.quarkus.logging.Log; import io.quarkus.runtime.Startup; import io.vertx.core.Vertx; @Startup @ApplicationScoped public class BlockingTaskDispatcher { - private static final Logger log = Logger.getLogger(BlockingTaskDispatcher.class); + 
@Inject Vertx vertx; @@ -31,7 +30,7 @@ public void executeForTest(int testId, Runnable runnable) { TaskQueue queue = 
taskQueues.computeIfAbsent(testId, TaskQueue::new); queue.executeOrAdd(task); } catch (Exception e) { - log.error("Failed to execute blocking task", e); + Log.error("Failed to execute blocking task", e); } finally { promise.complete(); } @@ -41,7 +40,6 @@ public void executeForTest(int testId, Runnable runnable) { } class TaskQueue { - private static final Logger log = Logger.getLogger(TaskQueue.class); private final int testId; private final Queue queue = new ConcurrentLinkedQueue<>(); private final ReentrantLock lock = new ReentrantLock(); @@ -54,7 +52,7 @@ public void executeOrAdd(Runnable runnable) { queue.add(runnable); do { if (lock.tryLock()) { - log.debugf("This thread is going to execute tasks (%d) for test %d, lock level %d", queue.size(), testId, + Log.debugf("This thread is going to execute tasks (%d) for test %d, lock level %d", queue.size(), testId, lock.getHoldCount()); try { while (!queue.isEmpty()) { @@ -62,13 +60,13 @@ public void executeOrAdd(Runnable runnable) { task.run(); } } catch (Throwable t) { - log.errorf(t, "Error executing task in the queue for test %d", testId); + Log.errorf(t, "Error executing task in the queue for test %d", testId); } finally { - log.debugf("Finished executing tasks for test %d", testId); + Log.debugf("Finished executing tasks for test %d", testId); lock.unlock(); } } else { - log.debugf("There's another thread executing the tasks (%d) for test %d", queue.size(), testId); + Log.debugf("There's another thread executing the tasks (%d) for test %d", queue.size(), testId); return; } } while (!queue.isEmpty()); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/FixedThresholdModel.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/FixedThresholdModel.java index d5cc7b6c1..b268319b2 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/FixedThresholdModel.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/FixedThresholdModel.java @@ -6,8 +6,6 @@ import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; -import org.jboss.logging.Logger; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -18,10 +16,10 @@ import io.hyperfoil.tools.horreum.api.data.changeDetection.FixedThresholdDetectionConfig; import io.hyperfoil.tools.horreum.entity.alerting.ChangeDAO; import io.hyperfoil.tools.horreum.entity.alerting.DataPointDAO; +import io.quarkus.logging.Log; @ApplicationScoped public class FixedThresholdModel implements ChangeDetectionModel { - private static final Logger log = Logger.getLogger(FixedThresholdModel.class); @Inject ObjectMapper mapper; @@ -36,7 +34,6 @@ public ConditionConfig config() { "Upper bound for acceptable datapoint values."); conditionConfig.defaults.put("model", new TextNode(ChangeDetectionModelType.names.FIXED_THRESHOLD)); return conditionConfig; - } @Override @@ -55,9 +52,9 @@ public void analyze(List dataPoints, JsonNode configuration, Consu if (config.min.enabled) { if ((!config.min.inclusive && dp.value <= config.min.value) || dp.value < config.min.value) { ChangeDAO c = ChangeDAO.fromDatapoint(dp); - c.description = String.format("%f is below lower bound %f (%s)", dp.value, config.min.value, + c.description = "%f is below lower bound %f (%s)".formatted(dp.value, config.min.value, config.min.inclusive ? 
"inclusive" : "exclusive"); - log.debug(c.description); + Log.debug(c.description); changeConsumer.accept(c); return; } @@ -65,19 +62,18 @@ public void analyze(List dataPoints, JsonNode configuration, Consu if (config.max.enabled) { if ((!config.max.inclusive && dp.value >= config.max.value) || dp.value > config.max.value) { ChangeDAO c = ChangeDAO.fromDatapoint(dp); - c.description = String.format("%f is above upper bound %f (%s)", dp.value, config.max.value, + c.description = "%f is above upper bound %f (%s)".formatted(dp.value, config.max.value, config.max.inclusive ? "inclusive" : "exclusive"); - log.debug(c.description); + Log.debug(c.description); changeConsumer.accept(c); } } } catch (JsonProcessingException e) { - String errMsg = String.format("Failed to parse configuration for variable %d", dp.variable.id); - log.error(errMsg, e); + String errMsg = "Failed to parse configuration for variable %d".formatted(dp.variable.id); + Log.error(errMsg, e); throw new ChangeDetectionException(errMsg, e); } - } @Override diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/HunterEDivisiveModel.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/HunterEDivisiveModel.java index ab2394a1f..89ecebf06 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/HunterEDivisiveModel.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/HunterEDivisiveModel.java @@ -27,8 +27,6 @@ import jakarta.enterprise.context.ApplicationScoped; -import org.jboss.logging.Logger; - import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.TextNode; @@ -36,10 +34,10 @@ import io.hyperfoil.tools.horreum.api.data.changeDetection.ChangeDetectionModelType; import io.hyperfoil.tools.horreum.entity.alerting.ChangeDAO; import io.hyperfoil.tools.horreum.entity.alerting.DataPointDAO; +import io.quarkus.logging.Log; @ApplicationScoped public class HunterEDivisiveModel implements ChangeDetectionModel { - private static final Logger log = Logger.getLogger(HunterEDivisiveModel.class); public static final String HUNTER_CONFIG = "HUNTER_CONFIG"; private static String[] HEADERS = { "kpi", "timestamp", "datasetid" }; @@ -71,7 +69,7 @@ public void analyze(List dataPoints, JsonNode configuration, Consu tmpFiles = TmpFiles.instance(); } catch (IOException e) { String errMsg = "Could not create temporary file for Hunter eDivisive algorithm"; - log.error(errMsg, e); + Log.error(errMsg, e); throw new ChangeDetectionException(errMsg, e); } @@ -83,19 +81,19 @@ public void analyze(List dataPoints, JsonNode configuration, Consu //write out csv fields pw.println(Arrays.stream(HEADERS).collect(Collectors.joining(","))); dataPoints.forEach(dataPointDAO -> pw.println( - String.format("%.2f,%s,%d", dataPointDAO.value, dataPointDAO.timestamp.toString(), dataPointDAO.id))); + "%.2f,%s,%d".formatted(dataPointDAO.value, dataPointDAO.timestamp.toString(), dataPointDAO.id))); } catch (IOException e) { String errMsg = "Could not create file writer for Hunter eDivisive algorithm"; - log.error(errMsg, e); + Log.error(errMsg, e); throw new ChangeDetectionException(errMsg, e); } - log.debugf("created csv output : %s", tmpFiles.inputFile.getAbsolutePath()); + Log.debugf("Created .csv output: %s", tmpFiles.inputFile.getAbsolutePath()); if (!validateInputCsv(tmpFiles)) { - String errMsg = String.format("could not validate: %s", tmpFiles.inputFile); - log.error(errMsg); + String errMsg = "Could not validate:" + 
tmpFiles.inputFile; + Log.error(errMsg); throw new ChangeDetectionException(errMsg); } @@ -116,7 +114,7 @@ public void analyze(List dataPoints, JsonNode configuration, Consu protected void processChangePoints(Function> changePointSupplier, Consumer changeConsumer, TmpFiles tmpFiles, Instant sinceInstance) { String command = "hunter analyze horreum --since '" + sinceInstance.toString() + "'"; - log.debugf("Running command: %s", command); + Log.debugf("Running command: %s", command); List results = executeProcess(tmpFiles, false, "bash", "-l", "-c", command); @@ -153,29 +151,29 @@ protected void processChangePoints(Function> cha String timestamp = foundChange.group("timestamp"); Integer datapointID = Integer.parseInt(foundChange.group("dataPointId")); - log.debugf("Found change point `%s` at `%s` for dataset: %d", change, timestamp, datapointID); + Log.debugf("Found change point `%s` at `%s` for dataset: %d", change, timestamp, datapointID); Optional foundDataPoint = changePointSupplier.apply(datapointID); if (foundDataPoint.isPresent()) { ChangeDAO changePoint = ChangeDAO.fromDatapoint(foundDataPoint.get()); - changePoint.description = String.format("eDivisive change `%s` at `%s` for dataset: %d", change, - timestamp, datapointID); + changePoint.description = "eDivisive change `%s` at `%s` for dataset: %d".formatted( + change, timestamp, datapointID); - log.trace(changePoint.description); + Log.trace(changePoint.description); changeConsumer.accept(changePoint); } else { - log.errorf("Could not find datapoint (%d) in set!", datapointID); + Log.errorf("Could not find datapoint (%d) in set!", datapointID); } } else { - log.errorf("Could not parse hunter line: '%s'", changeDetails); + Log.errorf("Could not parse hunter line: '%s'", changeDetails); } } } } else { - log.debugf("No change points were detected in : %s", tmpFiles.tmpdir.getAbsolutePath()); + Log.debugf("No change points were detected in : %s", tmpFiles.tmpdir.getAbsolutePath()); } } @@ -187,11 +185,11 @@ protected boolean validateInputCsv(TmpFiles tmpFiles) { Optional optLine = reader.lines().filter(line -> line.contains("Validation finished")).findFirst(); if (optLine.isEmpty()) { - log.errorf("Could not validate: %s", tmpFiles.tmpdir.getAbsolutePath()); + Log.errorf("Could not validate: %s", tmpFiles.tmpdir.getAbsolutePath()); return false; } if (optLine.get().contains("INVALID")) { - log.errorf("Invalid format for: %s; see log for details: %s", tmpFiles.tmpdir.getAbsolutePath(), + Log.errorf("Invalid format for: %s; see log for details: %s", tmpFiles.tmpdir.getAbsolutePath(), tmpFiles.logFile.getAbsolutePath()); return false; } @@ -230,18 +228,18 @@ public TmpFiles() throws IOException { .getResourceAsStream("changeDetection/hunter.yaml")) { if (confInputStream == null) { - log.error("Could not extract Hunter configuration from archive"); + Log.error("Could not extract Hunter configuration from archive"); return; } try (OutputStream confOut = new FileOutputStream(confFile)) { confOut.write(confInputStream.readAllBytes()); } catch (IOException e) { - log.error("Could not extract Hunter configuration from archive"); + Log.error("Could not extract Hunter configuration from archive"); } } catch (IOException e) { - log.error("Could not create temporary file for Hunter eDivisive algorithm", e); + Log.error("Could not create temporary file for Hunter eDivisive algorithm", e); } } @@ -250,7 +248,7 @@ protected void cleanup() { if (tmpdir.exists()) { clearDir(tmpdir); } else { - log.debugf("Trying to cleanup temp files, but they do not 
exist!"); + Log.debugf("Trying to cleanup temp files, but they do not exist!"); } } @@ -262,7 +260,7 @@ private void clearDir(File dir) { file.delete(); }); if (!dir.delete()) { - log.errorf("Failed to cleanup up temporary files: %s", dir.getAbsolutePath()); + Log.errorf("Failed to cleanup up temporary files: %s", dir.getAbsolutePath()); } } } @@ -285,8 +283,8 @@ protected List executeProcess(TmpFiles tmpFiles, boolean redirectOutput, int exitCode = process.waitFor(); if (exitCode != 0) { - log.errorf("Hunter process failed with exit code: %d", exitCode); - log.errorf("See error log for details: %s", tmpFiles.logFile.getAbsolutePath()); + Log.errorf("Hunter process failed with exit code: %d", exitCode); + Log.errorf("See error log for details: %s", tmpFiles.logFile.getAbsolutePath()); return null; } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/RelativeDifferenceChangeDetectionModel.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/RelativeDifferenceChangeDetectionModel.java index 7a698da8b..f15bf66c4 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/RelativeDifferenceChangeDetectionModel.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/changedetection/RelativeDifferenceChangeDetectionModel.java @@ -7,7 +7,6 @@ import jakarta.inject.Inject; import org.apache.commons.math3.stat.descriptive.SummaryStatistics; -import org.jboss.logging.Logger; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -19,12 +18,11 @@ import io.hyperfoil.tools.horreum.api.data.changeDetection.RelativeDifferenceDetectionConfig; import io.hyperfoil.tools.horreum.entity.alerting.ChangeDAO; import io.hyperfoil.tools.horreum.entity.alerting.DataPointDAO; +import io.quarkus.logging.Log; @ApplicationScoped public class RelativeDifferenceChangeDetectionModel implements ChangeDetectionModel { - private static final Logger log = Logger.getLogger(RelativeDifferenceChangeDetectionModel.class); - @Inject ObjectMapper mapper; @@ -74,7 +72,7 @@ public void analyze(List dataPoints, JsonNode configuration, Consu int minPrevious = Math.max(window, config.minPrevious); if (dataPoints.size() < minPrevious + window) { - log.debugf("Too few (%d) previous datapoints for variable %d, skipping analysis", dataPoints.size() - window, + Log.debugf("Too few (%d) previous datapoints for variable %d, skipping analysis", dataPoints.size() - window, dataPoint.variable.id); return; } @@ -97,14 +95,14 @@ public void analyze(List dataPoints, JsonNode configuration, Consu filteredValue = windowStats.getMean(); break; default: - String errMsg = String.format("Unsupported option 'filter'='%s' for variable %d, skipping analysis.", - config.filter, dataPoint.variable.id); - log.error(errMsg); + String errMsg = "Unsupported option 'filter'='%s' for variable %d, skipping analysis" + .formatted(config.filter, dataPoint.variable.id); + Log.error(errMsg); throw new ChangeDetectionException(errMsg); } double ratio = filteredValue / previousStats.getMean(); - log.tracef("Previous mean %f, filtered value %f, ratio %f", previousStats.getMean(), filteredValue, ratio); + Log.tracef("Previous mean %f, filtered value %f, ratio %f", previousStats.getMean(), filteredValue, ratio); if (ratio < 1 - config.threshold || ratio > 1 + config.threshold) { DataPointDAO dp = null; // We cannot know which datapoint is first with the regression; as a heuristic approach @@ -122,20 +120,19 @@ public void 
analyze(List dataPoints, JsonNode configuration, Consu ChangeDAO change = ChangeDAO.fromDatapoint(dp); DataPointDAO prevDataPoint = dataPoints.get(window - 1); DataPointDAO lastDataPoint = dataPoints.get(0); - change.description = String.format( - "Datasets %d/%d (%s) - %d/%d (%s): %s %f, previous mean %f (stddev %f), relative change %.2f%%", - prevDataPoint.dataset.run.id, prevDataPoint.dataset.ordinal, prevDataPoint.timestamp, - lastDataPoint.dataset.run.id, lastDataPoint.dataset.ordinal, lastDataPoint.timestamp, - config.filter, filteredValue, previousStats.getMean(), previousStats.getStandardDeviation(), - 100 * (ratio - 1)); - - log.debug(change.description); + change.description = "Datasets %d/%d (%s) - %d/%d (%s): %s %f, previous mean %f (stddev %f), relative change %.2f%%" + .formatted(prevDataPoint.dataset.run.id, prevDataPoint.dataset.ordinal, prevDataPoint.timestamp, + lastDataPoint.dataset.run.id, lastDataPoint.dataset.ordinal, lastDataPoint.timestamp, + config.filter, filteredValue, previousStats.getMean(), previousStats.getStandardDeviation(), + 100 * (ratio - 1)); + + Log.debug(change.description); changeConsumer.accept(change); } } catch (JsonProcessingException e) { - String errMsg = String.format("Failed to parse configuration for variable %d", dataPoint.variable.id); - log.error(errMsg, e); + String errMsg = "Failed to parse configuration for variable %d".formatted(dataPoint.variable.id); + Log.error(errMsg, e); throw new ChangeDetectionException(errMsg, e); } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java index 0e359721a..260119fd4 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java @@ -16,8 +16,6 @@ import jakarta.ws.rs.BadRequestException; import jakarta.ws.rs.core.Response; -import org.jboss.logging.Logger; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -27,12 +25,11 @@ import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO; import io.hyperfoil.tools.horreum.svc.ServiceException; +import io.quarkus.logging.Log; @ApplicationScoped public class CollectorApiDatastore implements Datastore { - protected static final Logger log = Logger.getLogger(CollectorApiDatastore.class); - @Inject ObjectMapper mapper; @@ -44,7 +41,7 @@ public DatastoreResponse handleRun(JsonNode payload, throws BadRequestException { if (metaData != null) { - log.warn("Empty request: " + metaData); + Log.warnf("Empty request: %s", metaData); throw ServiceException.badRequest("Empty request: " + metaData); } metaData = payload; @@ -71,7 +68,7 @@ public DatastoreResponse handleRun(JsonNode payload, HttpRequest request = builder.build(); HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); if (response.statusCode() != Response.Status.OK.getStatusCode()) { - log.error("Collector API returned " + response.statusCode() + " body : " + response.body()); + Log.errorf("Collector API returned %d body : %s", response.statusCode(), response.body()); throw ServiceException .serverError("Collector API returned " + response.statusCode() + " body : " + response.body()); } @@ -79,10 +76,10 
@@ public DatastoreResponse handleRun(JsonNode payload, payload = mapper.readTree(response.body()); return new DatastoreResponse(payload, metaData); } catch (JsonProcessingException e) { - log.error("Error while parsing responde from collector API ", e); + Log.error("Error while parsing response from collector API ", e); throw ServiceException.serverError("Error while sending request to collector API"); } catch (IOException | InterruptedException e) { - log.error("Error while sending request to collector API", e); + Log.error("Error while sending request to collector API", e); throw ServiceException.serverError("Error while sending request to collector API"); } } @@ -101,11 +98,11 @@ private static void verifyPayload(ObjectMapper mapper, CollectorApiDatastoreConf try { distinctTags = mapper.readValue(response.body(), String[].class); } catch (JsonProcessingException e) { - log.error("Error while parsing response from collector API: " + response.body(), e); + Log.errorf(e, "Error while parsing response from collector API: %s", response.body()); throw ServiceException.badRequest("Error while parsing response from collector API " + response.body()); } if (distinctTags == null || distinctTags.length == 0) { - log.warn("No tags found in collector API"); + Log.warn("No tags found in collector API"); throw ServiceException.badRequest("No tags found in collector API"); } if (Arrays.stream(distinctTags).noneMatch(tag::equals)) { @@ -138,7 +135,7 @@ private static CollectorApiDatastoreConfig getCollectorApiDatastoreConfig(Datast throw new RuntimeException(e); } if (jsonDatastoreConfig == null) { - log.error("Could not find collector API datastore: " + configuration.name); + Log.errorf("Could not find collector API datastore: %s", configuration.name); throw ServiceException.serverError("Could not find CollectorAPI datastore: " + configuration.name); } assert jsonDatastoreConfig.apiKey != null : "API key must be set"; diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java index c5f6f1304..31f58453a 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java @@ -21,7 +21,6 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; -import org.jboss.logging.Logger; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.core.JsonProcessingException; @@ -33,12 +32,11 @@ import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; import io.hyperfoil.tools.horreum.api.data.datastore.ElasticsearchDatastoreConfig; import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO; +import io.quarkus.logging.Log; @ApplicationScoped public class ElasticsearchDatastore implements Datastore { - protected static final Logger log = Logger.getLogger(ElasticsearchDatastore.class); - @Inject ObjectMapper mapper; @@ -54,9 +52,8 @@ public DatastoreResponse handleRun(JsonNode payload, RestClient restClient = null; try { - if (metaData != null) { - log.warn("Empty request: " + metaData.toString()); + Log.warnf("Empty request: %s", metaData); throw new BadRequestException("Empty request: " + metaData); } metaData = payload; @@ -84,7 +81,7 @@ public DatastoreResponse handleRun(JsonNode payload, restClient = 
builder.build(); if (restClient == null) { - log.warn("Could not find elasticsearch datastore: " + configuration.name); + Log.warnf("Could not find elasticsearch datastore: %s", configuration.name); throw new BadRequestException("Could not find elasticsearch datastore: " + configuration.name); } @@ -92,8 +89,8 @@ public DatastoreResponse handleRun(JsonNode payload, try { apiRequest = mapper.treeToValue(payload, ElasticRequest.class); } catch (JsonProcessingException e) { - String msg = String.format("Could not parse request: %s, %s", metaData.toString(), e.getMessage()); - log.warn(msg); + String msg = "Could not parse request: %s, %s".formatted(metaData, e.getMessage()); + Log.warn(msg); throw new BadRequestException(msg); } @@ -112,9 +109,8 @@ public DatastoreResponse handleRun(JsonNode payload, try { finalString = extracted(restClient, request); } catch (IOException e) { - String msg = String.format("Could not query doc request: %s, %s", metaData.toString(), - e.getMessage()); - log.warn(msg); + String msg = "Could not query doc request: %s, %s".formatted(metaData, e.getMessage()); + Log.warn(msg); throw new BadRequestException(msg); } @@ -182,19 +178,16 @@ public DatastoreResponse handleRun(JsonNode payload, docString = extracted(finalRestClient, subRequest); } catch (IOException e) { - docString.replaceAll("ERR_MSG", e.getMessage()); - String msg = String.format("Could not query doc request: index: %s; docID: %s (%s)", + Log.errorf("Could not query doc request: index: %s; docID: %s (%s)", multiIndexQuery.targetIndex, multiIndexQuery.docField, e.getMessage()); - log.error(msg); } try { result.put("$doc", mapper.readTree(docString)); } catch (JsonProcessingException e) { docString.replaceAll("ERR_MSG", e.getMessage()); - String msg = String.format("Could not parse doc result: %s, %s", docString, e.getMessage()); - log.error(msg); + Log.errorf("Could not parse doc result: %s, %s", docString, e.getMessage()); } extractedResults.add(result); @@ -218,7 +211,7 @@ public DatastoreResponse handleRun(JsonNode payload, try { restClient.close(); } catch (IOException e) { - log.errorf("Error closing rest client: %s", e.getMessage()); + Log.errorf("Error closing rest client: %s", e.getMessage()); } } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/experiment/RelativeDifferenceExperimentModel.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/experiment/RelativeDifferenceExperimentModel.java index f2560831a..850d85b24 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/experiment/RelativeDifferenceExperimentModel.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/experiment/RelativeDifferenceExperimentModel.java @@ -47,6 +47,6 @@ public ExperimentService.ComparisonResult compare(JsonNode config, List { mailer.send(Mail.withHtml(data, subject, content)).await().atMost(sendMailTimeout); - Log.debug("Sending mail: " + content); + Log.debugf("Sending mail: %s", content); }); } @Override public void notifyMissingDataset(String testName, int testId, String ruleName, long maxStaleness, Instant lastTimestamp) { - String subject = String.format("%s Missing expected data for %s/%s", subjectPrefix, testName, ruleName); + String subject = "%s Missing expected data for %s/%s".formatted(subjectPrefix, testName, ruleName); missingDatasetNotificationEmail .data("username", username) .data("testName", testName) @@ -115,13 +115,13 @@ public void notifyMissingDataset(String testName, int testId, String ruleName, l .data("lastTimestamp", 
lastTimestamp == null ? null : dateFormat.format(Date.from(lastTimestamp))) .createUni().subscribe().with(content -> { mailer.send(Mail.withHtml(data, subject, content)).await().atMost(sendMailTimeout); - Log.debug("Sending mail: " + content); + Log.debugf("Sending mail: %s", content); }); } @Override public void notifyMissingValues(String testName, String fingerprint, MissingValuesEvent event) { - String subject = String.format("%s Missing change detection values in test %s, dataset %d#%d", + String subject = "%s Missing change detection values in test %s, dataset %d#%d".formatted( subjectPrefix, testName, event.dataset.runId, event.dataset.ordinal); missingValuesNotificationEmail .data("username", username) @@ -134,7 +134,7 @@ public void notifyMissingValues(String testName, String fingerprint, MissingValu .data("variables", event.variables) .createUni().subscribe().with(content -> { mailer.send(Mail.withHtml(data, subject, content)).await().atMost(sendMailTimeout); - Log.debug("Sending mail: " + content); + Log.debugf("Sending mail: %s", content); }); } @@ -151,14 +151,14 @@ public void notifyExpectedRun(String testName, int testId, long before, String e .data("backlink", backlink) .createUni().subscribe().with(content -> { mailer.send(Mail.withHtml(data, subject, content)).await().atMost(sendMailTimeout); - Log.debug("Sending mail: " + content); + Log.debugf("Sending mail: %s", content); }); } @Override public void notifyApiKeyExpiration(String keyName, Instant creation, Instant lastAccess, long toExpiration, long active) { - String subject = String.format("%s API key \"%s\" %s", subjectPrefix, keyName, + String subject = "%s API key '%s' %s".formatted(subjectPrefix, keyName, toExpiration == -1 ? "EXPIRED" : "about to expire"); String content = apiKeyExpirationEmail .data("baseUrl", baseUrl) @@ -170,7 +170,7 @@ public void notifyApiKeyExpiration(String keyName, Instant creation, Instant las .data("active", active) .render(); mailer.send(Mail.withHtml(data, subject, content)).await().atMost(sendMailTimeout); - Log.debug("Sending mail: " + content); + Log.debugf("Sending mail: %s", content); } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/ApiKeyIdentityProvider.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/ApiKeyIdentityProvider.java index c6dcf5674..6d32bef11 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/ApiKeyIdentityProvider.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/ApiKeyIdentityProvider.java @@ -39,7 +39,7 @@ SecurityIdentity identityFromKey(String key) { return UserApiKey.findOptional(key) .filter(k -> !k.revoked) .map((userKey) -> { - Log.debugv("Authentication of user {0} with key \"{1}\" {2}", userKey.user.username, userKey.name, key); + Log.debugf("Authentication of user %s with key '%s'", userKey.user.username, userKey.name); // update last access userKey.access = timeService.now(); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/ConstraintViolationExceptionMapper.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/ConstraintViolationExceptionMapper.java index 4580ea5fc..161b79425 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/ConstraintViolationExceptionMapper.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/ConstraintViolationExceptionMapper.java @@ -8,15 +8,14 @@ import jakarta.ws.rs.ext.ExceptionMapper; import jakarta.ws.rs.ext.Provider; -import 
org.jboss.logging.Logger; +import io.quarkus.logging.Log; @Provider public class ConstraintViolationExceptionMapper implements ExceptionMapper { - private static final Logger log = Logger.getLogger(ConstraintViolationExceptionMapper.class); @Override public Response toResponse(ConstraintViolationException exception) { - log.error("Mapping exception to response", exception); + Log.error("Mapping exception to response", exception); JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); for (ConstraintViolation cv : exception.getConstraintViolations()) { arrayBuilder.add(Json.createObjectBuilder() diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/JDBCConnectionInterceptor.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/JDBCConnectionInterceptor.java index 427edd347..f9e88bb0d 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/JDBCConnectionInterceptor.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/JDBCConnectionInterceptor.java @@ -18,7 +18,7 @@ public void onConnectionCreate(Connection connection) { try (Statement statement = connection.createStatement()) { statement.execute("SELECT set_config('horreum.userroles', '" + Roles.HORREUM_SYSTEM + "', false)"); } catch (SQLException e) { - Log.warnv(e, "Unable to set default role " + Roles.HORREUM_SYSTEM + " on the JDBC connection."); + Log.warnf(e, "Unable to set default role '%s' on the JDBC connection", Roles.HORREUM_SYSTEM); } } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/RoleManager.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/RoleManager.java index 5c0b1ab1e..2253679c3 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/RoleManager.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/RoleManager.java @@ -36,11 +36,11 @@ public String setRoles(String roles) { } Object[] row = (Object[]) em.createNativeQuery(SET_ROLES).setParameter(1, roles).getSingleResult(); - if (Log.isDebugEnabled()) { // enabe with: `quarkus.log.category."io.hyperfoil.tools.horreum.server.RoleManager".level=DEBUG` + if (Log.isDebugEnabled()) { // enable with: `quarkus.log.category."io.hyperfoil.tools.horreum.server.RoleManager".level=DEBUG` try { - Log.debugv("Setting roles {0} (replacing {1}) on transaction {2}", roles, row[0], txManager.getTransaction()); + Log.debugf("Setting roles '%s' (replacing '%s') on transaction %s", roles, row[0], txManager.getTransaction()); } catch (SystemException e) { - Log.debugv("Setting roles {0} (replacing {1}), but obtaining current transaction failed due to {2}", roles, + Log.debugf("Setting roles '%s' (replacing '%s'), but obtaining current transaction failed due to %s", roles, row[0], e.getMessage()); } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/SecurityBootstrap.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/SecurityBootstrap.java index c54cbba10..d1e2deb3b 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/SecurityBootstrap.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/server/SecurityBootstrap.java @@ -80,7 +80,7 @@ private boolean performRolesMigration() { @Transactional void performUserMigration(UserRepresentation kcUser, List kcRoles) { - Log.infov("Migration of user {0} {1} with username {2}", kcUser.getFirstName(), kcUser.getLastName(), + Log.infof("Migration of user '%s %s' with username '%s'", 
kcUser.getFirstName(), kcUser.getLastName(), kcUser.getUsername()); String previousRoles = roleManager.setRoles(kcUser.getUsername()); try { @@ -104,12 +104,12 @@ void performUserMigration(UserRepresentation kcUser, List kc } else if ("admin".equals(role)) { userInfo.roles.add(UserRole.ADMIN); } else { - Log.infov("Dropping role {0} for user {1} {2}", role, kcUser.getFirstName(), kcUser.getLastName()); + Log.infof("Dropping role '%s' for user '%s %s'", role, kcUser.getFirstName(), kcUser.getLastName()); } } userInfo.persist(); } catch (Exception e) { - Log.warnv("Unable to perform migration for user {0} {1} due to {2}", kcUser.getFirstName(), kcUser.getLastName(), + Log.warnf("Unable to perform migration for user '%s %s' due to '%s'", kcUser.getFirstName(), kcUser.getLastName(), e.getMessage()); } finally { roleManager.setRoles(previousRoles); @@ -154,9 +154,9 @@ public void checkBootstrapAccount() { userInfo.defaultTeam = "dev-team"; userInfo.persist(); - Log.infov("\n>>>\n>>> Created temporary account {0} with password {1}\n>>>", BOOTSTRAP_ACCOUNT, user.password); + Log.infof("\n>>>\n>>> Created temporary account '%s' with password %s\n>>>", BOOTSTRAP_ACCOUNT, user.password); } else if (administrators.size() > 1 && administrators.contains(BOOTSTRAP_ACCOUNT)) { - Log.warnv("The temporary account {0} can be removed", BOOTSTRAP_ACCOUNT); + Log.warnf("The temporary account '%s' can be removed", BOOTSTRAP_ACCOUNT); } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ActionServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ActionServiceImpl.java index 7ba8d8099..89fc222c2 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ActionServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ActionServiceImpl.java @@ -15,8 +15,6 @@ import jakarta.persistence.EntityManager; import jakarta.transaction.Transactional; -import org.jboss.logging.Logger; - import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -38,6 +36,7 @@ import io.hyperfoil.tools.horreum.mapper.AllowedSiteMapper; import io.hyperfoil.tools.horreum.server.WithRoles; import io.quarkus.hibernate.orm.panache.PanacheQuery; +import io.quarkus.logging.Log; import io.quarkus.panache.common.Page; import io.quarkus.panache.common.Sort; import io.quarkus.runtime.Startup; @@ -47,7 +46,6 @@ @ApplicationScoped @Startup public class ActionServiceImpl implements ActionService { - private static final Logger log = Logger.getLogger(ActionServiceImpl.class); @Inject Instance<ActionPlugin> actionPlugins; @@ -75,14 +73,14 @@ private void executeActions(AsyncEventChannels event, int testId, Object payload } for (ActionDAO action : actions) { if (!notify && !action.runAlways) { - log.debugf("Ignoring action for event %s in test %d, type %s as this event should not notfiy", event, testId, + Log.debugf("Ignoring action for event %s in test %d, type %s as this event should not notify", event, testId, action.type); continue; } try { ActionPlugin plugin = plugins.get(action.type); if (plugin == null) { - log.errorf("No plugin for action type %s", action.type); + Log.errorf("No plugin for action type %s", action.type); new ActionLogDAO(PersistentLogDAO.ERROR, testId, event.name(), action.type, "No plugin for action type " + action.type).persist(); continue; @@ -91,7 +89,7 @@ private void executeActions(AsyncEventChannels event, int testId, Object payload .with(item -> { }, throwable -> logActionError(testId, 
event.name(), action.type, throwable)); } catch (Exception e) { - log.errorf(e, "Failed to invoke action %d", action.id); + Log.errorf(e, "Failed to invoke action %d", action.id); new ActionLogDAO(PersistentLogDAO.ERROR, testId, event.name(), action.type, "Failed to invoke: " + e.getMessage()).persist(); new ActionLogDAO(PersistentLogDAO.DEBUG, testId, event.name(), action.type, @@ -105,15 +103,15 @@ private void executeActions(AsyncEventChannels event, int testId, Object payload } void logActionError(int testId, String event, String type, Throwable throwable) { - log.errorf("Error executing action '%s' for event %s on test %d: %s: %s", + Log.errorf("Error executing action '%s' for event %s on test %d: %s: %s", type, event, testId, throwable.getClass().getName(), throwable.getMessage()); Util.executeBlocking(vertx, CachedSecurityIdentity.ANONYMOUS, Uni.createFrom().item(() -> { doLogActionError(testId, event, type, throwable); return null; })).subscribe().with(item -> { }, t -> { - log.error("Cannot log error in action!", t); - log.error("Logged error: ", throwable); + Log.error("Cannot log error in action!", t); + Log.error("Logged error: ", throwable); }); } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/AlertingServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/AlertingServiceImpl.java index bc1895a8f..72f1e0f51 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/AlertingServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/AlertingServiceImpl.java @@ -43,7 +43,6 @@ import org.hibernate.Session; import org.hibernate.query.NativeQuery; import org.hibernate.type.StandardBasicTypes; -import org.jboss.logging.Logger; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -73,6 +72,7 @@ import io.hyperfoil.tools.horreum.mapper.*; import io.hyperfoil.tools.horreum.server.WithRoles; import io.quarkus.hibernate.orm.panache.PanacheEntityBase; +import io.quarkus.logging.Log; import io.quarkus.panache.common.Parameters; import io.quarkus.runtime.Startup; import io.quarkus.scheduler.Scheduled; @@ -82,7 +82,6 @@ @ApplicationScoped @Startup public class AlertingServiceImpl implements AlertingService { - private static final Logger log = Logger.getLogger(AlertingServiceImpl.class); //@formatter:off private static final String LOOKUP_TIMESTAMP = @@ -246,7 +245,7 @@ public void onLabelsUpdated(Dataset.LabelsUpdatedEvent event) { retryCounterSet.putIfAbsent(event.datasetId, new AtomicInteger(0)); int retryCounter = retryCounterSet.get(event.datasetId).getAndIncrement(); if (retryCounter < labelCalcRetries) { - log.infof("Retrying labels update for dataset %d, attempt %d/%d", event.datasetId, retryCounter, + Log.infof("Retrying labels update for dataset %d, attempt %d/%d", event.datasetId, retryCounter, this.labelCalcRetries); vertx.setTimer(1000, timerId -> messageBus.executeForTest(event.datasetId, () -> Util.withTx(tm, () -> { onLabelsUpdated(event); @@ -255,7 +254,7 @@ public void onLabelsUpdated(Dataset.LabelsUpdatedEvent event) { return; } else { //we have retried `horreum.alerting.updateLabel.retries` number of times, log a warning and stop retrying - log.warnf("Unsuccessfully retried updating labels %d times for dataset %d. Stopping", this.labelCalcRetries, + Log.warnf("Unsuccessfully retried updating labels %d times for dataset %d. 
Stopping", this.labelCalcRetries, event.datasetId); retryCounterSet.remove(event.datasetId); return; @@ -315,10 +314,10 @@ private void createMissingDataRuleResult(DatasetDAO dataset, int ruleId) { private void recalculateDatapointsForDataset(DatasetDAO dataset, boolean notify, boolean debug, Recalculation recalculation) { - log.debugf("Analyzing dataset %d (%d/%d)", (long) dataset.id, (long) dataset.run.id, dataset.ordinal); + Log.debugf("Analyzing dataset %d (%d/%d)", (long) dataset.id, (long) dataset.run.id, dataset.ordinal); TestDAO test = TestDAO.findById(dataset.testid); if (test == null) { - log.errorf("Cannot load test ID %d", dataset.testid); + Log.errorf("Cannot load test ID %d", dataset.testid); return; } if (!testFingerprint(dataset, test.fingerprintFilter)) { @@ -558,7 +557,7 @@ void createDataPoint(DatasetDAO dataset, Instant timestamp, int variableId, doub Util.registerTxSynchronization(tm, txStatus -> mediator.publishEvent(AsyncEventChannels.DATAPOINT_NEW, dataset.testid, event)); } else { - log.debugf("DataPoint for dataset %d, variable %d, timestamp %s, value %f not found", dataset.id, variableId, + Log.debugf("DataPoint for dataset %d, variable %d, timestamp %s, value %f not found", dataset.id, variableId, timestamp, value); } } @@ -568,8 +567,8 @@ private void logCalculationMessage(DatasetDAO dataSet, int level, String format, } private void logCalculationMessage(int testId, int datasetId, int level, String format, Object... args) { - String msg = args.length == 0 ? format : String.format(format, args); - log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), testId, datasetId, msg); + String msg = args.length == 0 ? format : format.formatted(args); + Log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), testId, datasetId, msg); new DatasetLogDAO(em.getReference(TestDAO.class, testId), em.getReference(DatasetDAO.class, datasetId), level, "variables", msg).persist(); } @@ -579,15 +578,15 @@ private void logMissingDataMessage(DatasetDAO dataSet, int level, String format, } private void logMissingDataMessage(int testId, int datasetId, int level, String format, Object... args) { - String msg = args.length == 0 ? format : String.format(format, args); - log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), testId, datasetId, msg); + String msg = args.length == 0 ? format : format.formatted(args); + Log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), testId, datasetId, msg); new DatasetLogDAO(em.getReference(TestDAO.class, testId), em.getReference(DatasetDAO.class, datasetId), level, "missingdata", msg).persist(); } private void logChangeDetectionMessage(int testId, int datasetId, int level, String format, Object... args) { - String msg = args.length == 0 ? format : String.format(format, args); - log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), testId, datasetId, msg); + String msg = args.length == 0 ? 
format : format.formatted(args); + Log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), testId, datasetId, msg); new DatasetLogDAO(em.getReference(TestDAO.class, testId), em.getReference(DatasetDAO.class, datasetId), level, "changes", msg).persist(); } @@ -599,14 +598,14 @@ void onNewDataPoint(DataPoint.Event event, boolean lastDatapoint) { if (dataPoint.variable != null && dataPoint.variable.id != null) { VariableDAO variable = VariableDAO.findById(dataPoint.variable.id); if (variable != null) { - log.debugf("Processing new datapoint for dataset %d at %s, variable %d (%s), value %f", + Log.debugf("Processing new datapoint for dataset %d at %s, variable %d (%s), value %f", dataPoint.datasetId, dataPoint.timestamp, variable.id, variable.name, dataPoint.value); JsonNode fingerprint = FingerprintDAO. findByIdOptional(dataPoint.datasetId) .map(fp -> fp.fingerprint).orElse(null); VarAndFingerprint key = new VarAndFingerprint(variable.id, fingerprint); - log.debugf("Invalidating variable %d FP %s timestamp %s, current value is %s", variable.id, fingerprint, + Log.debugf("Invalidating variable %d FP %s timestamp %s, current value is %s", variable.id, fingerprint, dataPoint.timestamp, validUpTo.get(key)); validUpTo.compute(key, (ignored, current) -> { if (current == null || !dataPoint.timestamp.isAfter(current.timestamp)) { @@ -617,11 +616,11 @@ void onNewDataPoint(DataPoint.Event event, boolean lastDatapoint) { }); runChangeDetection(VariableDAO.findById(variable.id), fingerprint, event.notify, true, lastDatapoint); } else { - log.warnf("Could not process new datapoint for dataset %d at %s, could not find variable by id %d ", + Log.warnf("Could not process new datapoint for dataset %d at %s, could not find variable by id %d ", dataPoint.datasetId, dataPoint.timestamp, dataPoint.variable == null ? -1 : dataPoint.variable.id); } } else { - log.warnf("Could not process new datapoint for dataset %d when the supplied variable or id reference is null ", + Log.warnf("Could not process new datapoint for dataset %d when the supplied variable or id reference is null ", dataPoint.datasetId); } } @@ -645,7 +644,7 @@ private void runChangeDetection(VariableDAO variable, JsonNode fingerprint, bool .setParameter(4, fingerprint, JsonBinaryType.INSTANCE) .getResultStream().filter(Objects::nonNull).findFirst().orElse(null); if (nextTimestamp == null) { - log.debugf("No further datapoints for change detection"); + Log.debugf("No further datapoints for change detection"); return; } @@ -660,7 +659,7 @@ private void runChangeDetection(VariableDAO variable, JsonNode fingerprint, bool .setParameter(3, !valid.inclusive) .setParameter(4, fingerprint, JsonBinaryType.INSTANCE) .executeUpdate(); - log.debugf("Deleted %d changes %s %s for variable %d, fingerprint %s", numDeleted, valid.inclusive ? ">" : ">=", + Log.debugf("Deleted %d changes %s %s for variable %d, fingerprint %s", numDeleted, valid.inclusive ? 
">" : ">=", valid.timestamp, variable.id, fingerprint); } @@ -678,7 +677,7 @@ private void runChangeDetection(VariableDAO variable, JsonNode fingerprint, bool Instant changeTimestamp = LONG_TIME_AGO; if (lastChange != null) { - log.debugf("Filtering DP between %s (change %d) and %s", lastChange.timestamp, lastChange.id, nextTimestamp); + Log.debugf("Filtering DP between %s (change %d) and %s", lastChange.timestamp, lastChange.id, nextTimestamp); changeTimestamp = lastChange.timestamp; } @@ -697,7 +696,7 @@ private void runChangeDetection(VariableDAO variable, JsonNode fingerprint, bool // Last datapoint is already in the list if (dataPoints.isEmpty()) { if (expectExists) { - log.warn("The published datapoint should be already in the list"); + Log.warn("The published datapoint should be already in the list"); } } else { int datasetId = dataPoints.get(0).getDatasetId(); @@ -741,7 +740,7 @@ private void runChangeDetection(VariableDAO variable, JsonNode fingerprint, bool } catch (ChangeDetectionException e) { new ChangeDetectionLogDAO(variable, fingerprint, PersistentLogDAO.ERROR, e.getLocalizedMessage()) .persist(); - log.error("An error occurred while running change detection!", e); + Log.error("An error occurred while running change detection!", e); } } } @@ -755,7 +754,7 @@ private void runChangeDetection(VariableDAO variable, JsonNode fingerprint, bool private void validateUpTo(VariableDAO variable, JsonNode fingerprint, Instant timestamp) { validUpTo.compute(new VarAndFingerprint(variable.id, fingerprint), (ignored, current) -> { - log.debugf("Attempt %s, valid up to %s, ", timestamp, current); + Log.debugf("Attempt %s, valid up to %s", timestamp, current); if (current == null || !current.timestamp.isAfter(timestamp)) { return new UpTo(timestamp, true); } else { @@ -843,10 +842,10 @@ public void updateVariables(int testId, List variablesDTO) { em.flush(); } catch (PersistenceException e) { - log.error("Failed to update variables", e); + Log.error("Failed to update variables", e); throw new WebApplicationException(e, Response.serverError().build()); } - log.debug("Variables updated, everything is fine, returning"); + Log.debug("Variables updated, everything is fine, returning"); } private void ensureDefaults(Set rds) { @@ -979,7 +978,7 @@ public void updateChange(int id, Change apiChange) { jpaChange.confirmed = apiChange.confirmed; em.merge(jpaChange); } else { - throw new WebApplicationException(String.format("Could not find change with ID: %s", id)); + throw new WebApplicationException("Could not find change with ID: " + id); } } catch (PersistenceException e) { @@ -1019,7 +1018,7 @@ void startRecalculation(int testId, boolean notify, boolean debug, boolean clear Recalculation previous = recalcProgress.putIfAbsent(testId, recalculation); while (previous != null) { if (!previous.done) { - log.debugf("Already started recalculation on test %d, ignoring.", testId); + Log.debugf("Already started recalculation on test %d, ignoring.", testId); return; } if (recalcProgress.replace(testId, previous, recalculation)) { @@ -1030,15 +1029,15 @@ void startRecalculation(int testId, boolean notify, boolean debug, boolean clear recalculation.clearDatapoints = clearDatapoints; try { - log.debugf("Updating fingerprints for test %d", testId); + Log.debugf("Updating fingerprints for test %d", testId); //update fingerprints before starting recalculation //TODO: check if we need to update fingerprints for all tests mediator.updateFingerprints(testId); - log.debugf("About to recalculate datapoints in test %d 
between %s and %s", testId, from, to); + Log.debugf("About to recalculate datapoints in test %d between %s and %s", testId, from, to); //TODO:: determine if we should clear datapoints recalculation.datasets = getDatasetsForRecalculation(testId, from, to, clearDatapoints); int numRuns = recalculation.datasets.size(); - log.debugf("Starting recalculation of test %d, %d runs", testId, numRuns); + Log.debugf("Starting recalculation of test %d, %d runs", testId, numRuns); int completed = 0; recalcProgress.put(testId, recalculation); //TODO:: this could be more streamlined @@ -1058,7 +1057,7 @@ void startRecalculation(int testId, boolean notify, boolean debug, boolean clear } } catch (Throwable t) { - log.error("Recalculation failed", t); + Log.error("Recalculation failed", t); throw t; } finally { recalculation.done = true; @@ -1103,7 +1102,7 @@ void recalculateForDataset(Integer datasetId, boolean notify, boolean debug, Rec if (dataset != null) { recalculateDatapointsForDataset(dataset, notify, debug, recalculation); } else { - log.debugf("Could not find dataset with id: %d", datasetId); + Log.debugf("Could not find dataset with id: %d", datasetId); } } @@ -1142,7 +1141,7 @@ public void checkMissingDataset() { int numUpdated = em.createNativeQuery("UPDATE missingdata_rule SET last_notification = ?1 WHERE id = ?2") .setParameter(1, timeService.now()).setParameter(2, ruleId).executeUpdate(); if (numUpdated != 1) { - log.errorf("Missing data rules update for rule %d (test %d) didn't work: updated: %d", ruleId, testId, + Log.errorf("Missing data rules update for rule %d (test %d) didn't work: updated: %d", ruleId, testId, numUpdated); } } @@ -1350,14 +1349,14 @@ public void removeExpected(Run run) { query.setParameter(1, run.id); int updated = query.executeUpdate(); if (updated > 0) { - log.debugf("Removed %d run expectations as run %d was added.", updated, run.id); + Log.debugf("Removed %d run expectations as run %d was added", updated, run.id); } } @WithRoles(extras = Roles.HORREUM_SYSTEM) @Transactional void onDatasetDeleted(int datasetId) { - log.debugf("Removing changes for dataset %d", datasetId); + Log.debugf("Removing changes for dataset %d", datasetId); ChangeDAO.delete("dataset.id = ?1 AND confirmed = false", datasetId); DataPointDAO.delete("dataset.id", datasetId); //Need to make sure we delete MissingDataRuleResults when datasets are removed @@ -1369,7 +1368,7 @@ void onDatasetDeleted(int datasetId) { void onTestDeleted(int testId) { // We need to delete in a loop to cascade this to ChangeDetection List variables = VariableDAO.list("testId", testId); - log.debugf("Deleting %d variables for test (%d)", variables.size(), testId); + Log.debugf("Deleting %d variables for test (%d)", variables.size(), testId); for (var variable : variables) { variable.delete(); } @@ -1390,7 +1389,7 @@ public void checkExpectedRuns() { Util.doAfterCommit(tm, () -> notificationService.notifyExpectedRun(expectation.testId, expectation.expectedBefore.toEpochMilli(), expectation.expectedBy, expectation.backlink)); } else { - log.debugf("Skipping expected run notification on test %d since it is disabled.", expectation.testId); + Log.debugf("Skipping expected run notification on test %d since it is disabled", expectation.testId); } expectation.delete(); } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/BannerServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/BannerServiceImpl.java index 729cc4ef8..254dfac83 100644 --- 
a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/BannerServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/BannerServiceImpl.java @@ -13,18 +13,17 @@ import jakarta.transaction.Transactional; import org.eclipse.microprofile.config.inject.ConfigProperty; -import org.jboss.logging.Logger; import io.hyperfoil.tools.horreum.api.data.Banner; import io.hyperfoil.tools.horreum.api.internal.services.BannerService; import io.hyperfoil.tools.horreum.entity.BannerDAO; import io.hyperfoil.tools.horreum.mapper.BannerMapper; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.qute.Location; import io.quarkus.qute.Template; public class BannerServiceImpl implements BannerService { - private static final Logger log = Logger.getLogger(BannerServiceImpl.class); @ConfigProperty(name = "horreum.downtime.response") Optional downtimeResponse; @@ -64,7 +63,7 @@ public void set(Banner dto) { try { Files.writeString(Path.of(downtimeResponse.get()), response); } catch (IOException e) { - log.error("Failed to write response file " + downtimeResponse.get(), e); + Log.errorf(e, "Failed to write response file %s", downtimeResponse.get()); } } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java index edbfbd5d1..ca011b1da 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java @@ -15,7 +15,6 @@ import org.eclipse.microprofile.config.ConfigProvider; import org.eclipse.microprofile.config.inject.ConfigProperty; -import org.jboss.logging.Logger; import io.hyperfoil.tools.horreum.api.Version; import io.hyperfoil.tools.horreum.api.data.Access; @@ -25,13 +24,12 @@ import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO; import io.hyperfoil.tools.horreum.mapper.DatasourceMapper; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.security.identity.SecurityIdentity; @ApplicationScoped public class ConfigServiceImpl implements ConfigService { - private static final Logger log = Logger.getLogger(ConfigServiceImpl.class); - @ConfigProperty(name = "horreum.privacy") Optional privacyStatement; @@ -92,7 +90,7 @@ public Integer newDatastore(Datastore datastore) { List uploaders = identity.getRoles().stream().filter(role -> role.endsWith("-uploader")) .collect(Collectors.toList()); if (uploaders.size() != 1) { - log.debugf("Failed to create datastore %s: no owner, available uploaders: %s", dao.name, uploaders); + Log.debugf("Failed to create datastore '%s': no owner, available uploaders: %s", dao.name, uploaders); throw ServiceException.badRequest( "Missing owner and cannot select single default owners; this user has these uploader roles: " + uploaders); @@ -100,7 +98,7 @@ public Integer newDatastore(Datastore datastore) { String uploader = uploaders.get(0); dao.owner = uploader.substring(0, uploader.length() - 9) + "-team"; } else if (!identity.getRoles().contains(dao.owner)) { - log.debugf("Failed to create datastore %s: requested owner %s, available roles: %s", dao.name, dao.owner, + Log.debugf("Failed to create datastore '%s': requested owner %s, available roles: %s", dao.name, dao.owner, identity.getRoles()); throw ServiceException.badRequest("This user does not have permissions to upload datastore for owner=" + 
dao.owner); } @@ -126,16 +124,16 @@ public Integer newDatastore(Datastore datastore) { throw ServiceException.badRequest(error); } - log.debugf("Creating new Datastore with owner=%s and access=%s", dao.owner, dao.access); + Log.debugf("Creating new Datastore with owner=%s and access=%s", dao.owner, dao.access); try { em.persist(dao); em.flush(); } catch (Exception e) { - log.error("Failed to persist run.", e); + Log.error("Failed to persist backend configuration", e); throw ServiceException.serverError("Failed to persist backend configuration"); } - log.debugf("Upload flushed, backendConfig ID %d", dao.id); + Log.debugf("Upload flushed, backendConfig ID %d", dao.id); return dao.id; }
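The recurring shape of this migration, shown here and in the files below: drop the per-class org.jboss.logging.Logger field and call the static io.quarkus.logging.Log API instead; Quarkus rewrites each Log call at build time to target a logger named after the calling class. A minimal before/after sketch (illustrative only, not part of the diff; the class name is hypothetical):

    // Before: each class declares its own JBoss Logging logger.
    //     private static final Logger log = Logger.getLogger(ConfigExample.class);
    //     log.debugf("Creating datastore with owner=%s", owner);

    import io.quarkus.logging.Log;

    public class ConfigExample {
        void createDatastore(String owner) {
            // After: no logger field; at build time Quarkus substitutes a
            // logger named after ConfigExample for this static call.
            Log.debugf("Creating datastore with owner=%s", owner);
        }
    }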
diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/DatasetServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/DatasetServiceImpl.java index cfed11c91..da962a5b2 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/DatasetServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/DatasetServiceImpl.java @@ -1,7 +1,9 @@ package io.hyperfoil.tools.horreum.svc; import java.time.Instant; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -21,7 +23,6 @@ import org.hibernate.Session; import org.hibernate.query.NativeQuery; import org.hibernate.type.StandardBasicTypes; -import org.jboss.logging.Logger; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; @@ -42,13 +43,13 @@ import io.hyperfoil.tools.horreum.hibernate.JsonBinaryType; import io.hyperfoil.tools.horreum.mapper.DatasetMapper; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.runtime.Startup; import io.quarkus.security.identity.SecurityIdentity; @ApplicationScoped @Startup public class DatasetServiceImpl implements DatasetService { - private static final Logger log = Logger.getLogger(DatasetServiceImpl.class); //@formatter:off private static final String LABEL_QUERY = """ @@ -262,7 +263,6 @@ private NativeQuery initTypes(String sql) { } catch (JsonProcessingException e) { throw new RuntimeException(e); } - ; } return summary; }); @@ -342,7 +342,7 @@ public LabelPreview previewLabel(int datasetId, Label label) { try { extractors = Util.OBJECT_MAPPER.writeValueAsString(label.extractors); } catch (JsonProcessingException e) { - log.error("Cannot serialize label extractors", e); + Log.error("Cannot serialize label extractors", e); throw ServiceException.badRequest("Cannot serialize label extractors"); } JsonNode extracted; @@ -394,10 +394,9 @@ public Dataset getDataset(int datasetId) { if (dataset != null) { Hibernate.initialize(dataset.data); } else { - log.warnf("Could not retrieve dataset: " + datasetId); + Log.warnf("Could not retrieve dataset: %d", datasetId); throw ServiceException.notFound("Could not find Dataset: " + datasetId + ". If you have recently started a re-transformation, please wait until datasets are available"); - } return DatasetMapper.from(dataset); } @@ -405,7 +404,7 @@ public Dataset getDataset(int datasetId) { @WithRoles(extras = Roles.HORREUM_SYSTEM) @Transactional void calculateLabelValues(int testId, int datasetId, int queryLabelId, boolean isRecalculation) { - log.debugf("Calculating label values for dataset %d, label %d", datasetId, queryLabelId); + Log.debugf("Calculating label values for dataset %d, label %d", datasetId, queryLabelId); List extracted; try { // Note: we are fetching even labels that are marked as private/could be otherwise inaccessible @@ -519,7 +518,7 @@ private void createFingerprint(int datasetId, int testId) { json = em.createQuery("SELECT t.fingerprintLabels from test t WHERE t.id = ?1", JsonNode.class) .setParameter(1, testId).getSingleResult(); } catch (NoResultException noResultException) { - log.infof("Could not find fingerprint for dataset: %d", datasetId); + Log.infof("Could not find fingerprint for dataset: %d", datasetId); } if (json == null) return; @@ -564,10 +563,10 @@ void logMessageInNewTx(int datasetId, int level, String message, Object... param } private void logMessage(int datasetId, int level, String message, Object... params) { - String msg = String.format(message, params); + String msg = params.length == 0 ? message : message.formatted(params); DatasetDAO dataset = DatasetDAO.findById(datasetId); if (dataset != null) { - log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), dataset.testid, datasetId, + Log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), dataset.testid, datasetId, msg); new DatasetLogDAO(em.getReference(TestDAO.class, dataset.testid), em.getReference(DatasetDAO.class, datasetId), level, "labels", msg).persist();
diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ExperimentServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ExperimentServiceImpl.java index 05b06f9ed..391957aee 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ExperimentServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ExperimentServiceImpl.java @@ -22,7 +22,6 @@ import org.hibernate.Session; import org.hibernate.query.NativeQuery; import org.hibernate.type.StandardBasicTypes; -import org.jboss.logging.Logger; import com.fasterxml.jackson.databind.JsonNode; @@ -45,13 +44,13 @@ import io.hyperfoil.tools.horreum.mapper.DatasetMapper; import io.hyperfoil.tools.horreum.mapper.ExperimentProfileMapper; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.panache.common.Sort; import io.quarkus.runtime.Startup; @ApplicationScoped @Startup public class ExperimentServiceImpl implements ExperimentService { - private static final Logger log = Logger.getLogger(ExperimentServiceImpl.class); private static final Map MODELS = Map.of( RelativeDifferenceExperimentModel.NAME, new RelativeDifferenceExperimentModel()); @@ -149,8 +148,8 @@ public void onTestDeleted(int testId) { } private void addLog(List logs, int testId, int datasetId, int level, String format, Object... args) { - String msg = args.length == 0 ? format : String.format(format, args); - log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), testId, datasetId, msg); + String msg = args.length == 0 ? format : format.formatted(args); + Log.tracef("Logging %s for test %d, dataset %d: %s", PersistentLogDAO.logLevel(level), testId, datasetId, msg); logs.add(new DatasetLogDAO(em.getReference(TestDAO.class, testId), em.getReference(DatasetDAO.class, datasetId), level, "experiment", msg)); }
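The addLog/logMessage helpers above also swap String.format(format, args) for format.formatted(args), and only format when arguments are present, so a pre-rendered message containing a literal '%' is never pushed through the formatter. A sketch of that guard (illustrative; the class and method names are hypothetical):

    public final class MessageRender {
        private MessageRender() {
        }

        // Format lazily: with no args, return the message untouched so that
        // literal '%' characters (e.g. "cpu at 95%") cannot trigger an
        // IllegalFormatException or be mangled by the formatter.
        public static String render(String format, Object... args) {
            return args.length == 0 ? format : format.formatted(args);
        }
    }

Under this sketch, render("cpu at 95%") returns the text as-is, while render("cpu at %d%%", 95) goes through the formatter.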
diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/LogServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/LogServiceImpl.java index 9af81e33a..0804a4765 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/LogServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/LogServiceImpl.java @@ -11,7 +11,6 @@ import jakarta.transaction.Transactional; import org.eclipse.microprofile.config.inject.ConfigProperty; -import org.jboss.logging.Logger; import io.hyperfoil.tools.horreum.api.alerting.DatasetLog; import io.hyperfoil.tools.horreum.api.alerting.TransformationLog; @@ -25,6 +24,7 @@ import io.hyperfoil.tools.horreum.mapper.TransformationLogMapper; import io.hyperfoil.tools.horreum.server.WithRoles; import io.quarkus.hibernate.orm.panache.PanacheQuery; +import io.quarkus.logging.Log; import io.quarkus.panache.common.Page; import io.quarkus.panache.common.Sort; import io.quarkus.runtime.Startup; @@ -33,7 +33,7 @@ @ApplicationScoped @Startup public class LogServiceImpl implements LogService { - private static final Logger log = Logger.getLogger(LogServiceImpl.class); + private static final Instant EPOCH_START = Instant.ofEpochMilli(0); private static final Instant FAR_FUTURE = Instant.ofEpochSecond(4 * (long) Integer.MAX_VALUE); @@ -93,7 +93,7 @@ public void deleteDatasetLogs(String source, int testId, Integer datasetId, Long "test.id = ?1 AND source = ?2 AND timestamp >= ?3 AND timestamp < ?4 AND dataset.id = ?5", testId, source, fromTs, toTs, datasetId); } - log.debugf("Deleted %d logs for test %s", deleted, testId); + Log.debugf("Deleted %d logs for test %s", deleted, testId); } @RolesAllowed(Roles.TESTER) @@ -141,7 +141,7 @@ public void deleteTransformationLogs(int testId, Integer runId, Long from, Long deleted = TransformationLogDAO.delete("test.id = ?1 AND run.id = ?2 AND timestamp >= ?3 AND timestamp < ?4", testId, runId, fromTs, toTs); } - log.debugf("Deleted %d logs for test %d, run %d", deleted, testId, runId == null ? -1 : 0); + Log.debugf("Deleted %d logs for test %d, run %d", deleted, testId, runId == null ? -1 : runId); } @Override @@ -170,7 +170,7 @@ public void deleteActionLogs(int testId, Long from, Long to) { Instant fromTs = from == null ? EPOCH_START : Instant.ofEpochMilli(from); Instant toTs = to == null ? 
FAR_FUTURE : Instant.ofEpochMilli(to); long deleted = ActionLogDAO.delete("test.id = ?1 AND timestamp >= ?2 AND timestamp < ?3", testId, fromTs, toTs); - log.debugf("Deleted %d logs for test %d", deleted, testId); + Log.debugf("Deleted %d logs for test %d", deleted, testId); } @WithRoles(extras = Roles.HORREUM_SYSTEM) @@ -186,6 +186,6 @@ public void onTestDelete(int testId) { void checkExpiredTransformationLogs() { Duration maxLifespan = Duration.parse(transformationLogMaxLifespan); long logsDeleted = TransformationLogDAO.delete("timestamp < ?1", timeService.now().minus(maxLifespan)); - log.debugf("Deleted %d expired transformation log messages", logsDeleted); + Log.debugf("Deleted %d expired transformation log messages", logsDeleted); } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/NotificationServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/NotificationServiceImpl.java index a091a5e1a..98c8d8504 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/NotificationServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/NotificationServiceImpl.java @@ -21,7 +21,6 @@ import jakarta.transaction.Transactional; import org.hibernate.Session; -import org.jboss.logging.Logger; import io.hyperfoil.tools.horreum.api.alerting.NotificationSettings; import io.hyperfoil.tools.horreum.api.internal.services.NotificationService; @@ -34,12 +33,13 @@ import io.hyperfoil.tools.horreum.notification.Notification; import io.hyperfoil.tools.horreum.notification.NotificationPlugin; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.runtime.Startup; @ApplicationScoped @Startup public class NotificationServiceImpl implements NotificationService { - private static final Logger log = Logger.getLogger(NotificationServiceImpl.class); + //@formatter:off private static final String GET_NOTIFICATIONS = """ WITH ens AS ( @@ -81,10 +81,10 @@ public void init() { @Transactional public void onNewChanges(DatasetChanges event) { if (!event.isNotify()) { - log.debug("Notification skipped"); + Log.debug("Notification skipped"); return; } - log.debugf("Received new changes in test %d (%s), dataset %d/%d (fingerprint: %s)", + Log.debugf("Received new changes in test %d (%s), dataset %d/%d (fingerprint: %s)", event.dataset.testId, event.testName, event.dataset.runId, event.dataset.ordinal, event.fingerprint); notifyAll(event.dataset.testId, n -> n.notifyChanges(event)); } @@ -93,14 +93,14 @@ public void onNewChanges(DatasetChanges event) { @Transactional public void onMissingValues(MissingValuesEvent event) { if (!event.notify) { - log.debugf("Skipping notification for missing run values on test %d, run %d", event.dataset.testId, + Log.debugf("Skipping notification for missing run values on test %d, run %d", event.dataset.testId, event.dataset.id); return; } // TODO: breaks storage/alerting separation! TestDAO test = TestDAO.findById(event.dataset.testId); String testName = test == null ? 
"unknown" : test.name; - log.debugf("Received missing values event in test %d (%s), run %d, variables %s", event.dataset.testId, testName, + Log.debugf("Received missing values event in test %d (%s), run %d, variables %s", event.dataset.testId, testName, event.dataset.id, event.variables); String fingerprint = em.getReference(DatasetDAO.class, event.dataset.id).getFingerprint(); @@ -111,18 +111,18 @@ private void notifyAll(int testId, Consumer consumer) { List results = em.unwrap(Session.class).createNativeQuery(GET_NOTIFICATIONS, Object[].class) .setParameter(1, testId).getResultList(); if (results.isEmpty()) { - log.infof("There are no subscribers for notification on test %d!", testId); + Log.infof("There are no subscribers for notification on test %d!", testId); } for (Object[] pair : results) { if (pair.length != 3) { - log.errorf("Unexpected result %s", Arrays.toString(pair)); + Log.errorf("Unexpected result %s", Arrays.toString(pair)); } String method = String.valueOf(pair[0]); String data = String.valueOf(pair[1]); String userName = String.valueOf(pair[2]); NotificationPlugin plugin = plugins.get(method); if (plugin == null) { - log.errorf("Cannot notify %s; no plugin for method %s with data %s", userName, method, data); + Log.errorf("Cannot notify %s; no plugin for method %s with data %s", userName, method, data); } else { consumer.accept(plugin.create(userName, data)); } @@ -154,7 +154,7 @@ public void updateSettings(String name, boolean team, NotificationSettings[] set try { tm.setRollbackOnly(); } catch (SystemException e) { - log.error("Cannot rollback", e); + Log.error("Cannot rollback", e); } throw ServiceException.badRequest("Invalid method " + ns.method); } @@ -197,7 +197,7 @@ public void notifyApiKeyExpiration(UserApiKey key, long toExpiration) { NotificationSettingsDAO. 
stream("name", key.user.username).forEach(notification -> { NotificationPlugin plugin = plugins.get(notification.method); if (plugin == null) { - log.errorf("Cannot notify %s of API key \"%s\" expiration: no plugin for method %s", + Log.errorf("Cannot notify %s of API key '%s' expiration: no plugin for method %s", notification.name, key.name, notification.method); } else { plugin.create(notification.name, notification.data) diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ReportServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ReportServiceImpl.java index 752313a6e..7ab199773 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ReportServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ReportServiceImpl.java @@ -26,7 +26,6 @@ import org.hibernate.Session; import org.hibernate.query.NativeQuery; import org.hibernate.type.StandardBasicTypes; -import org.jboss.logging.Logger; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -44,13 +43,13 @@ import io.hyperfoil.tools.horreum.mapper.ReportCommentMapper; import io.hyperfoil.tools.horreum.mapper.TableReportMapper; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.runtime.Startup; import io.quarkus.security.identity.SecurityIdentity; @ApplicationScoped @Startup public class ReportServiceImpl implements ReportService { - private static final Logger log = Logger.getLogger(ReportServiceImpl.class); static { System.setProperty("polyglot.engine.WarnInterpreterOnly", "false"); @@ -315,16 +314,16 @@ private TableReportDAO createTableReport(TableReportConfigDAO config, Integer re NativeQuery timestampQuery; if (!nullOrEmpty(config.filterLabels)) { List datasetIds = filterDatasetIds(config, report); - log.debugf("Table report %s(%d) includes datasets %s", config.title, config.id, datasetIds); + Log.debugf("Table report %s(%d) includes datasets %s", config.title, config.id, datasetIds); series = selectByDatasets(config.seriesLabels, datasetIds); - log.debugf("Series: %s", rowsToMap(series)); + Log.debugf("Series: %s", rowsToMap(series)); if (!nullOrEmpty(config.scaleLabels)) { scales = selectByDatasets(config.scaleLabels, datasetIds); - log.debugf("Scales: %s", rowsToMap(scales)); + Log.debugf("Scales: %s", rowsToMap(scales)); } if (!nullOrEmpty(config.categoryLabels)) { categories = selectByDatasets(config.categoryLabels, datasetIds); - log.debugf("Categories: %s", rowsToMap(categories)); + Log.debugf("Categories: %s", rowsToMap(categories)); } timestampQuery = em.unwrap(Session.class) .createNativeQuery("SELECT id, start FROM dataset WHERE id IN :datasets", Object[].class) @@ -333,14 +332,14 @@ private TableReportDAO createTableReport(TableReportConfigDAO config, Integer re log(report, PersistentLogDAO.DEBUG, "Table report %s(%d) includes all datasets for test %s(%d)", config.title, config.id, config.test.name, config.test.id); series = selectByTest(config.test.id, config.seriesLabels); - log.debugf("Series: %s", rowsToMap(series)); + Log.debugf("Series: %s", rowsToMap(series)); if (!nullOrEmpty(config.scaleLabels)) { scales = selectByTest(config.test.id, config.scaleLabels); - log.debugf("Scales: %s", rowsToMap(scales)); + Log.debugf("Scales: %s", rowsToMap(scales)); } if (!nullOrEmpty(config.categoryLabels)) { categories = selectByTest(config.test.id, config.categoryLabels); - log.debugf("Categories: %s", rowsToMap(categories)); + 
Log.debugf("Categories: %s", rowsToMap(categories)); } timestampQuery = em.unwrap(Session.class) .createNativeQuery("SELECT id, start FROM dataset WHERE testid = ?", Object[].class) @@ -363,7 +362,7 @@ private TableReportDAO createTableReport(TableReportConfigDAO config, Integer re } Map datasetData = series.isEmpty() ? Collections.emptyMap() : getData(config, report, categories, series, scales); - log.debugf("Data per dataset: %s", datasetData); + Log.debugf("Data per dataset: %s", datasetData); Map timestamps = timestampQuery.getResultStream() .collect(Collectors.toMap(row -> (Integer) row[0], row -> (Instant) row[1])); @@ -407,7 +406,7 @@ private TableReportDAO createTableReport(TableReportConfigDAO config, Integer re log(report, PersistentLogDAO.ERROR, "Failed to run report %s(%d) label function on run %d. Offending code:
%s
", config.title, config.id, datasetId, jsCode); - log.debug("Caused by exception", e); + Log.debug("Caused by exception", e); } } } @@ -455,7 +454,7 @@ private Map getData(TableReportConfigDAO config, T log(report, PersistentLogDAO.ERROR, "Failed to run report %s(%d) category function on dataset %d/%d (%d). Offending code:
%s
", config.title, config.id, data.runId, data.ordinal, data.datasetId, jsCode); - log.debug("Caused by exception", e); + Log.debug("Caused by exception", e); continue; } } @@ -481,7 +480,7 @@ private Map getData(TableReportConfigDAO config, T log(report, PersistentLogDAO.ERROR, "Failed to run report %s(%d) series function on run %d/%d (%d). Offending code:
%s
", config.title, config.id, runId, ordinal, datasetId, jsCode); - log.debug("Caused by exception", e); + Log.debug("Caused by exception", e); } } } @@ -505,7 +504,7 @@ private Map getData(TableReportConfigDAO config, T log(report, PersistentLogDAO.ERROR, "Failed to run report %s(%d) label function on dataset %d/%d (%d). Offending code:
%s
", config.title, config.id, runId, ordinal, datasetId, jsCode); - log.debug("Caused by exception", e); + Log.debug("Caused by exception", e); } } } @@ -522,7 +521,7 @@ private List getFinalDatasetIds(Map timestamps, Map getFinalDatasetIds(Map timestamps, Map filterDatasetIds(TableReportConfigDAO config, TableReportD datasetIds.add(datasetId); } else { debugList.append("(filtered)"); - log.debugf("Dataset %d/%d (%d) filtered out, value: %s", runId, ordinal, datasetId, row[3]); + Log.debugf("Dataset %d/%d (%d) filtered out, value: %s", runId, ordinal, datasetId, row[3]); } } else { @@ -680,7 +679,7 @@ private List filterDatasetIds(TableReportConfigDAO config, TableReportD log(report, PersistentLogDAO.ERROR, "Failed to run report %s(%d) filter function on dataset %d/%d (%d). Offending code:
%s
", config.title, config.id, runId, ordinal, datasetId, jsCode); - log.debug("Caused by exception", e); + Log.debug("Caused by exception", e); } } log(report, PersistentLogDAO.DEBUG, "Datasets considered for report: %s", debugList); @@ -690,7 +689,7 @@ private List filterDatasetIds(TableReportConfigDAO config, TableReportD } private void log(TableReportDAO report, int level, String msg, Object... args) { - String message = args.length == 0 ? msg : String.format(msg, args); + String message = args.length == 0 ? msg : msg.formatted(args); report.logs.add(new ReportLogDAO(report, level, message)); } @@ -711,7 +710,7 @@ private void executeInContext(TableReportConfigDAO config, Consumer con } } finally { if (out.size() > 0) { - log.infof("Output while calculating data for report %s(%d):
%s
", config.title, config.id, + Log.infof("Output while calculating data for report %s(%d):
%s
", config.title, config.id, out.toString()); } } @@ -722,6 +721,6 @@ private void executeInContext(TableReportConfigDAO config, Consumer con public void onTestDelete(int testId) { int changedRows = em.createNativeQuery("UPDATE tablereportconfig SET testid = NULL WHERE testid = ?") .setParameter(1, testId).executeUpdate(); - log.infof("Disowned %d report configs as test (%d) was deleted.", changedRows, testId); + Log.infof("Disowned %d report configs as test (%d) was deleted", changedRows, testId); } } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java index 176c4a73b..91b84a3ed 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java @@ -44,7 +44,6 @@ import org.hibernate.Session; import org.hibernate.query.NativeQuery; import org.hibernate.type.StandardBasicTypes; -import org.jboss.logging.Logger; import org.jboss.resteasy.reactive.multipart.FileUpload; import com.fasterxml.jackson.core.JsonProcessingException; @@ -83,6 +82,7 @@ import io.hyperfoil.tools.horreum.mapper.RunMapper; import io.hyperfoil.tools.horreum.server.RoleManager; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.narayana.jta.runtime.TransactionConfiguration; import io.quarkus.runtime.Startup; import io.quarkus.security.identity.SecurityIdentity; @@ -90,7 +90,6 @@ @ApplicationScoped @Startup public class RunServiceImpl implements RunService { - private static final Logger log = Logger.getLogger(RunServiceImpl.class); //@formatter:off private static final String FIND_AUTOCOMPLETE = """ @@ -157,7 +156,7 @@ WHEN jsonb_typeof(data) = 'array' THEN ?1 IN (SELECT jsonb_array_elements(data)- @Transactional @WithRoles(extras = Roles.HORREUM_SYSTEM) void onTestDeleted(int testId) { - log.debugf("Trashing runs for test (%d)", testId); + Log.debugf("Trashing runs for test %d", testId); ScrollableResults results = session.createNativeQuery("SELECT id FROM run WHERE testid = ?1", Integer.class) .setParameter(1, testId) .setReadOnly(true) @@ -184,7 +183,7 @@ void trashDueToTestDeleted(int id) { void onNewOrUpdatedSchema(int schemaId) { SchemaDAO schema = SchemaDAO.findById(schemaId); if (schema == null) { - log.errorf("Cannot process schema add/update: cannot load schema %d", schemaId); + Log.errorf("Cannot process schema add/update: cannot load schema %d", schemaId); return; } processNewOrUpdatedSchema(schema); @@ -194,7 +193,7 @@ void onNewOrUpdatedSchema(int schemaId) { void processNewOrUpdatedSchema(SchemaDAO schema) { // we don't have to care about races with new runs findRunsWithUri(schema.uri, (runId, testId) -> { - log.debugf("Recalculate Datasets for run %d - schema %d (%s) changed", runId, schema.id, schema.uri); + Log.debugf("Recalculate Datasets for run %d - schema %d (%s) changed", runId, schema.id, schema.uri); onNewOrUpdatedSchemaForRun(runId, schema.id); }); } @@ -355,7 +354,7 @@ public Response add(String testNameOrId, String owner, Access access, Run run) { if (access != null) { run.access = access; } - log.debugf("About to add new run to test %s using owner", testNameOrId, owner); + Log.debugf("About to add new run to test %s using owner %s", testNameOrId, owner); if (testNameOrId == null || testNameOrId.isEmpty()) { if (run.testid == null || run.testid == 0) { return 
diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java index 176c4a73b..91b84a3ed 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java @@ -44,7 +44,6 @@ import org.hibernate.Session; import org.hibernate.query.NativeQuery; import org.hibernate.type.StandardBasicTypes; -import org.jboss.logging.Logger; import org.jboss.resteasy.reactive.multipart.FileUpload; import com.fasterxml.jackson.core.JsonProcessingException; @@ -83,6 +82,7 @@ import io.hyperfoil.tools.horreum.mapper.RunMapper; import io.hyperfoil.tools.horreum.server.RoleManager; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.narayana.jta.runtime.TransactionConfiguration; import io.quarkus.runtime.Startup; import io.quarkus.security.identity.SecurityIdentity; @@ -90,7 +90,6 @@ @ApplicationScoped @Startup public class RunServiceImpl implements RunService { - private static final Logger log = Logger.getLogger(RunServiceImpl.class); //@formatter:off private static final String FIND_AUTOCOMPLETE = """ @@ -157,7 +156,7 @@ WHEN jsonb_typeof(data) = 'array' THEN ?1 IN (SELECT jsonb_array_elements(data)- @Transactional @WithRoles(extras = Roles.HORREUM_SYSTEM) void onTestDeleted(int testId) { - log.debugf("Trashing runs for test (%d)", testId); + Log.debugf("Trashing runs for test %d", testId); ScrollableResults results = session.createNativeQuery("SELECT id FROM run WHERE testid = ?1", Integer.class) .setParameter(1, testId) .setReadOnly(true) @@ -184,7 +183,7 @@ void trashDueToTestDeleted(int id) { void onNewOrUpdatedSchema(int schemaId) { SchemaDAO schema = SchemaDAO.findById(schemaId); if (schema == null) { - log.errorf("Cannot process schema add/update: cannot load schema %d", schemaId); + Log.errorf("Cannot process schema add/update: cannot load schema %d", schemaId); return; } processNewOrUpdatedSchema(schema); @@ -194,7 +193,7 @@ void onNewOrUpdatedSchema(int schemaId) { void processNewOrUpdatedSchema(SchemaDAO schema) { // we don't have to care about races with new runs findRunsWithUri(schema.uri, (runId, testId) -> { - log.debugf("Recalculate Datasets for run %d - schema %d (%s) changed", runId, schema.id, schema.uri); + Log.debugf("Recalculate Datasets for run %d - schema %d (%s) changed", runId, schema.id, schema.uri); onNewOrUpdatedSchemaForRun(runId, schema.id); }); } @@ -355,7 +354,7 @@ public Response add(String testNameOrId, String owner, Access access, Run run) { if (access != null) { run.access = access; } - log.debugf("About to add new run to test %s using owner", testNameOrId, owner); + Log.debugf("About to add new run to test %s using owner %s", testNameOrId, owner); if (testNameOrId == null || testNameOrId.isEmpty()) { if (run.testid == null || run.testid == 0) { return Response.status(Response.Status.BAD_REQUEST).entity("No test name or id provided").build(); } @@ -380,16 +379,16 @@ public Response addRunFromData(String start, String stop, String test, String ow public Response addRunFromData(String start, String stop, String test, String owner, Access access, String schemaUri, String description, FileUpload data, FileUpload metadata) { if (data == null) { - log.debugf("Failed to upload for test %s with description %s because of missing data.", test, description); + Log.debugf("Failed to upload for test %s with description %s because of missing data", test, description); throw ServiceException.badRequest("No data!"); } else if (!MediaType.APPLICATION_JSON.equals(data.contentType())) { - log.debugf("Failed to upload for test %s with description %s because of wrong data content type: %s.", test, + Log.debugf("Failed to upload for test %s with description %s because of wrong data content type: %s", test, description, data.contentType()); throw ServiceException .badRequest("Part 'data' must use content-type: application/json, currently: " + data.contentType()); } if (metadata != null && !MediaType.APPLICATION_JSON.equals(metadata.contentType())) { - log.debugf("Failed to upload for test %s with description %s because of wrong metadata content type: %s.", test, + Log.debugf("Failed to upload for test %s with description %s because of wrong metadata content type: %s", test, description, metadata.contentType()); throw ServiceException.badRequest( "Part 'metadata' must use content-type: application/json, currently: " + metadata.contentType()); @@ -403,20 +402,20 @@ public Response addRunFromData(String start, String stop, String test, String ow if (metadataNode.isArray()) { for (JsonNode item : metadataNode) { if (!item.isObject()) { - log.debugf( - "Failed to upload for test %s with description %s because of wrong item in metadata: %s.", + Log.debugf( + "Failed to upload for test %s with description %s because of wrong item in metadata: %s", test, description, item); throw ServiceException.badRequest("One of metadata elements is not an object!"); } else if (!item.has("$schema")) { - log.debugf( - "Failed to upload for test %s with description %s because of missing schema in metadata: %s.", + Log.debugf( + "Failed to upload for test %s with description %s because of missing schema in metadata: %s", test, description, item); throw ServiceException.badRequest("One of metadata elements is missing a schema!"); } } } else if (metadataNode.isObject()) { if (!metadataNode.has("$schema")) { - log.debugf("Failed to upload for test %s with description %s because of missing schema in metadata.", + Log.debugf("Failed to upload for test %s with description %s because of missing schema in metadata", test, description); throw ServiceException.badRequest("Metadata is missing schema!"); } @@ -424,7 +423,7 @@ } } } catch (IOException e) { - log.error("Failed to read data/metadata from upload file", e); + Log.error("Failed to read data/metadata from upload file", e); throw ServiceException.badRequest("Provided data/metadata can't be read (JSON encoding problem?)"); } return addRunFromData(start, stop, test, owner, access, schemaUri, description, dataNode.toString(), metadataNode); @@ -438,7 +437,7 @@ Response addRunFromData(String start, String stop, String test, String schemaUri, String description, String stringData, JsonNode metadata) { if (stringData == null) { - log.debugf("Failed to upload for 
test %s with description %s because of missing data.", test, description); + Log.debugf("Failed to upload for test %s with description %s because of missing data", test, description); throw ServiceException.badRequest("No data!"); } JsonNode data = null; @@ -451,7 +450,7 @@ Response addRunFromData(String start, String stop, String test, Object foundTest = findIfNotSet(test, data); String testNameOrId = foundTest == null ? null : foundTest.toString().trim(); if (testNameOrId == null || testNameOrId.isEmpty()) { - log.debugf("Failed to upload for test %s with description %s as the test cannot be identified.", test, description); + Log.debugf("Failed to upload for test %s with description %s as the test cannot be identified", test, description); throw ServiceException.badRequest("Cannot identify test name."); } @@ -501,7 +500,7 @@ void persistRun(ServiceMediator.RunUpload runUpload) { roleManager.setRoles(runUpload.roles.stream().collect(Collectors.joining(","))); TestDAO testEntity = TestDAO.findById(runUpload.testId); if (testEntity == null) { - log.errorf("Could not find Test (%d) for Run Upload", runUpload.testId); + Log.errorf("Could not find Test (%d) for Run Upload", runUpload.testId); return; } try { @@ -510,11 +509,10 @@ void persistRun(ServiceMediator.RunUpload runUpload) { runUpload.description, runUpload.metaData, runUpload.payload, testEntity); if (runID == null) { - log.errorf("Could not persist Run for Test: %d", testEntity.name); + Log.errorf("Could not persist Run for Test: %s", testEntity.name); } } catch (ServiceException serviceException) { - log.errorf("Could not persist Run for Test: %d", testEntity.name, serviceException); - + Log.errorf(serviceException, "Could not persist Run for Test: %s", testEntity.name); } } @@ -528,11 +526,11 @@ private Integer getPersistRun(String start, String stop, String test, String own Instant stopInstant = Util.toInstant(foundStop); if (startInstant == null) { - log.debugf("Failed to upload for test %s with description %s; cannot parse start time %s (%s)", test, description, + Log.debugf("Failed to upload for test %s with description %s; cannot parse start time %s (%s)", test, description, foundStart, start); throw ServiceException.badRequest("Cannot parse start time from " + foundStart + " (" + start + ")"); } else if (stopInstant == null) { - log.debugf("Failed to upload for test %s with description %s; cannot parse start time %s (%s)", test, description, + Log.debugf("Failed to upload for test %s with description %s; cannot parse stop time %s (%s)", test, description, foundStop, stop); throw ServiceException.badRequest("Cannot parse stop time from " + foundStop + " (" + stop + ")"); } @@ -549,7 +547,7 @@ private Integer getPersistRun(String start, String stop, String test, String own } } - log.debugf("Creating new run for test %s(%d) with description %s", testEntity.name, testEntity.id, foundDescription); + Log.debugf("Creating new run for test %s(%d) with description %s", testEntity.name, testEntity.id, foundDescription); RunDAO run = new RunDAO(); run.testid = testEntity.id; @@ -588,7 +586,7 @@ private Integer addAuthenticated(RunDAO run, TestDAO test) { List uploaders = identity.getRoles().stream().filter(role -> role.endsWith("-uploader")) .collect(Collectors.toList()); if (uploaders.size() != 1) { - log.debugf("Failed to upload for test %s: no owner, available uploaders: %s", test.name, uploaders); + Log.debugf("Failed to upload for test %s: no owner, available uploaders: %s", test.name, uploaders); throw 
ServiceException.badRequest( "Missing owner and cannot select single default owners; this user has these uploader roles: " + uploaders); @@ -596,14 +594,14 @@ private Integer addAuthenticated(RunDAO run, TestDAO test) { String uploader = uploaders.get(0); run.owner = uploader.substring(0, uploader.length() - 9) + "-team"; } else if (!Objects.equals(test.owner, run.owner) && !identity.getRoles().contains(run.owner)) { - log.debugf("Failed to upload for test %s: requested owner %s, available roles: %s", test.name, run.owner, + Log.debugf("Failed to upload for test %s: requested owner %s, available roles: %s", test.name, run.owner, identity.getRoles()); throw ServiceException.badRequest("This user does not have permissions to upload run for owner=" + run.owner); } if (run.access == null) { run.access = Access.PRIVATE; } - log.debugf("Uploading with owner=%s and access=%s", run.owner, run.access); + Log.debugf("Uploading with owner=%s and access=%s", run.owner, run.access); try { if (run.id == null) { @@ -614,10 +612,10 @@ private Integer addAuthenticated(RunDAO run, TestDAO test) { } em.flush(); } catch (Exception e) { - log.error("Failed to persist run.", e); + Log.error("Failed to persist run", e); throw ServiceException.serverError("Failed to persist run"); } - log.debugf("Upload flushed, run ID %d", run.id); + Log.debugf("Upload flushed, run ID %d", run.id); mediator.newRun(RunMapper.from(run)); transform(run.id, false); @@ -816,7 +814,7 @@ private RunSummary createSummary(Object[] row) { try { run.datasets = Util.OBJECT_MAPPER.treeToValue(((ArrayNode) row[11]), Integer[].class); } catch (JsonProcessingException e) { - log.warnf("Could not map datasets to array"); + Log.warnf("Could not map datasets to array"); } } //if we send over an empty JsonNode object it will be a NullNode, that can be cast to a string @@ -824,7 +822,7 @@ private RunSummary createSummary(Object[] row) { try { run.validationErrors = Util.OBJECT_MAPPER.treeToValue(((ArrayNode) row[12]), ValidationError[].class); } catch (JsonProcessingException e) { - log.warnf("Could not map validation errors to array"); + Log.warnf("Could not map validation errors to array"); } } return run; @@ -947,7 +945,7 @@ private void trashConnectedDatasets(int runId, int testId) { //Make sure to remove run_schemas as we've trashed the run em.createNativeQuery("DELETE FROM run_schemas WHERE runid = ?1").setParameter(1, runId).executeUpdate(); List datasets = DatasetDAO.list("run.id", runId); - log.debugf("Trashing run %d (test %d, %d datasets)", runId, testId, datasets.size()); + Log.debugf("Trashing run %d (test %d, %d datasets)", runId, testId, datasets.size()); for (var dataset : datasets) { mediator.propagatedDatasetDelete(dataset.id); } @@ -1051,7 +1049,7 @@ public void recalculateAll(String fromStr, String toStr) { "DELETE FROM dataset USING run WHERE run.id = dataset.runid AND run.trashed AND run.start BETWEEN ?1 AND ?2") .setParameter(1, from).setParameter(2, to).executeUpdate(); if (deleted > 0) { - log.debugf("Deleted %d datasets for trashed runs between %s and %s", deleted, from, to); + Log.debugf("Deleted %d datasets for trashed runs between %s and %s", deleted, from, to); } ScrollableResults results = session @@ -1068,7 +1066,7 @@ public void recalculateAll(String fromStr, String toStr) { .scroll(ScrollMode.FORWARD_ONLY); while (results.next()) { Recalculate r = results.get(); - log.debugf("Recalculate Datasets for run %d - forcing recalculation of all between %s and %s", r.runId, from, to); + Log.debugf("Recalculate Datasets for 
run %d - forcing recalculation of all between %s and %s", r.runId, from, to); // transform will add proper roles anyway // messageBus.executeForTest(r.testId, () -> datasetService.withRecalculationLock(() -> transform(r.runId, true))); Util.registerTxSynchronization(tm, txStatus -> mediator.queueRunRecalculation(r.runId)); @@ -1088,11 +1086,11 @@ public void recalculateAll(String fromStr, String toStr) { @Transactional int transform(int runId, boolean isRecalculation) { if (runId < 1) { - log.errorf("Transformation parameters error: run %s", runId); + Log.errorf("Transformation parameters error: run %s", runId); return 0; } - log.debugf("Transforming run ID %d, recalculation? %s", runId, Boolean.toString(isRecalculation)); + Log.debugf("Transforming run ID %d, recalculation? %s", runId, Boolean.toString(isRecalculation)); int numDatasets = 0; // check whether there is an ongoing transformation on the same runId @@ -1103,7 +1101,7 @@ int transform(int runId, boolean isRecalculation) { Util.registerTxSynchronization(tm, txStatus -> transformations.remove(runId, status)); if (prev != null) { // there is an ongoing transformation that has recently been initiated - log.warnf("Transformation for run %d already in progress", runId); + Log.warnf("Transformation for run %d already in progress", runId); return numDatasets; } @@ -1118,7 +1116,7 @@ int transform(int runId, boolean isRecalculation) { RunDAO run = RunDAO.findById(runId); if (run == null) { - log.errorf("Cannot load run ID %d for transformation", runId); + Log.errorf("Cannot load run ID %d for transformation", runId); return numDatasets; // this is 0 } Map transformerResults = new TreeMap<>(); @@ -1147,7 +1145,7 @@ int transform(int runId, boolean isRecalculation) { if (transformerId != null) { t = TransformerDAO.findById(transformerId); if (t == null) { - log.errorf("Missing transformer with ID %d", transformerId); + Log.errorf("Missing transformer with ID %d", transformerId); } } else { t = null; @@ -1189,7 +1187,7 @@ int transform(int runId, boolean isRecalculation) { if (t.extractors.size() == 1) { if (root.size() != 1) { // missing results should be null nodes - log.errorf("Unexpected result for single extractor: %s", root.toPrettyString()); + Log.errorf("Unexpected result for single extractor: %s", root.toPrettyString()); } else { root = root.iterator().next(); } @@ -1285,17 +1283,20 @@ int transform(int runId, boolean isRecalculation) { if (position < node.size()) { all.add(node.get(position)); } else { - String message = String.format( - "Transformer %d produced an array of %d elements but other transformer " + - "produced %d elements; dataset %d/%d might be missing some data.", - entry.getKey(), node.size(), max, run.id, numDatasets); + String message = """ + Transformer %d produced an array of %d elements but other transformer produced %d elements; dataset %d/%d might be missing some data. 
+ """ + .formatted(entry.getKey(), node.size(), max, run.id, numDatasets); logMessage(run, PersistentLogDAO.WARN, "%s", message); - log.warnf(message); + Log.warnf(message); } } else { - logMessage(run, PersistentLogDAO.WARN, "Unexpected result provided by one of the transformers: %s", - node); - log.warnf("Unexpected result provided by one of the transformers: %s", node); + String message = """ + Unexpected result provided by transformer %d: %s + """ + .formatted(entry.getKey(), node); + logMessage(run, PersistentLogDAO.WARN, "%s", message); + Log.warnf(message); } } nakedNodes.forEach(all::add); @@ -1326,8 +1327,8 @@ private void createDataset(DatasetDAO ds, boolean isRecalculation) { Util.registerTxSynchronization(tm, txStatus -> mediator.publishEvent(AsyncEventChannels.DATASET_NEW, ds.testid, new Dataset.EventNew(DatasetMapper.from(ds), isRecalculation))); } catch (TransactionRequiredException tre) { - log.error( - "Failed attempt to persist and send Dataset event during inactive Transaction. Likely due to prior error.", + Log.error( + "Failed attempt to persist and send Dataset event during inactive Transaction, likely due to prior error", tre); } } @@ -1335,7 +1336,7 @@ private void createDataset(DatasetDAO ds, boolean isRecalculation) { @WithRoles(extras = Roles.HORREUM_SYSTEM) @Transactional(Transactional.TxType.REQUIRES_NEW) protected void logMessage(RunDAO run, int level, String format, Object... args) { - String msg = args.length > 0 ? String.format(format, args) : format; + String msg = args.length == 0 ? format : format.formatted(args); new TransformationLogDAO(em.getReference(TestDAO.class, run.testid), run, level, msg).persist(); } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SchemaServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SchemaServiceImpl.java index b794569d9..2ea0104c2 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SchemaServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SchemaServiceImpl.java @@ -24,7 +24,6 @@ import org.hibernate.query.NativeQuery; import org.hibernate.query.SelectionQuery; import org.hibernate.type.StandardBasicTypes; -import org.jboss.logging.Logger; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -63,13 +62,13 @@ import io.hyperfoil.tools.horreum.mapper.ValidationErrorMapper; import io.hyperfoil.tools.horreum.server.WithRoles; import io.quarkus.hibernate.orm.panache.PanacheQuery; +import io.quarkus.logging.Log; import io.quarkus.narayana.jta.runtime.TransactionConfiguration; import io.quarkus.panache.common.Page; import io.quarkus.panache.common.Sort; import io.quarkus.security.identity.SecurityIdentity; public class SchemaServiceImpl implements SchemaService { - private static final Logger log = Logger.getLogger(SchemaServiceImpl.class); //@formatter:off private static final String FETCH_SCHEMAS_RECURSIVE = @@ -175,12 +174,12 @@ public Integer add(Schema schemaDTO) { em.flush(); newOrUpdatedSchema(schema); } - log.debugf("Added schema %s (%d), URI %s", schema.name, schema.id, schema.uri); + Log.debugf("Added schema %s (%d), URI %s", schema.name, schema.id, schema.uri); return schema.id; } private void newOrUpdatedSchema(SchemaDAO schema) { - log.debugf("Push schema event for async run schemas update: %d (%s)", schema.id, schema.uri); + Log.debugf("Push schema event for async run schemas update: %d (%s)", schema.id, schema.uri); Util.registerTxSynchronization(tm, txStatus 
-> mediator.queueSchemaSync(schema.id)); } @@ -280,10 +279,10 @@ public void updateAccess(int id, String owner, Access access) { @WithRoles(extras = Roles.HORREUM_SYSTEM) @Transactional void validateRunData(int runId, Predicate schemaFilter) { - log.debugf("About to validate data for run %d", runId); + Log.debugf("About to validate data for run %d", runId); RunDAO run = RunDAO.findById(runId); if (run == null) { - log.errorf("Cannot load run %d for schema validation", runId); + Log.errorf("Cannot load run %d for schema validation", runId); return; } // remember to clear prev validation errors @@ -303,20 +302,18 @@ void validateRunData(int runId, Predicate schemaFilter) { Util.registerTxSynchronization(tm, txStatus -> mediator.publishEvent(AsyncEventChannels.RUN_VALIDATED, run.testid, new Schema.ValidationEvent(run.id, run.validationErrors.stream() .map(ValidationErrorMapper::fromValidationError).collect(Collectors.toList())))); - - ; } @WithRoles(extras = Roles.HORREUM_SYSTEM) @Transactional void validateDatasetData(int datasetId, Predicate schemaFilter) { - log.debugf("About to validate data for dataset %d", datasetId); + Log.debugf("About to validate data for dataset %d", datasetId); DatasetDAO dataset = DatasetDAO.findById(datasetId); if (dataset == null) { // Don't log error when the dataset is not present and we're revalidating all datasets - it might be // concurrently removed because of URI change if (schemaFilter != null) { - log.errorf("Cannot load dataset %d for schema validation", datasetId); + Log.errorf("Cannot load dataset %d for schema validation", datasetId); } return; } @@ -355,7 +352,7 @@ void validateDatasetData(int datasetId, Predicate schemaFilter) { void revalidateAll(int schemaId) { SchemaDAO schema = SchemaDAO.findById(schemaId); if (schema == null) { - log.errorf("Cannot load schema %d for validation", schemaId); + Log.errorf("Cannot load schema %d for validation", schemaId); return; } //clear tables on schemaId @@ -429,7 +426,7 @@ private void validateData(JsonNode data, Predicate filter, Collection filter, Collection void publishEvent(AsyncEventChannels channel, int testId, T payload) { if (testMode) { - log.debugf("Publishing test %d on %s: %s", testId, channel, payload); + Log.debugf("Publishing test %d on %s: %s", testId, channel, payload); // eventBus.publish(channel.name(), new MessageBus.Message(BigInteger.ZERO.longValue(), testId, 0, payload)); events.putIfAbsent(channel, new HashMap<>()); BlockingQueue queue = events.get(channel).computeIfAbsent(testId, k -> new LinkedBlockingQueue<>()); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SqlServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SqlServiceImpl.java index d73533794..dcd53fa01 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SqlServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SqlServiceImpl.java @@ -12,7 +12,6 @@ import org.eclipse.microprofile.config.inject.ConfigProperty; import org.hibernate.JDBCException; -import org.jboss.logging.Logger; import io.hyperfoil.tools.horreum.api.data.JsonpathValidation; import io.hyperfoil.tools.horreum.api.data.QueryResult; @@ -23,7 +22,6 @@ @ApplicationScoped public class SqlServiceImpl implements SqlService { - private static final Logger log = Logger.getLogger(SqlServiceImpl.class); @Inject EntityManager em; diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SubscriptionServiceImpl.java 
b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SubscriptionServiceImpl.java index 3be7bb23c..3aa338064 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SubscriptionServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SubscriptionServiceImpl.java @@ -12,8 +12,6 @@ import jakarta.persistence.EntityManager; import jakarta.transaction.Transactional; -import org.jboss.logging.Logger; - import io.hyperfoil.tools.horreum.api.alerting.Watch; import io.hyperfoil.tools.horreum.api.data.TestExport; import io.hyperfoil.tools.horreum.api.internal.services.SubscriptionService; @@ -21,13 +19,13 @@ import io.hyperfoil.tools.horreum.entity.data.TestDAO; import io.hyperfoil.tools.horreum.mapper.WatchMapper; import io.hyperfoil.tools.horreum.server.WithRoles; +import io.quarkus.logging.Log; import io.quarkus.runtime.Startup; import io.quarkus.security.identity.SecurityIdentity; @ApplicationScoped @Startup public class SubscriptionServiceImpl implements SubscriptionService { - private static final Logger log = Logger.getLogger(SubscriptionService.class); @Inject EntityManager em; @@ -206,7 +204,7 @@ private List currentWatches(WatchDAO watch) { @Transactional public void onTestDelete(int testId) { var subscriptions = WatchDAO.list("test.id = ?1", testId); - log.infof("Deleting %d subscriptions for test (%d)", subscriptions.size(), testId); + Log.infof("Deleting %d subscriptions for test %d", subscriptions.size(), testId); for (var subscription : subscriptions) { subscription.delete(); } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/TestServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/TestServiceImpl.java index 431f06773..55de22a4f 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/TestServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/TestServiceImpl.java @@ -30,7 +30,6 @@ import org.hibernate.Session; import org.hibernate.query.NativeQuery; import org.hibernate.type.StandardBasicTypes; -import org.jboss.logging.Logger; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -57,13 +56,13 @@ import io.hyperfoil.tools.horreum.mapper.TestMapper; import io.hyperfoil.tools.horreum.server.WithRoles; import io.quarkus.hibernate.orm.panache.PanacheQuery; +import io.quarkus.logging.Log; import io.quarkus.panache.common.Page; import io.quarkus.panache.common.Sort; import io.quarkus.security.identity.SecurityIdentity; @ApplicationScoped public class TestServiceImpl implements TestService { - private static final Logger log = Logger.getLogger(TestServiceImpl.class); private static final String FILTER_BY_NAME_FIELD = "name"; @@ -113,7 +112,7 @@ public void delete(int id) { } else if (!identity.getRoles().contains(test.owner)) { throw ServiceException.forbidden("You are not an owner of test " + id); } - log.debugf("Deleting test %s (%d)", test.name, test.id); + Log.debugf("Deleting test %s (%d)", test.name, test.id); mediator.deleteTest(test.id); test.delete(); if (mediator.testMode()) @@ -182,10 +181,10 @@ public TestDAO ensureTestExists(String testNameOrId) { if (Roles.hasRoleWithSuffix(identity, test.owner, "-uploader")) { return detached; } - log.debugf("Failed to retrieve test %s as this user (%s = %s) is not uploader for %s", + Log.debugf("Failed to retrieve test %s as this user (%s = %s) is not uploader for %s", testNameOrId, identity.getPrincipal().getName(), 
identity.getRoles(), test.owner); } else { - log.debugf("Failed to retrieve test %s - could not find it in the database", testNameOrId); + Log.debugf("Failed to retrieve test %s - could not find it in the database", testNameOrId); } // we need to be vague about the test existence throw ServiceException.badRequest("Cannot upload to test " + testNameOrId); @@ -201,7 +200,7 @@ public Test add(Test dto) { } if (dto.name == null || dto.name.isBlank()) throw ServiceException.badRequest("Test name can not be empty"); - log.debugf("Creating new test: %s", dto.toString()); + Log.debugf("Creating new test: %s", dto.toString()); return TestMapper.from(addAuthenticated(dto)); } @@ -641,7 +640,7 @@ public void recalculateDatasets(int testId) { // it gets removed even if transaction-level exception occurs, e.g., timeout Util.registerTxSynchronization(tm, txStatus -> recalculations.remove(testId, status)); if (prev != null) { - log.infof("Recalculation for test %d (%s) already in progress", testId, test.name); + Log.infof("Recalculation for test %d (%s) already in progress", testId, test.name); return; } @@ -650,7 +649,7 @@ public void recalculateDatasets(int testId) { "DELETE FROM dataset USING run WHERE run.id = dataset.runid AND run.trashed AND dataset.testid = ?1") .setParameter(1, testId).executeUpdate(); if (deleted > 0) { - log.debugf("Deleted %d datasets for trashed runs in test %s (%d)", deleted, test.name, (Object) testId); + Log.debugf("Deleted %d datasets for trashed runs in test %s (%d)", deleted, test.name, (Object) testId); } try (ScrollableResults results = em @@ -660,7 +659,7 @@ public void recalculateDatasets(int testId) { .scroll(ScrollMode.FORWARD_ONLY)) { while (results.next()) { int runId = (int) results.get(); - log.debugf("Recalculate Datasets for run %d - forcing recalculation for test %d (%s)", runId, testId, + Log.debugf("Recalculate Datasets for run %d - forcing recalculation for test %d (%s)", runId, testId, test.name); mediator.executeBlocking(() -> { diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/UIServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/UIServiceImpl.java index b3c28f8ac..4aeba198b 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/UIServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/UIServiceImpl.java @@ -97,7 +97,7 @@ public List getViews(int testId) { TestDAO test = TestDAO.findById(testId); if (test == null) { - throw ServiceException.badRequest("Test not found with id: ".concat(Integer.toString(testId))); + throw ServiceException.badRequest("Test not found with id: " + testId); } return ViewDAO. find("test.id", testId) diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/UserServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/UserServiceImpl.java index 86e822c1b..c16e2db3b 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/UserServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/UserServiceImpl.java @@ -1,6 +1,5 @@ package io.hyperfoil.tools.horreum.svc; -import static java.text.MessageFormat.format; import static java.util.Collections.emptyList; import java.time.temporal.ChronoUnit; @@ -52,7 +51,7 @@ public class UserServiceImpl implements UserService { private UserInfo currentUser() { return UserInfo. 
findByIdOptional(getUsername()) - .orElseThrow(() -> ServiceException.notFound(format("Username {0} not found", getUsername()))); + .orElseThrow(() -> ServiceException.notFound("Username '" + getUsername() + "' not found")); } private String getUsername() { @@ -84,7 +83,7 @@ public void createUser(NewUser user) { userIsManagerForTeam(user.team); backend.get().createUser(user); createLocalUser(user.user.username, user.team); - Log.infov("{0} created user {1} {2} with username {3} on team {4}", getUsername(), + Log.infof("%s created user '%s %s' with username '%s' on team '%s'", getUsername(), user.user.firstName, user.user.lastName, user.user.username, user.team); } @@ -96,7 +95,7 @@ public void removeUser(String username) { } backend.get().removeUser(username); removeLocalUser(username); - Log.infov("{0} removed user {1}", getUsername(), username); + Log.infof("'%s' removed user '%s'", getUsername(), username); } @Override @@ -110,7 +109,7 @@ public List getTeams() { public String defaultTeam() { UserInfo userInfo = currentUser(); if (userInfo == null) { - throw ServiceException.notFound(format("User with username {0} not found", getUsername())); + throw ServiceException.notFound("User with username '" + getUsername() + "' not found"); } return userInfo.defaultTeam != null ? userInfo.defaultTeam : ""; } @@ -121,7 +120,7 @@ public String defaultTeam() { public void setDefaultTeam(String unsafeTeam) { UserInfo userInfo = currentUser(); if (userInfo == null) { - throw ServiceException.notFound(format("User with username {0} not found", getUsername())); + throw ServiceException.notFound("User with username '" + getUsername() + "' not found"); } userInfo.defaultTeam = validateTeamName(unsafeTeam); userInfo.persistAndFlush(); @@ -158,7 +157,7 @@ public List getAllTeams() { public void addTeam(String unsafeTeam) { String team = validateTeamName(unsafeTeam); backend.get().addTeam(team); - Log.infov("{0} created team {1}", getUsername(), team); + Log.infof("%s created team %s", getUsername(), team); } @RolesAllowed(Roles.ADMIN) @@ -166,7 +165,7 @@ public void addTeam(String unsafeTeam) { public void deleteTeam(String unsafeTeam) { String team = validateTeamName(unsafeTeam); backend.get().deleteTeam(team); - Log.infov("{0} deleted team {1}", getUsername(), team); + Log.infof("%s deleted team %s", getUsername(), team); } @RolesAllowed(Roles.ADMIN) @@ -187,7 +186,7 @@ public void updateAdministrators(List newAdmins) { private void userIsManagerForTeam(String team) { if (!identity.getRoles().contains(Roles.ADMIN) && !identity.hasRole(team.substring(0, team.length() - 4) + Roles.MANAGER)) { - throw ServiceException.badRequest(format("This user is not a manager for team {0}", team)); + throw ServiceException.badRequest("This user is not a manager for " + team); } } @@ -256,7 +255,7 @@ public String newApiKey(ApiKeyRequest request) { newKey.persist(); userInfo.persist(); - Log.debugv("{0} created API key \"{1}\"", getUsername(), request.name == null ? "" : request.name); + Log.debugf("'%s' created API key '%s'", getUsername(), request.name == null ? "" : request.name); return newKey.keyString(); } @@ -277,13 +276,13 @@ public List apiKeys() { public void renameApiKey(long keyId, String newName) { validateApiKeyName(newName == null ? "" : newName); UserApiKey key = UserApiKey. 
findByIdOptional(keyId) - .orElseThrow(() -> ServiceException.notFound(format("Key with id {0} not found", keyId))); + .orElseThrow(() -> ServiceException.notFound("Key with id " + keyId + " not found")); if (key.revoked) { throw ServiceException.badRequest("Can't rename revoked key"); } String oldName = key.name; key.name = newName == null ? "" : newName; - Log.debugv("{0} renamed API key \"{1}\" to \"{2}\"", getUsername(), oldName, newName == null ? "" : newName); + Log.debugf("'%s' renamed API key '%s' to '%s'", getUsername(), oldName, newName == null ? "" : newName); } @Transactional @@ -291,9 +290,9 @@ public void renameApiKey(long keyId, String newName) { @Override public void revokeApiKey(long keyId) { UserApiKey key = UserApiKey. findByIdOptional(keyId) - .orElseThrow(() -> ServiceException.notFound(format("Key with id {0} not found", keyId))); + .orElseThrow(() -> ServiceException.notFound("Key with id " + keyId + " not found")); key.revoked = true; - Log.debugv("{0} revoked API key \"{1}\"", getUsername(), key.name); + Log.debugf("'%s' revoked API key '%s'", getUsername(), key.name); } @PermitAll @@ -308,7 +307,7 @@ UserApiKey. stream("#UserApiKey.expire", timeService.now().plus(toEx } // revoke expired keys -- could be done directly in the DB but iterate instead to be able to log UserApiKey. stream("#UserApiKey.pastExpiration", timeService.now()).forEach(key -> { - Log.debugv("Idle API key \"{0}\" revoked", key.name); + Log.debugf("Idle API key '%s' revoked", key.name); key.revoked = true; }); } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/Util.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/Util.java index ce7cb99fa..9c431b0f4 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/Util.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/Util.java @@ -38,7 +38,6 @@ import org.graalvm.polyglot.proxy.Proxy; import org.graalvm.polyglot.proxy.ProxyExecutable; import org.hibernate.query.NativeQuery; -import org.jboss.logging.Logger; import org.postgresql.util.PSQLException; import com.fasterxml.jackson.core.JsonProcessingException; @@ -56,6 +55,7 @@ import io.hyperfoil.tools.horreum.api.SortDirection; import io.hyperfoil.tools.horreum.server.RolesInterceptor; +import io.quarkus.logging.Log; import io.quarkus.security.identity.SecurityIdentity; import io.smallrye.context.SmallRyeContextManagerProvider; import io.smallrye.mutiny.Uni; @@ -63,7 +63,6 @@ import io.vertx.core.eventbus.EventBus; public class Util { - private static final Logger log = Logger.getLogger(Util.class); public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final Configuration JSONPATH_CONFIG = Configuration.builder() .jsonProvider(new JacksonJsonNodeJsonProvider()) @@ -211,7 +210,7 @@ public void afterCompletion(int status) { runnable.run(); } } catch (Throwable t) { - log.error("Error in TX synchronization", t); + Log.error("Error in TX synchronization", t); throw t; } } @@ -222,9 +221,9 @@ public static void doAfterCommit(TransactionManager tm, Runnable runnable) { try { doAfterCommitThrowing(tm, runnable); } catch (RollbackException e) { - log.debugf("Not performing %s as the transaction has been marked rollback-only", runnable); + Log.debugf("Not performing %s as the transaction has been marked rollback-only", runnable); } catch (SystemException e) { - log.errorf(e, "Failed to perform %s after transaction completion", runnable); + Log.errorf(e, "Failed to perform %s after transaction 
completion", runnable); } } @@ -232,9 +231,9 @@ static void publishLater(TransactionManager tm, final EventBus eventBus, String try { doAfterCommitThrowing(tm, () -> eventBus.publish(eventName, event)); } catch (RollbackException e) { - log.debug("Not publishing the event as the transaction has been marked rollback-only"); + Log.debug("Not publishing the event as the transaction has been marked rollback-only"); } catch (SystemException e) { - log.errorf(e, "Failed to publish event %s: %s after transaction completion", eventName, event); + Log.errorf(e, "Failed to publish event %s: %s after transaction completion", eventName, event); } } @@ -245,7 +244,7 @@ public static JsonNode toJsonNode(String str) { } return OBJECT_MAPPER.readTree(str); } catch (JsonProcessingException e) { - log.errorf(e, "Failed to parse into JSON: %s", str); + Log.errorf(e, "Failed to parse into JSON: %s", str); return null; } } @@ -257,7 +256,7 @@ public static JsonNode toJsonNode(byte[] bytes) { } return OBJECT_MAPPER.readTree(bytes); } catch (IOException e) { - log.errorf(e, "Failed to parse into JSON: %s", new String(bytes, StandardCharsets.UTF_8)); + Log.errorf(e, "Failed to parse into JSON: %s", new String(bytes, StandardCharsets.UTF_8)); throw new RuntimeException(e); } } @@ -476,7 +475,7 @@ public static T withTx(TransactionManager tm, Supplier supplier) { tm.setRollbackOnly(); // Similar code is in BaseTransactionRetryInterceptor if (retry > Util.MAX_TRANSACTION_RETRIES) { - log.error("Exceeded maximum number of retries."); + Log.error("Exceeded maximum number of retries."); throw t; } if (!lookupRetryHint(t, new HashSet<>())) { @@ -502,8 +501,8 @@ public static T withTx(TransactionManager tm, Supplier supplier) { private static void yieldAndLog(int retry, Throwable t) { Thread.yield(); // give the other transaction a bit more chance to complete - log.infof("Retrying failed transaction, attempt %d/%d", retry, Util.MAX_TRANSACTION_RETRIES); - log.trace("This is the exception that caused retry: ", t); + Log.infof("Retrying failed transaction, attempt %d/%d", retry, Util.MAX_TRANSACTION_RETRIES); + Log.trace("This is the exception that caused retry: ", t); } public static T getAnnotation(Method method, Class annotationClass) { @@ -535,7 +534,7 @@ public static void executeBlocking(Vertx vertx, Runnable runnable) { try { wrapped.run(); } catch (Exception e) { - log.error("Failed to execute blocking task", e); + Log.error("Failed to execute blocking task", e); } finally { promise.complete(); } @@ -686,7 +685,7 @@ public Object execute(Value... arguments) { } else if (resolved.size() == 1) { value = resolved.get(0); } else { //resolve.size() > 1, this doesn't happen - log.error("resolved promise size=" + resolved.size() + ", expected 1 for promise = " + value); + Log.errorf("resolved promise size= %d, expected 1 for promise = %s", resolved.size(), value); } } return value; @@ -751,10 +750,10 @@ static T runQuery(EntityManager em, Class klass, String query, Object... 
try { return (T) q.getSingleResult(); } catch (NoResultException e) { - log.errorf("No results in %s with params: %s", query, Arrays.asList(params)); + Log.errorf("No results in %s with params: %s", query, Arrays.toString(params)); throw ServiceException.notFound("No result"); } catch (Throwable t) { - log.errorf(t, "Query error in %s with params: %s", query, Arrays.asList(params)); + Log.errorf(t, "Query error in %s with params: %s", query, Arrays.toString(params)); throw t; } } @@ -798,7 +797,7 @@ public static Instant toInstant(Object time) { try { return ZonedDateTime.parse(str, DateTimeFormatter.ISO_DATE_TIME).toInstant(); } catch (DateTimeParseException e) { - log.debug("failed to convert " + time + " to timestamp using " + str); + Log.debugf("failed to convert %s to timestamp using %s", time, str); } } return null;//nothing matched @@ -906,11 +905,10 @@ public void beforeCompletion() { @Override public void afterCompletion(int status) { - try { consumer.accept(status); } catch (Exception e) { - log.errorf("Tx Synchronization callback failed: %s", e.getMessage()); + Log.errorf("Tx Synchronization callback failed: %s", e.getMessage()); } } }); @@ -918,13 +916,12 @@ public void afterCompletion(int status) { consumer.accept(0); } } catch (SystemException | RollbackException e) { - log.errorf("Error occurred in transaction: %s", e.getMessage()); + Log.errorf("Error occurred in transaction: %s", e.getMessage()); // throw new RuntimeException(e); consumer.accept(0); } catch (Exception e) { - log.errorf("Error occurred processing consumer: %s", e.getMessage()); + Log.errorf("Error occurred processing consumer: %s", e.getMessage()); } - } public record DecomposedJsonPath(String root, String jsonpath) { diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/user/DatabaseUserBackend.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/user/DatabaseUserBackend.java index 59b0fad95..8aec84247 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/user/DatabaseUserBackend.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/user/DatabaseUserBackend.java @@ -1,6 +1,5 @@ package io.hyperfoil.tools.horreum.svc.user; -import static java.text.MessageFormat.format; import static java.util.Collections.emptyMap; import static java.util.stream.Collectors.toSet; @@ -16,8 +15,6 @@ import jakarta.persistence.criteria.CriteriaQuery; import jakarta.transaction.Transactional; -import org.jboss.logging.Logger; - import io.hyperfoil.tools.horreum.api.services.UserService; import io.hyperfoil.tools.horreum.entity.user.Team; import io.hyperfoil.tools.horreum.entity.user.TeamMembership; @@ -28,6 +25,7 @@ import io.hyperfoil.tools.horreum.svc.Roles; import io.hyperfoil.tools.horreum.svc.ServiceException; import io.quarkus.arc.lookup.LookupIfProperty; +import io.quarkus.logging.Log; /** * Implementation of {@link UserBackEnd} that uses Horreum database for storage. 
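Aside (sketch, not part of the patch): DatabaseUserBackend and the KeycloakUserBackend below are alternative CDI beans gated by @LookupIfProperty on the horreum.roles.provider property, so only one of them is resolvable at runtime. The backend.get() calls in UserServiceImpl earlier in this patch suggest a lazy Instance<UserBackEnd> lookup; a minimal sketch of that consuming side, with a hypothetical holder class:

    import jakarta.enterprise.context.ApplicationScoped;
    import jakarta.enterprise.inject.Instance;
    import jakarta.inject.Inject;

    @ApplicationScoped
    public class UserBackendClient { // hypothetical caller, for illustration only
        @Inject
        Instance<UserBackEnd> backend; // resolves to the single bean whose @LookupIfProperty matches

        void create(UserService.NewUser user) {
            backend.get().createUser(user); // same call pattern as UserServiceImpl in this patch
        }
    }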
@@ -38,8 +36,6 @@ @LookupIfProperty(name = "horreum.roles.provider", stringValue = "database") public class DatabaseUserBackend implements UserBackEnd { - private static final Logger LOG = Logger.getLogger(DatabaseUserBackend.class); - private static UserService.UserData toUserInfo(UserInfo info) { return new UserService.UserData("", info.username, info.firstName, info.lastName, info.email); } @@ -106,7 +102,7 @@ public void createUser(UserService.NewUser user) { } else if (Roles.MANAGER.equals(role)) { addTeamMembership(userInfo, teamName, TeamRole.TEAM_MANAGER); } else { - LOG.infov("Dropping role {0} for user {1} {2}", role, userInfo.firstName, userInfo.lastName); + Log.infof("Dropping role '%s' for user '%s %s'", role, userInfo.firstName, userInfo.lastName); } } } @@ -156,7 +152,7 @@ public Map> teamMembers(String team) { public void updateTeamMembers(String team, Map> roles) { Team teamEntity = Team.find("teamName", removeTeamSuffix(team)).firstResult(); if (teamEntity == null) { - throw ServiceException.notFound(format("The team {0} does not exist", team)); + throw ServiceException.notFound("The team '" + team + "' does not exist"); } // need to remove from the "owning" side of the relationship @@ -195,7 +191,7 @@ public void addTeam(String team) { public void deleteTeam(String team) { Team teamEntity = Team.find("teamName", removeTeamSuffix(team)).firstResult(); if (teamEntity == null) { - throw ServiceException.notFound(format("The team {0} does not exist", team)); + throw ServiceException.notFound("The team '" + team + "' does not exist"); } // need to delete memberships with roles deleteTeamAndMemberships(teamEntity); @@ -212,8 +208,8 @@ void deleteTeamAndMemberships(Team teamEntity) { }); teamEntity.delete(); } catch (Throwable t) { - LOG.warnv("Unable to delete team {0} due to {1}", teamEntity.teamName, t.getMessage()); - throw ServiceException.serverError(format("Unable to delete team {0}", teamEntity.teamName)); + Log.warnf("Unable to delete team '%s' due to %s", teamEntity.teamName, t.getMessage()); + throw ServiceException.serverError("Unable to delete team '" + teamEntity.teamName + "'"); } } @@ -232,7 +228,7 @@ public void updateAdministrators(List newAdmins) { if (!newAdmins.contains(u.username)) { u.roles.remove(UserRole.ADMIN); u.persist(); - LOG.infov("Removed administrator role from user {0}", u.username); + Log.infof("Removed administrator role from user '%s %s'", u.firstName, u.lastName); } }); newAdmins.forEach(username -> { @@ -240,7 +236,7 @@ public void updateAdministrators(List newAdmins) { user.ifPresent(u -> { u.roles.add(UserRole.ADMIN); u.persist(); - LOG.infov("Added administrator role to user {0}", username); + Log.infof("Added administrator role to user '%s %s'", u.firstName, u.lastName); }); }); } @@ -258,7 +254,7 @@ private List getAdministratorUsers() { public void setPassword(String username, String password) { UserInfo user = UserInfo.findById(username); if (user == null) { - throw ServiceException.notFound(format("User {0} not found", username)); + throw ServiceException.notFound("User with username '" + username + "' not found"); } user.setPassword(password); } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/user/KeycloakUserBackend.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/user/KeycloakUserBackend.java index 12f2489a2..28fabbbd5 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/user/KeycloakUserBackend.java +++ 
b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/user/KeycloakUserBackend.java @@ -1,6 +1,5 @@ package io.hyperfoil.tools.horreum.svc.user; -import static java.text.MessageFormat.format; import static java.util.stream.Collectors.joining; import java.util.ArrayList; @@ -19,7 +18,6 @@ import jakarta.ws.rs.core.Response; import org.eclipse.microprofile.config.inject.ConfigProperty; -import org.jboss.logging.Logger; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.resource.ClientsResource; import org.keycloak.admin.client.resource.RoleMappingResource; @@ -34,6 +32,7 @@ import io.hyperfoil.tools.horreum.svc.Roles; import io.hyperfoil.tools.horreum.svc.ServiceException; import io.quarkus.arc.lookup.LookupIfProperty; +import io.quarkus.logging.Log; /** * Implementation of {@link UserBackEnd} using an external Keycloak server. @@ -43,8 +42,6 @@ @LookupIfProperty(name = "horreum.roles.provider", stringValue = "keycloak") public class KeycloakUserBackend implements UserBackEnd { - private static final Logger LOG = Logger.getLogger(KeycloakUserBackend.class); - private static final String[] ROLE_TYPES = new String[] { "team", Roles.VIEWER, Roles.TESTER, Roles.UPLOADER, Roles.MANAGER }; @@ -115,8 +112,8 @@ public List info(List usernames) { keycloak.realm(realm).users().search(username).stream().filter(u -> username.equals(u.getUsername())) .map(KeycloakUserBackend::toUserInfo).forEach(users::add); } catch (Throwable t) { - LOG.warnv(t, "Failed to fetch info for user {0}", username); - throw ServiceException.serverError(format("Failed to fetch info for user {0}", username)); + Log.warnf(t, "Failed to fetch info for user '%s'", username); + throw ServiceException.serverError("Failed to fetch info for user " + username); } } return users; @@ -128,7 +125,7 @@ public void createUser(UserService.NewUser user) { try (Response response = keycloak.realm(realm).users().create(rep)) { if (response.getStatusInfo().getFamily() != Response.Status.Family.SUCCESSFUL) { - LOG.warnv("Failed to create new user {0}: {1}", rep.getUsername(), response.getStatusInfo()); + Log.warnf("Failed to create new user '%s': %s", rep.getUsername(), response.getStatusInfo()); if (!keycloak.realm(realm).users().search(rep.getUsername(), true).isEmpty()) { throw ServiceException.badRequest("User exists with same username"); } else if (!keycloak.realm(realm).users().searchByEmail(rep.getEmail(), true).isEmpty()) { @@ -141,7 +138,7 @@ public void createUser(UserService.NewUser user) { } catch (ServiceException se) { throw se; // thrown above, re-throw } catch (Throwable t) { - throw ServiceException.serverError(format("Failed to create new user {0}", rep.getUsername())); + throw ServiceException.serverError("Failed to create new user " + rep.getUsername()); } try { // assign the provided roles to the realm @@ -168,8 +165,8 @@ public void createUser(UserService.NewUser user) { } catch (ServiceException se) { throw se; // thrown above, re-throw } catch (Throwable t) { - LOG.warnv(t, "Unable to assign roles to new user {0}", rep.getUsername()); - throw ServiceException.serverError(format("Unable to assign roles to new user {0}", rep.getUsername())); + Log.warnf(t, "Unable to assign roles to new user '%s'", rep.getUsername()); + throw ServiceException.serverError("Unable to assign roles to new user " + rep.getUsername()); } } @@ -177,14 +174,14 @@ public void createUser(UserService.NewUser user) { public void removeUser(String username) { try (Response response = 
keycloak.realm(realm).users().delete(findMatchingUserId(username))) { if (response.getStatusInfo().getFamily() != Response.Status.Family.SUCCESSFUL) { - LOG.warnv("Got {0} response for removing user {0}", response.getStatusInfo(), username); - throw ServiceException.serverError(format("Unable to remove user {0}", username)); + Log.warnf("Got %s response for removing user '%s'", response.getStatusInfo(), username); + throw ServiceException.serverError("Unable to remove user " + username); } } catch (ServiceException se) { throw se; // thrown above, re-throw } catch (Throwable t) { - LOG.warnv(t, "Unable to remove user {0}", username); - throw ServiceException.serverError(format("Unable to remove user {0}", username)); + Log.warnf(t, "Unable to remove user '%s'", username); + throw ServiceException.serverError("Unable to remove user " + username); } } @@ -217,12 +214,12 @@ public List getTeams() { // get the "team roles" in the realm private String findMatchingUserId(String username) { // find the clientID of a single user List matchingUsers = keycloak.realm(realm).users().search(username, true); if (matchingUsers == null || matchingUsers.isEmpty()) { - LOG.warnv("Cannot find user with username {0}", username); - throw ServiceException.notFound(format("User {0} does not exist", username)); + Log.warnf("Cannot find user with username '%s'", username); + throw ServiceException.notFound("User " + username + " does not exist"); } else if (matchingUsers.size() > 1) { - LOG.warnv("Multiple matches for exact search for username {0}: {1}", username, + Log.warnf("Multiple matches for exact search for username '%s': %s", username, matchingUsers.stream().map(UserRepresentation::getId).collect(joining(" "))); - throw ServiceException.serverError(format("More than one user with username {0}", username)); + throw ServiceException.serverError("More than one user with username " + username); } return matchingUsers.get(0).getId(); } @@ -237,9 +234,9 @@ public Map> teamMembers(String team) { // get a list of mem keycloak.realm(realm).roles().get(prefix + role).getUserMembers(0, Integer.MAX_VALUE) .forEach(user -> userMap.computeIfAbsent(user.getUsername(), u -> new ArrayList<>()).add(role)); } catch (NotFoundException e) { - LOG.warnv("Cannot find role {0}{1} in Keycloak", prefix, role); // was there a failure when creating the team? + Log.warnf("Cannot find role '%s%s' in Keycloak", prefix, role); // was there a failure when creating the team? 
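// Aside (sketch, not part of the patch): jboss-logging loggers, and the Quarkus Log
// facade that mirrors them, have two formatting families: the *v methods use
// java.text.MessageFormat placeholders ({0}, {1}), while the *f methods use
// String.format placeholders (%s, %d). Mixing the styles logs the placeholder text
// literally instead of the argument, e.g. with a hypothetical name variable:
//     Log.warnv("Failed to modify roles of user %s", name); // logs the literal "%s"
//     Log.warnf("Failed to modify roles of user %s", name); // logs the value of name
// so any conversion that rewrites {0} to %s must also switch warnv to warnf.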
} catch (Throwable t) { - LOG.warnv("Error querying keycloak: {0}", t.getMessage()); + Log.warnf("Error querying keycloak: %s", t.getMessage()); throw ServiceException.serverError("Failed to retrieve role users from Keycloak"); } } @@ -258,9 +255,9 @@ public void updateTeamMembers(String team, Map> roles) { // rolesMappingResource = keycloak.realm(realm).users().get(userId).roles(); existingRoles = rolesMappingResource.realmLevel().listAll().stream().map(RoleRepresentation::getName).toList(); } catch (Throwable t) { - LOG.warnv(t, "Failed to retrieve current roles of user {0} from Keycloak", entry.getKey()); + Log.warnf(t, "Failed to retrieve current roles of user '%s'", entry.getKey()); throw ServiceException - .serverError(format("Failed to retrieve current roles of user {0} from Keycloak", entry.getKey())); + .serverError("Failed to retrieve current roles of user " + entry.getKey()); } try { // add new roles that are not in the list of current roles and then remove the existing roles that are not on the new roles @@ -277,8 +274,8 @@ public void updateTeamMembers(String team, Map> roles) { // rolesMappingResource.realmLevel().remove(rolesToRemove); } } catch (Throwable t) { - LOG.warnv(t, "Failed to modify roles of user {0}", entry.getKey()); - throw ServiceException.serverError(format("Failed to modify roles of user {0}", entry.getKey())); + Log.warnf(t, "Failed to modify roles of user '%s'", entry.getKey()); + throw ServiceException.serverError("Failed to modify roles of user " + entry.getKey()); } } @@ -294,10 +291,10 @@ public void updateTeamMembers(String team, Map> roles) { // } } } catch (NotFoundException e) { - throw ServiceException.serverError(format("The team {0} does not exist", team)); + throw ServiceException.serverError("The team " + team + " does not exist"); } catch (Throwable t) { - LOG.warnv(t, "Failed to remove all roles of team {0}", team); - throw ServiceException.serverError(format("Failed to remove all roles of team {0}", team)); + Log.warnv(t, "Failed to remove all roles of team '%s'", team); + throw ServiceException.serverError("Failed to remove all roles of team " + team); } } @@ -308,7 +305,7 @@ private RoleRepresentation ensureRole(String roleName) { keycloak.realm(realm).roles().create(new RoleRepresentation(roleName, null, false)); return keycloak.realm(realm).roles().get(roleName).toRepresentation(); } catch (Throwable t) { - throw ServiceException.serverError(format("Unable to fetch role {0}", roleName)); + throw ServiceException.serverError("Unable to fetch role " + roleName); } } @@ -344,12 +341,12 @@ private void createRole(String roleName, Set compositeRoles) { keycloak.realm(realm).roles().create(role); } catch (ClientErrorException e) { if (e.getResponse().getStatus() == Response.Status.CONFLICT.getStatusCode()) { - LOG.warnv("Registration of role {0} failed because it already exists", roleName); + Log.warnv("Registration of role '%s' failed because it already exists", roleName); } else { - throw ServiceException.serverError(format("Unable to create role {0}", roleName)); + throw ServiceException.serverError("Unable to create role " + roleName); } } catch (Throwable t) { - throw ServiceException.serverError(format("Unable to create role {0}", roleName)); + throw ServiceException.serverError("Unable to create role " + roleName); } } @@ -360,11 +357,11 @@ public void deleteTeam(String team) { // delete a team by deleting all the "team try { keycloak.realm(realm).roles().deleteRole(prefix + type); } catch (NotFoundException e) { - LOG.warnv("Role 
{0}{1} was not found when deleting it", prefix, type); - throw ServiceException.notFound(format("Team {0} not found", team)); + Log.warnf("Role '%s%s' was not found when deleting it", prefix, type); + throw ServiceException.notFound("Team " + team + " not found"); } catch (Throwable t) { - LOG.warnv(t, "Unable to delete team {0}", team); - throw ServiceException.serverError(format("Unable to delete team {0}", team)); + Log.warnf(t, "Unable to delete team '%s'", team); + throw ServiceException.serverError("Unable to delete team " + team); } } } @@ -375,7 +372,7 @@ public List administrators() { // get the list of all the return keycloak.realm(realm).roles().get(Roles.ADMIN).getUserMembers(0, Integer.MAX_VALUE).stream() .map(KeycloakUserBackend::toUserInfo).toList(); } catch (Throwable t) { - LOG.warnv(t, "Unable to list administrators"); + Log.warn("Unable to list administrators", t); throw ServiceException .serverError("Please verify with the System Administrators that you have the correct permissions"); } @@ -394,9 +391,9 @@ public void updateAdministrators(List newAdmins) { // update the list of if (!newAdmins.contains(user.getUsername())) { try { usersResource.get(user.getId()).roles().realmLevel().remove(List.of(adminRole)); - LOG.infov("Removed administrator role from user {0}", user.getUsername()); + Log.infof("Removed administrator role from user '%s'", user.getUsername()); } catch (Throwable t) { - LOG.warnv("Could not remove admin role from user {0} due to {1}", user.getUsername(), t.getMessage()); + Log.warnf("Could not remove admin role from user '%s' due to %s", user.getUsername(), t.getMessage()); } } } @@ -405,16 +402,16 @@ public void updateAdministrators(List newAdmins) { // update the list of if (oldAdmins.stream().noneMatch(old -> username.equals(old.getUsername()))) { try { usersResource.get(findMatchingUserId(username)).roles().realmLevel().add(List.of(adminRole)); - LOG.infov("Added administrator role to user {0}", username); + Log.infof("Added administrator role to user '%s'", username); } catch (Throwable t) { - LOG.warnv("Could not add admin role to user {0} due to {1}", username, t.getMessage()); + Log.warnf("Could not add admin role to user '%s' due to %s", username, t.getMessage()); } } } } catch (ServiceException se) { throw se; // thrown above, re-throw } catch (Throwable t) { - LOG.warnv(t, "Cannot fetch representation for admin role"); + Log.warn("Cannot fetch representation for admin role", t); throw ServiceException.serverError("Cannot find admin role"); } } @@ -428,9 +425,9 @@ public void setPassword(String username, String password) { keycloak.realm(realm).users().get(findMatchingUserId(username)).resetPassword(credentials); } catch (Throwable t) { - LOG.warnv(t, "Failed to retrieve current representation of user {0} from Keycloak", username); + Log.warnf(t, "Failed to retrieve current representation of user '%s'", username); throw ServiceException - .serverError(format("Failed to retrieve current representation of user {0} from Keycloak", username)); + .serverError("Failed to retrieve current representation of user " + username); } } } diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/AlertingServiceTest.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/AlertingServiceTest.java index 026bd5729..a73b84b53 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/AlertingServiceTest.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/AlertingServiceTest.java @@ -13,7 +13,6 @@
import jakarta.inject.Inject; -import org.jboss.logging.Logger; import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; @@ -55,7 +54,6 @@ @QuarkusTestResource(OidcWiremockTestResource.class) @TestProfile(HorreumTestProfile.class) public class AlertingServiceTest extends BaseServiceTest { - private static final Logger log = Logger.getLogger(AlertingServiceTest.class); @Inject RoleManager roleManager; diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceNoRestTest.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceNoRestTest.java index e17f94fc3..b57c82e4e 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceNoRestTest.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceNoRestTest.java @@ -70,7 +70,7 @@ protected List createSomeSampleTests(int count, String prefix) { } List tests = new ArrayList<>(); for (int i = 0; i < count; i += 1) { - tests.add(createSampleTest(String.format("%s_%d", prefix, i), null, null, i)); + tests.add(createSampleTest("%s_%d".formatted(prefix, i), null, null, i)); } return tests; } diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceTest.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceTest.java index a344b1ed9..5c461ec83 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceTest.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceTest.java @@ -28,7 +28,6 @@ import jakarta.ws.rs.core.MediaType; import org.hibernate.query.NativeQuery; -import org.jboss.logging.Logger; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.TestInfo; @@ -70,6 +69,7 @@ import io.hyperfoil.tools.horreum.server.CloseMe; import io.hyperfoil.tools.horreum.server.RoleManager; import io.quarkus.arc.impl.ParameterizedTypeImpl; +import io.quarkus.logging.Log; import io.restassured.RestAssured; import io.restassured.response.Response; import io.restassured.specification.RequestSpecification; @@ -87,8 +87,6 @@ public class BaseServiceTest { int lastAddedLabelId; - protected final Logger log = Logger.getLogger(getClass()); - @Inject protected EntityManager em; @@ -168,18 +166,18 @@ protected static ObjectNode runWithValueSchemas(double value, Schema... 
schemas) @BeforeEach public void beforeMethod(TestInfo info) { - log.debugf("Starting test %s.%s", info.getTestClass().map(Class::getSimpleName).orElse(""), + Log.debugf("Starting test %s.%s", info.getTestClass().map(Class::getSimpleName).orElse(""), info.getDisplayName()); } @AfterEach public void afterMethod(TestInfo info) { - log.debugf("Completed test %s.%s", info.getTestClass().map(Class::getSimpleName).orElse(""), + Log.debugf("Completed test %s.%s", info.getTestClass().map(Class::getSimpleName).orElse(""), info.getDisplayName()); dropAllViewsAndTests(); afterMethodCleanup.forEach(Runnable::run); afterMethodCleanup.clear(); - log.debugf("Finished cleanup of test %s.%s", info.getTestClass().map(Class::getSimpleName).orElse(""), + Log.debugf("Finished cleanup of test %s.%s", info.getTestClass().map(Class::getSimpleName).orElse(""), info.getDisplayName()); } @@ -405,7 +403,7 @@ protected List runExperiments(int datasetId) } protected Test createTest(Test test) { - log.debugf("Creating new test via /api/test: %s", test.toString()); + Log.debugf("Creating new test via /api/test: %s", test.toString()); test = jsonRequest() .body(test) @@ -414,7 +412,7 @@ protected Test createTest(Test test) { .statusCode(200) .extract().body().as(Test.class); - log.debugf("New test created via /api/test: %s", test.toString()); + Log.debugf("New test created via /api/test: %s", test.toString()); return test; } @@ -627,7 +625,7 @@ protected T withExampleDataset(Test test, JsonNode data, Function T withExampleDataset(Test test, JsonNode data, Function { try (CloseMe ignored = roleManager.withRoles(Arrays.asList(TESTER_ROLES))) { ViewDAO view = ViewDAO.find("test.id", test.id).firstResult(); - log.debugf("view is null: %b", view == null); + Log.debugf("view is null: %b", view == null); view.components.clear(); ViewComponentDAO vc1 = new ViewComponentDAO(); vc1.view = view; @@ -350,7 +351,7 @@ public void testDatasetView() { int labelA = addLabel(schemas[0], "a", null, valuePath); int labelB = addLabel(schemas[1], "b", null, valuePath); // view update should happen in the same transaction as labels update so we can use the event - log.debugf("Waiting for MessageBusChannels.DATASET_UPDATED_LABELS"); + Log.debugf("Waiting for MessageBusChannels.DATASET_UPDATED_LABELS"); BlockingQueue updateQueue = serviceMediator .getEventQueue(AsyncEventChannels.DATASET_UPDATED_LABELS, test.id); withExampleDataset(test, createABData(), ds -> { diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/SchemaServiceNoRestTest.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/SchemaServiceNoRestTest.java index 8a204dc40..dbbea94e3 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/SchemaServiceNoRestTest.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/SchemaServiceNoRestTest.java @@ -21,7 +21,11 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import io.hyperfoil.tools.horreum.api.SortDirection; -import io.hyperfoil.tools.horreum.api.data.*; +import io.hyperfoil.tools.horreum.api.data.Access; +import io.hyperfoil.tools.horreum.api.data.Extractor; +import io.hyperfoil.tools.horreum.api.data.Label; +import io.hyperfoil.tools.horreum.api.data.Schema; +import io.hyperfoil.tools.horreum.api.data.Transformer; import io.hyperfoil.tools.horreum.api.services.SchemaService; import io.hyperfoil.tools.horreum.entity.data.LabelDAO; import io.hyperfoil.tools.horreum.entity.data.SchemaDAO; @@ -405,7 +409,7 @@ void 
testUpdateSchemaTransformerOfDifferentSchema() { ServiceException thrown = assertThrows(ServiceException.class, () -> schemaService.addOrUpdateTransformer(999, t)); assertTrue( - thrown.getMessage().contains(String.format("Transformer id=%d, name=%s belongs to a different schema: %d(%s)", + thrown.getMessage().contains("Transformer id=%d, name=%s belongs to a different schema: %d(%s)".formatted( t.id, t.name, s.id, s.uri))); assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), thrown.getResponse().getStatus()); } @@ -441,7 +445,7 @@ void testDeleteSchemaTransformerWithFailure() { // wrong schema id thrown = assertThrows(ServiceException.class, () -> schemaService.deleteTransformer(999, id)); - assertEquals(String.format("Transformer %s does not belong to schema 999", id), thrown.getMessage()); + assertEquals("Transformer %s does not belong to schema 999".formatted(id), thrown.getMessage()); assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), thrown.getResponse().getStatus()); } @@ -503,7 +507,7 @@ void testCreateSchemaLabelWithNotExistingId() { l.id = 999; ServiceException thrown = assertThrows(ServiceException.class, () -> schemaService.addOrUpdateLabel(s.id, l)); - assertEquals(String.format("Label %d not found", l.id), thrown.getMessage()); + assertEquals("Label %d not found".formatted(l.id), thrown.getMessage()); assertEquals(Response.Status.NOT_FOUND.getStatusCode(), thrown.getResponse().getStatus()); } @@ -562,7 +566,7 @@ void testUpdateSchemaLabelWrongSchemaId() { // update the label passing the wrong schema id l.id = id; ServiceException thrown = assertThrows(ServiceException.class, () -> schemaService.addOrUpdateLabel(999, l)); - assertEquals(String.format("Label id=%d, name=%s belongs to a different schema: %d(%s)", + assertEquals("Label id=%d, name=%s belongs to a different schema: %d(%s)".formatted( l.id, l.name, s.id, s.uri), thrown.getMessage()); assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), thrown.getResponse().getStatus()); } @@ -583,8 +587,8 @@ void testUpdateSchemaLabelNameWhenAlreadyExisting() { l.id = id; l.name = "AnotherLabel"; ServiceException thrown = assertThrows(ServiceException.class, () -> schemaService.addOrUpdateLabel(s.id, l)); - assertEquals(String.format("There is an existing label with the same name (%s) in this " + - "schema; please choose different name.", l.name), thrown.getMessage()); + assertEquals("There is an existing label with the same name (%s) in this schema; please choose different name." 
+ .formatted(l.name), thrown.getMessage()); assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), thrown.getResponse().getStatus()); } @@ -613,11 +617,11 @@ void testDeleteSchemaLabelWithFailures() { assertEquals(1, LabelDAO.count()); ServiceException thrown = assertThrows(ServiceException.class, () -> schemaService.deleteLabel(s.id, 999)); - assertEquals(String.format("Label %d not found", 999), thrown.getMessage()); + assertEquals("Label %d not found".formatted(999), thrown.getMessage()); assertEquals(Response.Status.NOT_FOUND.getStatusCode(), thrown.getResponse().getStatus()); thrown = assertThrows(ServiceException.class, () -> schemaService.deleteLabel(999, id)); - assertEquals(String.format("Label %d does not belong to schema %d", id, 999), thrown.getMessage()); + assertEquals("Label %d does not belong to schema %d".formatted(id, 999), thrown.getMessage()); assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), thrown.getResponse().getStatus()); } diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/SlackDummyService.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/SlackDummyService.java index d686d086e..af85c7294 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/SlackDummyService.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/SlackDummyService.java @@ -10,7 +10,6 @@ import org.apache.http.HttpStatus; import org.eclipse.microprofile.openapi.annotations.tags.Tag; -import org.jboss.logging.Logger; import org.jboss.resteasy.reactive.RestResponse; import org.jboss.resteasy.reactive.RestResponse.ResponseBuilder; @@ -18,6 +17,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; +import io.quarkus.logging.Log; + @ApplicationScoped @Path("/api/slack") @Consumes(MediaType.APPLICATION_JSON) @@ -25,7 +26,6 @@ @Tag(name = "SlackService", description = "Mock endpoint for Slack service.") public class SlackDummyService { - private static final Logger log = Logger.getLogger(SlackDummyService.class); private static boolean oneTime = true; @Inject @@ -33,20 +33,20 @@ public class SlackDummyService { @POST public RestResponse mockSlackEndpoint(JsonNode payload) { - log.infof("Received payload: %s", payload); + Log.infof("Received payload: %s", payload); ObjectNode body = mapper.createObjectNode(); ResponseBuilder response = ResponseBuilder.ok(); - log.infof("Switching on channel"); + Log.infof("Switching on channel"); switch (payload.get("channel").asText()) { case "BADCHANNEL": { - log.infof("Bad channel: returning JSON failure"); + Log.infof("Bad channel: returning JSON failure"); body.put("ok", false).put("error", "Bad channel"); response.entity(body); break; } case "ERRORCHANNEL": { - log.infof("Edge case: Slack API failure"); + Log.infof("Edge case: Slack API failure"); body.put("error", "Forced error"); response.status(HttpStatus.SC_FORBIDDEN).entity(body); break; @@ -55,21 +55,21 @@ public RestResponse mockSlackEndpoint(JsonNode payload) { // NOTE: on retry, this falls through to GOODCHANNEL if (oneTime) { oneTime = false; - log.infof("Busy channel: requesting retry"); + Log.infof("Busy channel: requesting retry"); response.status(HttpStatus.SC_TOO_MANY_REQUESTS).header("Retry-After", "1"); break; } else { - log.infof("Busy channel: redux"); + Log.infof("Busy channel: redux"); } } case "GOODCHANNEL": { - log.infof("Good channel: success"); + Log.infof("Good channel: success"); body.put("ok", true); response.entity(body); break; } } - 
log.infof("Returning ..."); + Log.infof("Returning ..."); return response.build(); } } diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/TestServiceNoRestTest.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/TestServiceNoRestTest.java index 2cc831b18..14791487f 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/TestServiceNoRestTest.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/TestServiceNoRestTest.java @@ -121,7 +121,7 @@ void testCreateTestWithMissingRole() { Test t = createSampleTest("test", "missing-role", null, null); ServiceException thrown = assertThrows(ServiceException.class, () -> testService.add(t)); - assertEquals(String.format("This user does not have the %s role!", t.owner), thrown.getMessage()); + assertEquals("This user does not have the %s role!".formatted(t.owner), thrown.getMessage()); assertEquals(Response.Status.FORBIDDEN.getStatusCode(), thrown.getResponse().getStatus()); } diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/test/HorreumKeycloakTestResourceLifecycleManager.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/test/HorreumKeycloakTestResourceLifecycleManager.java index 8c257dca5..97f56535b 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/test/HorreumKeycloakTestResourceLifecycleManager.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/test/HorreumKeycloakTestResourceLifecycleManager.java @@ -54,7 +54,7 @@ public Map start() { managementRolesResource.add( managementRolesResource.listAvailable().stream().filter(r -> "realm-admin".equals(r.getName())).toList()); - Log.infov("realm-admin role added to {0} client", KEYCLOAK_SERVICE_CLIENT); + Log.infof("realm-admin role added to %s client", KEYCLOAK_SERVICE_CLIENT); } return properties; diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/test/TestUtil.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/test/TestUtil.java index 13a6b7951..0e2c526bf 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/test/TestUtil.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/test/TestUtil.java @@ -7,12 +7,11 @@ import java.util.concurrent.TimeUnit; import java.util.function.BooleanSupplier; -import org.jboss.logging.Logger; - import com.fasterxml.jackson.databind.JsonNode; +import io.quarkus.logging.Log; + public final class TestUtil { - private static final Logger log = Logger.getLogger(TestUtil.class); private TestUtil() { } @@ -31,7 +30,7 @@ public static void eventually(Runnable test) { test.run(); return; } catch (AssertionError e) { - log.debug("Ignoring failed assertion", e); + Log.debug("Ignoring failed assertion", e); } try { Thread.sleep(10); diff --git a/horreum-integration-tests/src/test/java/io/hyperfoil/tools/horreum/it/ItResource.java b/horreum-integration-tests/src/test/java/io/hyperfoil/tools/horreum/it/ItResource.java index c13a16f17..9e10c37be 100644 --- a/horreum-integration-tests/src/test/java/io/hyperfoil/tools/horreum/it/ItResource.java +++ b/horreum-integration-tests/src/test/java/io/hyperfoil/tools/horreum/it/ItResource.java @@ -26,15 +26,14 @@ import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; -import org.jboss.logging.Logger; import io.hyperfoil.tools.horreum.infra.common.SelfSignedCert; +import io.quarkus.logging.Log; import io.quarkus.test.common.QuarkusTestResourceLifecycleManager; public class ItResource implements 
QuarkusTestResourceLifecycleManager { - private static final Logger log = Logger.getLogger(ItResource.class); - private static boolean started = false; + private static boolean started; public static String HORREUM_BOOTSTRAP_PASSWORD = "horreum.secret"; @@ -42,10 +41,9 @@ public class ItResource implements QuarkusTestResourceLifecycleManager { public Map start() { synchronized (ItResource.class) { if (!started) { - log.info("Starting Horreum IT resources"); + Log.info("Starting Horreum IT resources"); started = true; try { - String keycloakImage = getProperty(HORREUM_DEV_KEYCLOAK_IMAGE); String postgresImage = getProperty(HORREUM_DEV_POSTGRES_IMAGE); @@ -78,7 +76,7 @@ public Map start() { config.getOptionalValue("quarkus.http.host", String.class).orElse("localhost"))); return startContainers(containerArgs); } catch (Exception e) { - log.fatal("Could not start Horreum services", e); + Log.fatal("Could not start Horreum services", e); stopContainers(); throw new RuntimeException("Could not start Horreum services", e); } @@ -91,14 +89,13 @@ public Map start() { public void stop() { synchronized (ItResource.class) { try { - log.info("Stopping Horreum IT resources"); + Log.info("Stopping Horreum IT resources"); stopContainers(); started = false; } catch (Exception e) { throw new RuntimeException(e); } } - } } diff --git a/infra/horreum-dev-services/deployment/src/main/java/io/hyperfoil/tools/horreum/dev/services/deployment/HorreumDevServicesProcessor.java b/infra/horreum-dev-services/deployment/src/main/java/io/hyperfoil/tools/horreum/dev/services/deployment/HorreumDevServicesProcessor.java index 9f41a10da..f48688c9c 100644 --- a/infra/horreum-dev-services/deployment/src/main/java/io/hyperfoil/tools/horreum/dev/services/deployment/HorreumDevServicesProcessor.java +++ b/infra/horreum-dev-services/deployment/src/main/java/io/hyperfoil/tools/horreum/dev/services/deployment/HorreumDevServicesProcessor.java @@ -12,7 +12,6 @@ import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; -import org.jboss.logging.Logger; import io.hyperfoil.tools.horreum.dev.services.deployment.config.DevServicesConfig; import io.hyperfoil.tools.horreum.infra.common.HorreumResources; @@ -27,13 +26,12 @@ import io.quarkus.deployment.console.StartupLogCompressor; import io.quarkus.deployment.dev.devservices.GlobalDevServicesConfig; import io.quarkus.deployment.logging.LoggingSetupBuildItem; +import io.quarkus.logging.Log; @BuildSteps(onlyIfNot = IsNormal.class, onlyIf = { HorreumDevServicesProcessor.IsEnabled.class, GlobalDevServicesConfig.Enabled.class }) public class HorreumDevServicesProcessor { - private static final Logger LOG = Logger.getLogger(HorreumDevServicesProcessor.class); private static volatile DevServicesResultBuildItem.RunningDevService horreumKeycloakDevService; private static volatile DevServicesResultBuildItem.RunningDevService horreumPostgresDevService; @@ -56,13 +54,13 @@ public void startHorreumContainers( boolean errors = false; - LOG.infof("Horreum dev services (enabled: ".concat(Boolean.toString(horreumBuildTimeConfig.enabled())).concat(")")); + Log.infof("Horreum dev services (enabled: %b)", horreumBuildTimeConfig.enabled()); if (horreumBuildTimeConfig.enabled()) { try { if (errors = !dockerStatusBuildItem.isContainerRuntimeAvailable()) { - LOG.warn("Docker dev service instance not found"); + Log.warn("Docker dev service instance not found"); } if (!errors) { @@ -70,7 +68,7 @@ public void startHorreumContainers( //TODO:: check to see if
devServicesConfiguration has changed if (horreumKeycloakDevService == null || horreumPostgresDevService == null) { - LOG.infof("Starting Horreum containers"); + Log.info("Starting Horreum containers"); final Map containerArgs = new HashMap<>(); containerArgs.put(HORREUM_DEV_KEYCLOAK_ENABLED, @@ -172,14 +170,14 @@ public void startHorreumContainers( try { horreumKeycloakDevService.close(); } catch (Throwable t) { - LOG.error("Failed to stop Keycloak container", t); + Log.error("Failed to stop Keycloak container", t); } } if (horreumPostgresDevService != null) { try { horreumPostgresDevService.close(); } catch (Throwable t) { - LOG.error("Failed to stop Postgres container", t); + Log.error("Failed to stop Postgres container", t); } } horreumKeycloakDevService = null; diff --git a/infra/horreum-infra-common/src/main/java/io/hyperfoil/tools/horreum/infra/common/resources/HorreumResource.java b/infra/horreum-infra-common/src/main/java/io/hyperfoil/tools/horreum/infra/common/resources/HorreumResource.java index 3804e3686..6c41c4cc9 100644 --- a/infra/horreum-infra-common/src/main/java/io/hyperfoil/tools/horreum/infra/common/resources/HorreumResource.java +++ b/infra/horreum-infra-common/src/main/java/io/hyperfoil/tools/horreum/infra/common/resources/HorreumResource.java @@ -44,7 +44,7 @@ public void init(Map initArgs) { jdbcUrl = "jdbc:postgresql://".concat(postgresNetworkAlias).concat(":5432/horreum?loggerLevel=OFF"); } - String keycloakUrl = String.format("%s/realms/horreum", keycloakHostUrl); + String keycloakUrl = keycloakHostUrl + "/realms/horreum"; String horreumUrl = "http://" + networkAlias + ":8081"; horreumContainer