From 95890d95ca820405138a4c6500ec085aebf2e41e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lu=C3=ADs=20Serralheiro?= Date: Mon, 13 May 2024 17:47:08 +0100 Subject: [PATCH] Fixing compilation errors and adding tests --- .../DefaultJdbcRepositoryOperations.java | 27 ++--- .../DefaultR2dbcRepositoryOperations.java | 23 ++-- .../sql/AbstractSqlRepositoryOperations.java | 10 +- .../sql/LoggingSqlExecutionObserver.java | 7 ++ .../internal/sql/SqlExecutionObserver.java | 2 + .../data/tck/TestSqlExecutionObserver.groovy | 39 ++++++ .../tck/tests/AbstractRepositorySpec.groovy | 114 +++++++++++++++--- 7 files changed, 177 insertions(+), 45 deletions(-) create mode 100644 data-tck/src/main/groovy/io/micronaut/data/tck/TestSqlExecutionObserver.groovy diff --git a/data-jdbc/src/main/java/io/micronaut/data/jdbc/operations/DefaultJdbcRepositoryOperations.java b/data-jdbc/src/main/java/io/micronaut/data/jdbc/operations/DefaultJdbcRepositoryOperations.java index 7bd6b050707..ecb120c570d 100644 --- a/data-jdbc/src/main/java/io/micronaut/data/jdbc/operations/DefaultJdbcRepositoryOperations.java +++ b/data-jdbc/src/main/java/io/micronaut/data/jdbc/operations/DefaultJdbcRepositoryOperations.java @@ -88,10 +88,7 @@ import io.micronaut.data.runtime.operations.internal.OperationContext; import io.micronaut.data.runtime.operations.internal.SyncCascadeOperations; import io.micronaut.data.runtime.operations.internal.query.BindableParametersStoredQuery; -import io.micronaut.data.runtime.operations.internal.sql.AbstractSqlRepositoryOperations; -import io.micronaut.data.runtime.operations.internal.sql.SqlJsonColumnMapperProvider; -import io.micronaut.data.runtime.operations.internal.sql.SqlPreparedQuery; -import io.micronaut.data.runtime.operations.internal.sql.SqlStoredQuery; +import io.micronaut.data.runtime.operations.internal.sql.*; import io.micronaut.data.runtime.support.AbstractConversionContext; import io.micronaut.json.JsonMapper; import io.micronaut.transaction.TransactionOperations; @@ -198,7 +195,8 @@ public final class DefaultJdbcRepositoryOperations extends AbstractSqlRepository JdbcSchemaHandler schemaHandler, @Nullable JsonMapper jsonMapper, SqlJsonColumnMapperProvider sqlJsonColumnMapperProvider, - List sqlExceptionMapperList) { + List sqlExceptionMapperList, + List observers) { super( dataSourceName, new ColumnNameResultSetReader(conversionService), @@ -210,7 +208,8 @@ public final class DefaultJdbcRepositoryOperations extends AbstractSqlRepository conversionService, attributeConverterRegistry, jsonMapper, - sqlJsonColumnMapperProvider); + sqlJsonColumnMapperProvider, + observers); this.schemaTenantResolver = schemaTenantResolver; this.schemaHandler = schemaHandler; this.connectionOperations = connectionOperations; @@ -538,8 +537,8 @@ public Optional executeUpdate(@NonNull PreparedQuery pq) { try (PreparedStatement ps = prepareStatement(connection::prepareStatement, preparedQuery, true, false)) { preparedQuery.bindParameters(new JdbcParameterBinder(connection, ps, preparedQuery)); int result = ps.executeUpdate(); - if (QUERY_LOG.isTraceEnabled()) { - QUERY_LOG.trace("Update operation updated {} records", result); + for (SqlExecutionObserver observer : observers) { + observer.updatedRecords(result); } if (preparedQuery.isOptimisticLock()) { checkOptimisticLocking(1, result); @@ -847,8 +846,8 @@ public R execute(@NonNull ConnectionCallback callback) { public R prepareStatement(@NonNull String sql, @NonNull PreparedStatementCallback callback) { ArgumentUtils.requireNonNull("sql", sql); 
ArgumentUtils.requireNonNull("callback", callback); - if (QUERY_LOG.isDebugEnabled()) { - QUERY_LOG.debug("Executing Query: {}", sql); + for (SqlExecutionObserver observer : observers) { + observer.query(sql); } ConnectionContext connectionCtx = getConnectionCtx(); try { @@ -1169,8 +1168,8 @@ private PreparedStatement prepare(Connection connection, SqlStoredQuery st @Override protected void execute() throws SQLException { - if (QUERY_LOG.isDebugEnabled()) { - QUERY_LOG.debug("Executing SQL query: {}", storedQuery.getQuery()); + for (SqlExecutionObserver observer : observers) { + observer.query(storedQuery.getQuery()); } try { if (storedQuery.getOperationType() == StoredQuery.OperationType.INSERT_RETURNING @@ -1292,8 +1291,8 @@ private void setParameters(PreparedStatement stmt, SqlStoredQuery storedQu @Override protected void execute() { - if (QUERY_LOG.isDebugEnabled()) { - QUERY_LOG.debug("Executing SQL query: {}", storedQuery.getQuery()); + for (SqlExecutionObserver observer : observers) { + observer.query(storedQuery.getQuery()); } if (storedQuery.getOperationType() == StoredQuery.OperationType.INSERT_RETURNING || storedQuery.getOperationType() == StoredQuery.OperationType.UPDATE_RETURNING) { diff --git a/data-r2dbc/src/main/java/io/micronaut/data/r2dbc/operations/DefaultR2dbcRepositoryOperations.java b/data-r2dbc/src/main/java/io/micronaut/data/r2dbc/operations/DefaultR2dbcRepositoryOperations.java index 6fbc82f2616..c35f9ddb612 100644 --- a/data-r2dbc/src/main/java/io/micronaut/data/r2dbc/operations/DefaultR2dbcRepositoryOperations.java +++ b/data-r2dbc/src/main/java/io/micronaut/data/r2dbc/operations/DefaultR2dbcRepositoryOperations.java @@ -77,10 +77,7 @@ import io.micronaut.data.runtime.operations.internal.OperationContext; import io.micronaut.data.runtime.operations.internal.ReactiveCascadeOperations; import io.micronaut.data.runtime.operations.internal.query.BindableParametersStoredQuery; -import io.micronaut.data.runtime.operations.internal.sql.AbstractSqlRepositoryOperations; -import io.micronaut.data.runtime.operations.internal.sql.SqlJsonColumnMapperProvider; -import io.micronaut.data.runtime.operations.internal.sql.SqlPreparedQuery; -import io.micronaut.data.runtime.operations.internal.sql.SqlStoredQuery; +import io.micronaut.data.runtime.operations.internal.sql.*; import io.micronaut.data.runtime.support.AbstractConversionContext; import io.micronaut.json.JsonMapper; import io.micronaut.transaction.exceptions.TransactionSystemException; @@ -188,7 +185,8 @@ final class DefaultR2dbcRepositoryOperations extends AbstractSqlRepositoryOperat SqlJsonColumnMapperProvider sqlJsonColumnMapperProvider, List r2dbcExceptionMapperList, @Parameter R2dbcReactorTransactionOperations transactionOperations, - @Parameter ReactorConnectionOperations connectionOperations) { + @Parameter ReactorConnectionOperations connectionOperations, + List observers) { super( dataSourceName, new ColumnNameR2dbcResultReader(conversionService), @@ -200,7 +198,8 @@ final class DefaultR2dbcRepositoryOperations extends AbstractSqlRepositoryOperat conversionService, attributeConverterRegistry, jsonMapper, - sqlJsonColumnMapperProvider); + sqlJsonColumnMapperProvider, + observers); this.connectionFactory = connectionFactory; this.ioExecutorService = executorService; this.schemaTenantResolver = schemaTenantResolver; @@ -545,8 +544,8 @@ public Mono executeUpdate(@NonNull PreparedQuery pq) { preparedQuery.bindParameters(new R2dbcParameterBinder(connection, statement, preparedQuery)); return 
executeAndGetRowsUpdatedSingle(statement, dialect) .flatMap((Number rowsUpdated) -> { - if (QUERY_LOG.isTraceEnabled()) { - QUERY_LOG.trace("Update operation updated {} records", rowsUpdated); + for (SqlExecutionObserver observer : observers) { + observer.updatedRecords(rowsUpdated); } if (preparedQuery.isOptimisticLock()) { checkOptimisticLocking(1, rowsUpdated); @@ -950,8 +949,8 @@ private Mono executeAndMapEachRowSingle(Statement statement, Dialect dial @Override protected void execute() throws RuntimeException { - if (QUERY_LOG.isDebugEnabled()) { - QUERY_LOG.debug("Executing SQL query: {}", storedQuery.getQuery()); + for (SqlExecutionObserver observer : observers) { + observer.query(storedQuery.getQuery()); } Statement statement = prepare(ctx.connection); setParameters(statement, storedQuery); @@ -1045,8 +1044,8 @@ private void setParameters(Statement stmt, SqlStoredQuery storedQuery) { @Override protected void execute() throws RuntimeException { - if (QUERY_LOG.isDebugEnabled()) { - QUERY_LOG.debug("Executing SQL query: {}", storedQuery.getQuery()); + for (SqlExecutionObserver observer : observers) { + observer.query(storedQuery.getQuery()); } Statement statement; if (hasGeneratedId) { diff --git a/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/AbstractSqlRepositoryOperations.java b/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/AbstractSqlRepositoryOperations.java index 3754f1949b9..189ec341ecf 100644 --- a/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/AbstractSqlRepositoryOperations.java +++ b/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/AbstractSqlRepositoryOperations.java @@ -116,7 +116,7 @@ public abstract class AbstractSqlRepositoryOperations entityInserts = new ConcurrentHashMap<>(10); private final Map entityUpdates = new ConcurrentHashMap<>(10); private final Map associationInserts = new ConcurrentHashMap<>(10); - private final List listeners; + protected final List observers; /** * Default constructor. 
@@ -145,7 +145,7 @@ protected AbstractSqlRepositoryOperations( AttributeConverterRegistry attributeConverterRegistry, JsonMapper jsonMapper, SqlJsonColumnMapperProvider sqlJsonColumnMapperProvider, - List listeners) { + List observers) { super(dateTimeProvider, runtimeEntityRegistry, conversionService, attributeConverterRegistry); this.dataSourceName = dataSourceName; this.columnNameResultSetReader = columnNameResultSetReader; @@ -153,7 +153,7 @@ protected AbstractSqlRepositoryOperations( this.preparedStatementWriter = preparedStatementWriter; this.jsonMapper = jsonMapper; this.sqlJsonColumnMapperProvider = sqlJsonColumnMapperProvider; - this.listeners = listeners; + this.observers = observers; Collection> beanDefinitions = beanContext .getBeanDefinitions(Object.class, Qualifiers.byStereotype(Repository.class)); for (BeanDefinition beanDefinition : beanDefinitions) { @@ -203,7 +203,7 @@ protected PS prepareStatement(StatementSupplier statementFunction, } String query = sqlPreparedQuery.getQuery(); - listeners.forEach(listener -> listener.query(query)); + observers.forEach(listener -> listener.query(query)); final PS ps; try { ps = statementFunction.create(query); @@ -251,7 +251,7 @@ protected void setStatementParameter(PS preparedStatement, int index, DataType d dataType = dialect.getDataType(dataType); - for (SqlExecutionObserver listener : listeners) { + for (SqlExecutionObserver listener : observers) { listener.parameter(index, value, dataType); } diff --git a/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/LoggingSqlExecutionObserver.java b/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/LoggingSqlExecutionObserver.java index ce32c37658b..52820f9e51b 100644 --- a/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/LoggingSqlExecutionObserver.java +++ b/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/LoggingSqlExecutionObserver.java @@ -38,4 +38,11 @@ public void parameter(int index, Object value, DataType dataType) { QUERY_LOG.trace("Binding parameter at position {} to value {} with data type: {}", index, value, dataType); } } + + @Override + public void updatedRecords(Number result) { + if (QUERY_LOG.isTraceEnabled()) { + QUERY_LOG.trace("Update operation updated {} records", result); + } + } } diff --git a/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/SqlExecutionObserver.java b/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/SqlExecutionObserver.java index e28ef34314a..651f451a95b 100644 --- a/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/SqlExecutionObserver.java +++ b/data-runtime/src/main/java/io/micronaut/data/runtime/operations/internal/sql/SqlExecutionObserver.java @@ -22,4 +22,6 @@ public interface SqlExecutionObserver { void query(String query); void parameter(int index, Object value, DataType datatype); + + void updatedRecords(Number result); } diff --git a/data-tck/src/main/groovy/io/micronaut/data/tck/TestSqlExecutionObserver.groovy b/data-tck/src/main/groovy/io/micronaut/data/tck/TestSqlExecutionObserver.groovy new file mode 100644 index 00000000000..9e0066e5bdb --- /dev/null +++ b/data-tck/src/main/groovy/io/micronaut/data/tck/TestSqlExecutionObserver.groovy @@ -0,0 +1,39 @@ +package io.micronaut.data.tck + +import io.micronaut.data.model.DataType +import io.micronaut.data.runtime.operations.internal.sql.SqlExecutionObserver +import jakarta.inject.Singleton + +@Singleton 
+class TestSqlExecutionObserver implements SqlExecutionObserver { + public List invocations = new ArrayList<>() + + @Override + void query(String query) { + invocations.add(new Invocation(query)) + } + + @Override + void parameter(int index, Object value, DataType datatype) { + invocations.last().parameters[index] = value + } + + @Override + void updatedRecords(Number result) { + invocations.last().affected = result + } + + void clear() { + invocations.clear() + } + + class Invocation { + String query + Map parameters = [:] + Number affected + + Invocation(String query) { + this.query = query + } + } +} diff --git a/data-tck/src/main/groovy/io/micronaut/data/tck/tests/AbstractRepositorySpec.groovy b/data-tck/src/main/groovy/io/micronaut/data/tck/tests/AbstractRepositorySpec.groovy index fe2e7f6194f..8e1cd79c778 100644 --- a/data-tck/src/main/groovy/io/micronaut/data/tck/tests/AbstractRepositorySpec.groovy +++ b/data-tck/src/main/groovy/io/micronaut/data/tck/tests/AbstractRepositorySpec.groovy @@ -25,6 +25,7 @@ import io.micronaut.data.repository.jpa.criteria.DeleteSpecification import io.micronaut.data.repository.jpa.criteria.PredicateSpecification import io.micronaut.data.repository.jpa.criteria.QuerySpecification import io.micronaut.data.repository.jpa.criteria.UpdateSpecification +import io.micronaut.data.tck.TestSqlExecutionObserver import io.micronaut.data.tck.entities.Author import io.micronaut.data.tck.entities.AuthorBooksDto import io.micronaut.data.tck.entities.AuthorDtoWithBookDtos @@ -60,27 +61,16 @@ import jakarta.persistence.criteria.CriteriaBuilder import jakarta.persistence.criteria.CriteriaUpdate import jakarta.persistence.criteria.Predicate import jakarta.persistence.criteria.Root -import spock.lang.AutoCleanup -import spock.lang.IgnoreIf -import spock.lang.Shared -import spock.lang.Specification -import spock.lang.Unroll +import spock.lang.* import java.sql.Connection import java.time.LocalDate import java.time.ZoneId import java.util.concurrent.CountDownLatch import java.util.concurrent.TimeUnit -import java.util.stream.Collectors -import static io.micronaut.data.tck.repositories.BookSpecifications.hasChapter -import static io.micronaut.data.tck.repositories.BookSpecifications.titleEquals -import static io.micronaut.data.tck.repositories.BookSpecifications.titleEqualsWithJoin -import static io.micronaut.data.tck.repositories.PersonRepository.Specifications.distinct -import static io.micronaut.data.tck.repositories.PersonRepository.Specifications.idsIn -import static io.micronaut.data.tck.repositories.PersonRepository.Specifications.nameEquals -import static io.micronaut.data.tck.repositories.PersonRepository.Specifications.setIncome -import static io.micronaut.data.tck.repositories.PersonRepository.Specifications.setName +import static io.micronaut.data.tck.repositories.BookSpecifications.* +import static io.micronaut.data.tck.repositories.PersonRepository.Specifications.* abstract class AbstractRepositorySpec extends Specification { @@ -118,6 +108,9 @@ abstract class AbstractRepositorySpec extends Specification { @Shared Optional> transactionManager = context.findBean(SynchronousTransactionManager) + @Shared + TestSqlExecutionObserver observer = context.getBean(TestSqlExecutionObserver) + ApplicationContext getApplicationContext() { return context } @@ -2802,6 +2795,99 @@ abstract class AbstractRepositorySpec extends Specification { entityWithIdClass2Repository.deleteAll() } + void "observer receives inserts"() { + given: + observer.clear() + + when: + 
bookRepository.save(new Book(title: "Anonymous", totalPages: 400)) + + then: + observer.invocations.size() == 1 + observer.invocations.get(0).query =~ /(?i)insert\s+into\s+.*/ + observer.invocations.get(0).parameters[3] == "Anonymous" + observer.invocations.get(0).parameters[4] == 400 + + cleanup: + cleanupData() + } + + void "observer receives query"() { + given: + observer.clear() + + when: + bookRepository.findById(1) + + then: + observer.invocations.size() == 1 + observer.invocations.get(0).query =~ /(?i)select\s+.*\s+from\s+.book.\s+.*/ + observer.invocations.get(0).parameters == [1: 1] + + + cleanup: + cleanupData() + } + + void "observer receives update"() { + given: + setupBooks() + def book = bookRepository.findAllByTitleStartingWith("Along Came a Spider").first() + def author = authorRepository.searchByName("Stephen King") + observer.clear() + + when: + bookRepository.updateAuthor(book.id, author) + + then: + observer.invocations.size() == 1 + observer.invocations[0].query =~ /(?i)update\s+.book.\s+.*/ + observer.invocations[0].parameters[1] == author.id + observer.invocations[0].parameters[3] == book.id + observer.invocations[0].affected == 1 + + cleanup: + cleanupData() + } + + void "observer receives delete"() { + given: + setupBooks() + def book = bookRepository.findAllByTitleStartingWith("Along Came a Spider").first() + observer.clear() + + when: + bookRepository.delete(book) + + then: + observer.invocations.size() == 1 + observer.invocations[0].query =~ /(?i)delete\s+from\s+.book.\s+.*/ + observer.invocations[0].parameters == [1: book.id] + + cleanup: + cleanupData() + } + + void "observer receives @Query"(){ + given: + saveSampleBooks() + observer.clear() + + when: + def book = bookDtoRepository.findByTitleWithQuery("The Stand") + + then: + book.isPresent() + book.get().title == "The Stand" + + observer.invocations.size() == 1 + observer.invocations[0].query =~ /select \* from book b where b.title = .*/ + observer.invocations[0].parameters == [1: "The Stand"] + + cleanup: + cleanupData() + } + private GregorianCalendar getYearMonthDay(Date dateCreated) { def cal = dateCreated.toCalendar() def localDate = LocalDate.of(cal.get(Calendar.YEAR), cal.get(Calendar.MONTH) + 1, cal.get(Calendar.DAY_OF_MONTH))
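For downstream users, the SqlExecutionObserver contract introduced by this patch (query, parameter, updatedRecords) can also be implemented outside the TCK. The sketch below is a hypothetical Java observer — the class name, package, and log category are illustrative and not part of this change — showing how an application bean could hook into the same callbacks that LoggingSqlExecutionObserver and TestSqlExecutionObserver use. Registration mirrors TestSqlExecutionObserver above: any @Singleton bean implementing the interface is injected into the observers list that this patch adds to the repository operations constructors.

package com.example.audit;

import io.micronaut.data.model.DataType;
import io.micronaut.data.runtime.operations.internal.sql.SqlExecutionObserver;
import jakarta.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical downstream observer; only the SqlExecutionObserver methods
// defined in this patch are assumed to exist.
@Singleton
public class AuditingSqlExecutionObserver implements SqlExecutionObserver {

    private static final Logger AUDIT_LOG = LoggerFactory.getLogger("sql.audit");

    @Override
    public void query(String query) {
        // Invoked once per SQL statement before it is executed
        AUDIT_LOG.info("SQL: {}", query);
    }

    @Override
    public void parameter(int index, Object value, DataType datatype) {
        // Invoked for each parameter bound to the current statement
        AUDIT_LOG.info("  parameter [{}] = {} ({})", index, value, datatype);
    }

    @Override
    public void updatedRecords(Number result) {
        // Invoked with the affected row count of update/delete operations
        AUDIT_LOG.info("  rows affected: {}", result);
    }
}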