Cache pagination: add FieldNameGenerator and EmbeddedFieldsProvider #5772

Merged · 3 commits · Apr 8, 2024
Changes from all commits
@@ -84,7 +84,9 @@ class ResolverContext(
val variables: Executable.Variables,
val parent: Map<String, @JvmSuppressWildcards Any?>,
val parentId: String,
val parentType: String,
val cacheHeaders: CacheHeaders,
val fieldNameGenerator: FieldNameGenerator,
)

/**
@@ -119,6 +121,19 @@ object DefaultCacheResolver : CacheResolver {
}
}

/**
* An [ApolloResolver] that uses the parent to resolve fields.
*/
object DefaultApolloResolver : ApolloResolver {
override fun resolveField(context: ResolverContext): Any? {
val name = context.fieldNameGenerator.getFieldName(FieldNameContext(context.parentType, context.field, context.variables))
if (!context.parent.containsKey(name)) {
throw CacheMissException(context.parentId, name)
}

return context.parent[name]
}
}
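
As an illustration (not part of this diff), a minimal sketch of a custom ApolloResolver composing with DefaultApolloResolver, assuming a hypothetical nullable "avatarUrl" field whose cache misses should be tolerated:

// Sketch only: delegate to DefaultApolloResolver and swallow cache misses for one field.
object LenientAvatarResolver : ApolloResolver {
  override fun resolveField(context: ResolverContext): Any? {
    return try {
      DefaultApolloResolver.resolveField(context)
    } catch (e: CacheMissException) {
      // "avatarUrl" is a hypothetical nullable field; any other field is still a real miss.
      if (context.field.name == "avatarUrl") null else throw e
    }
  }
}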

/**
* A cache resolver that uses the cache date as a receive date and expires after a fixed max age
@@ -159,7 +174,7 @@ class ReceiveDateApolloResolver(private val maxAge: Int) : ApolloResolver {
* A cache resolver that uses the cache date as an expiration date and expires past it
*/
@ApolloExperimental
class ExpireDateCacheResolver() : CacheResolver {
class ExpireDateCacheResolver : CacheResolver {
/**
* @param parent a [Map] that represent the object containing this field. The map values can have the same types as the ones in [Record]
*/
@@ -208,12 +223,16 @@ object FieldPolicyCacheResolver : CacheResolver {
}

/**
* A [ApolloResolver] that uses @fieldPolicy annotations to resolve fields and delegates to [DefaultCacheResolver] else
* A [ApolloResolver] that uses @fieldPolicy annotations to resolve fields and delegates to [DefaultApolloResolver] else
*/
object FieldPolicyApolloResolver : ApolloResolver {
override fun resolveField(context: ResolverContext): Any? {
return FieldPolicyCacheResolver.resolveField(context.field, context.variables, context.parent, context.parentId)
}
}
val keyArgsValues = context.field.argumentValues(context.variables) { it.isKey }.values.map { it.toString() }

if (keyArgsValues.isNotEmpty()) {
return CacheKey(context.field.type.rawType().name, keyArgsValues)
}

return DefaultApolloResolver.resolveField(context)
}
}
@@ -0,0 +1,46 @@
package com.apollographql.apollo3.cache.normalized.api

import com.apollographql.apollo3.annotations.ApolloExperimental
import com.apollographql.apollo3.api.CompiledNamedType
import com.apollographql.apollo3.api.InterfaceType
import com.apollographql.apollo3.api.ObjectType

@ApolloExperimental
interface EmbeddedFieldsProvider {
fun getEmbeddedFields(context: EmbeddedFieldsContext): List<String>
Review comment (Contributor) on lines +9 to +10:
We could make this even more typesafe and also save a few string comparisons

interface CompiledFieldDefinition

// type
class Query {
  // field
  class Reviews {
    companion object {
      // argument
      val episode = CompiledArgumentDefinition.Builder("episode").build()
      val starsFloat = CompiledArgumentDefinition.Builder("starsFloat").build()
      
      val __definition = object: CompiledFieldDefinition {} 
    }
  }
  companion object {
    val type: ObjectType = ObjectType.Builder(name = "Query").build()
  }
}

And then

interface EmbeddedFieldsProvider {
  fun getEmbeddedFields(context: EmbeddedFieldsContext): List<CompiledFieldDefinition>
}

Not sure if it's worth it though...

}

@ApolloExperimental
class EmbeddedFieldsContext(
val parentType: CompiledNamedType,
)

@ApolloExperimental
object DefaultEmbeddedFieldsProvider : EmbeddedFieldsProvider {
override fun getEmbeddedFields(context: EmbeddedFieldsContext): List<String> {
return context.parentType.embeddedFields
}
}

private val CompiledNamedType.embeddedFields: List<String>
get() = when (this) {
is ObjectType -> embeddedFields
is InterfaceType -> embeddedFields
else -> emptyList()
}

@ApolloExperimental
class ConnectionEmbeddedFieldsProvider(
connectionFields: Map<String, List<String>>,
connectionTypes: Set<String>,
) : EmbeddedFieldsProvider {
companion object {
private val connectionFieldsToEmbed = listOf("pageInfo", "edges")
}

private val embeddedFields = connectionFields + connectionTypes.associateWith { connectionFieldsToEmbed }

override fun getEmbeddedFields(context: EmbeddedFieldsContext): List<String> {
return embeddedFields[context.parentType.name].orEmpty()
}
}
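
For illustration (not part of this diff), a sketch of constructing a ConnectionEmbeddedFieldsProvider; the type and field names are hypothetical:

// Embed the hypothetical "friends" field of "User", and embed the default
// connection fields ("pageInfo", "edges") of the hypothetical "UserConnection" type.
val embeddedFieldsProvider = ConnectionEmbeddedFieldsProvider(
    connectionFields = mapOf("User" to listOf("friends")),
    connectionTypes = setOf("UserConnection"),
)
// For a parent type named "UserConnection", getEmbeddedFields(...) returns
// listOf("pageInfo", "edges"); for "User" it returns listOf("friends"); otherwise an empty list.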
@@ -0,0 +1,46 @@
package com.apollographql.apollo3.cache.normalized.api

import com.apollographql.apollo3.annotations.ApolloExperimental
import com.apollographql.apollo3.api.CompiledField
import com.apollographql.apollo3.api.Executable

@ApolloExperimental
interface FieldNameGenerator {
fun getFieldName(context: FieldNameContext): String
}

@ApolloExperimental
class FieldNameContext(
val parentType: String,
val field: CompiledField,
val variables: Executable.Variables,
)

@ApolloExperimental
object DefaultFieldNameGenerator : FieldNameGenerator {
override fun getFieldName(context: FieldNameContext): String {
return context.field.nameWithArguments(context.variables)
}
}

@ApolloExperimental
class ConnectionFieldNameGenerator(private val connectionFields: Map<String, List<String>>) : FieldNameGenerator {
companion object {
private val paginationArguments = setOf("first", "last", "before", "after")
}

override fun getFieldName(context: FieldNameContext): String {
return if (context.field.name in connectionFields[context.parentType].orEmpty()) {
context.field.newBuilder()
.arguments(
context.field.arguments.filter { argument ->
argument.name !in paginationArguments
}
)
.build()
.nameWithArguments(context.variables)
} else {
DefaultFieldNameGenerator.getFieldName(context)
}
}
}
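
For illustration (not part of this diff), a sketch of how ConnectionFieldNameGenerator affects the cache field name of a hypothetical "User.friends" connection field: pagination arguments are dropped so that successive pages are stored under a single field name:

val fieldNameGenerator = ConnectionFieldNameGenerator(
    connectionFields = mapOf("User" to listOf("friends")),
)
// For friends(first: 10, after: "xyz") on "User", getFieldName(...) returns a name
// without the pagination arguments, whereas DefaultFieldNameGenerator would keep them.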
@@ -1,6 +1,5 @@
package com.apollographql.apollo3.cache.normalized.api

import com.apollographql.apollo3.annotations.ApolloExperimental
import com.apollographql.apollo3.annotations.ApolloInternal
import com.apollographql.apollo3.api.Adapter
import com.apollographql.apollo3.api.CustomScalarAdapters
@@ -11,96 +10,71 @@ import com.apollographql.apollo3.api.json.MapJsonWriter
import com.apollographql.apollo3.api.variables
import com.apollographql.apollo3.cache.normalized.api.internal.CacheBatchReader
import com.apollographql.apollo3.cache.normalized.api.internal.Normalizer
import kotlin.jvm.JvmOverloads

fun <D : Operation.Data> Operation<D>.normalize(
data: D,
customScalarAdapters: CustomScalarAdapters,
cacheKeyGenerator: CacheKeyGenerator,
) = normalize(data, customScalarAdapters, cacheKeyGenerator, EmptyMetadataGenerator, CacheKey.rootKey().key)
metadataGenerator: MetadataGenerator = EmptyMetadataGenerator,
fieldNameGenerator: FieldNameGenerator = DefaultFieldNameGenerator,
embeddedFieldsProvider: EmbeddedFieldsProvider = DefaultEmbeddedFieldsProvider,
) = normalize(data, customScalarAdapters, cacheKeyGenerator, metadataGenerator, fieldNameGenerator, embeddedFieldsProvider, CacheKey.rootKey().key)

@ApolloExperimental
fun <D : Operation.Data> Operation<D>.normalize(
data: D,
customScalarAdapters: CustomScalarAdapters,
cacheKeyGenerator: CacheKeyGenerator,
metadataGenerator: MetadataGenerator,
) = normalize(data, customScalarAdapters, cacheKeyGenerator, metadataGenerator, CacheKey.rootKey().key)


@Suppress("UNCHECKED_CAST")
fun <D : Executable.Data> Executable<D>.normalize(
data: D,
customScalarAdapters: CustomScalarAdapters,
cacheKeyGenerator: CacheKeyGenerator,
rootKey: String,
): Map<String, Record> {
val writer = MapJsonWriter()
adapter().toJson(writer, customScalarAdapters, data)
val variables = variables(customScalarAdapters, true)
return Normalizer(variables, rootKey, cacheKeyGenerator, EmptyMetadataGenerator)
.normalize(writer.root() as Map<String, Any?>, rootField().selections, rootField().type.rawType())
}

@ApolloExperimental
@Suppress("UNCHECKED_CAST")
fun <D : Executable.Data> Executable<D>.normalize(
data: D,
customScalarAdapters: CustomScalarAdapters,
cacheKeyGenerator: CacheKeyGenerator,
metadataGenerator: MetadataGenerator,
metadataGenerator: MetadataGenerator = EmptyMetadataGenerator,
fieldNameGenerator: FieldNameGenerator = DefaultFieldNameGenerator,
embeddedFieldsProvider: EmbeddedFieldsProvider = DefaultEmbeddedFieldsProvider,
rootKey: String,
): Map<String, Record> {
val writer = MapJsonWriter()
adapter().toJson(writer, customScalarAdapters, data)
val variables = variables(customScalarAdapters)
return Normalizer(variables, rootKey, cacheKeyGenerator, metadataGenerator)
@Suppress("UNCHECKED_CAST")
return Normalizer(variables, rootKey, cacheKeyGenerator, metadataGenerator, fieldNameGenerator, embeddedFieldsProvider)
.normalize(writer.root() as Map<String, Any?>, rootField().selections, rootField().type.rawType())
}

fun <D : Executable.Data> Executable<D>.readDataFromCache(
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
): D = readDataFromCache(
cacheKey = CacheKey.rootKey(),
customScalarAdapters = customScalarAdapters,
cache = cache,
cacheResolver = cacheResolver,
cacheHeaders = cacheHeaders,
)

@JvmOverloads
fun <D : Executable.Data> Executable<D>.readDataFromCache(
cacheKey: CacheKey,
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
fieldNameGenerator: FieldNameGenerator = DefaultFieldNameGenerator,
): D {
val variables = variables(customScalarAdapters, true)
return readInternal(
cacheKey = cacheKey,
cache = cache,
cacheResolver = cacheResolver,
cacheHeaders = cacheHeaders,
variables = variables
variables = variables,
fieldNameGenerator = fieldNameGenerator,
).toData(adapter(), customScalarAdapters, variables)
}

@JvmOverloads
fun <D : Executable.Data> Executable<D>.readDataFromCache(
cacheKey: CacheKey,
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
cacheResolver: ApolloResolver,
cacheHeaders: CacheHeaders,
fieldNameGenerator: FieldNameGenerator = DefaultFieldNameGenerator,
): D {
val variables = variables(customScalarAdapters, true)
return readInternal(
cacheKey = cacheKey,
cache = cache,
cacheResolver = cacheResolver,
cacheHeaders = cacheHeaders,
variables = variables
variables = variables,
fieldNameGenerator = fieldNameGenerator,
).toData(adapter(), customScalarAdapters, variables)
}

@@ -111,12 +85,14 @@ fun <D : Executable.Data> Executable<D>.readDataFromCacheInternal(
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
variables: Executable.Variables,
fieldNameGenerator: FieldNameGenerator,
): CacheData = readInternal(
cacheKey = cacheKey,
cache = cache,
cacheResolver = cacheResolver,
cacheHeaders = cacheHeaders,
variables = variables,
fieldNameGenerator = fieldNameGenerator,
)

@ApolloInternal
@@ -126,12 +102,14 @@ fun <D : Executable.Data> Executable<D>.readDataFromCacheInternal(
cacheResolver: ApolloResolver,
cacheHeaders: CacheHeaders,
variables: Executable.Variables,
fieldNameGenerator: FieldNameGenerator,
): CacheData = readInternal(
cacheKey = cacheKey,
cache = cache,
cacheResolver = cacheResolver,
cacheHeaders = cacheHeaders,
variables
variables = variables,
fieldNameGenerator = fieldNameGenerator,
)


@@ -141,6 +119,7 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
cacheResolver: Any,
cacheHeaders: CacheHeaders,
variables: Executable.Variables,
fieldNameGenerator: FieldNameGenerator,
): CacheData {
return CacheBatchReader(
cache = cache,
@@ -149,7 +128,8 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
variables = variables,
rootKey = cacheKey.key,
rootSelections = rootField().selections,
rootTypename = rootField().type.rawType().name
rootTypename = rootField().type.rawType().name,
fieldNameGenerator = fieldNameGenerator,
).collectData()
}
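
For illustration (not part of this diff), a sketch of a call site using the new optional parameters; operation, data, connectionFields and connectionTypes are hypothetical values defined elsewhere:

val records: Map<String, Record> = operation.normalize(
    data = data,
    customScalarAdapters = CustomScalarAdapters.Empty,
    cacheKeyGenerator = TypePolicyCacheKeyGenerator,
    // metadataGenerator keeps its default (EmptyMetadataGenerator) in this sketch
    fieldNameGenerator = ConnectionFieldNameGenerator(connectionFields),
    embeddedFieldsProvider = ConnectionEmbeddedFieldsProvider(connectionFields, connectionTypes),
)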

@@ -9,6 +9,7 @@ import com.apollographql.apollo3.cache.normalized.api.CacheData
import com.apollographql.apollo3.cache.normalized.api.CacheHeaders
import com.apollographql.apollo3.cache.normalized.api.CacheKey
import com.apollographql.apollo3.cache.normalized.api.CacheResolver
import com.apollographql.apollo3.cache.normalized.api.FieldNameGenerator
import com.apollographql.apollo3.cache.normalized.api.ReadOnlyNormalizedCache
import com.apollographql.apollo3.cache.normalized.api.Record
import com.apollographql.apollo3.cache.normalized.api.ResolverContext
@@ -29,6 +30,7 @@ internal class CacheBatchReader(
private val cacheHeaders: CacheHeaders,
private val rootSelections: List<CompiledSelection>,
private val rootTypename: String,
private val fieldNameGenerator: FieldNameGenerator,
) {
/**
* @param key: the key of the record we need to fetch
@@ -120,7 +122,15 @@
val value = when (cacheResolver) {
is CacheResolver -> cacheResolver.resolveField(it, variables, record, record.key)
is ApolloResolver -> {
cacheResolver.resolveField(ResolverContext(it, variables, record, record.key, cacheHeaders))
cacheResolver.resolveField(ResolverContext(
field = it,
variables = variables,
parent = record,
parentId = record.key,
parentType = pendingReference.parentType,
cacheHeaders = cacheHeaders,
fieldNameGenerator = fieldNameGenerator,
))
}
else -> throw IllegalStateException()
}
@@ -168,7 +178,15 @@
val value = when (cacheResolver) {
is CacheResolver -> cacheResolver.resolveField(it, variables, this, "")
is ApolloResolver -> {
cacheResolver.resolveField(ResolverContext(it, variables, this, "", cacheHeaders))
cacheResolver.resolveField(ResolverContext(
field = it,
variables = variables,
parent = this,
parentId = "",
parentType = parentType,
cacheHeaders = cacheHeaders,
fieldNameGenerator = fieldNameGenerator,
))
}
else -> throw IllegalStateException()
}