Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Forward-port assorted participant-side checks & check keys for structural equality 3.x #19691

Closed
wants to merge 17 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs.daml.com
Submodule docs.daml.com added at de038f
Original file line number Diff line number Diff line change
Expand Up @@ -13,29 +13,68 @@ import com.digitalasset.canton.platform.apiserver.services.admin.ApiPackageManag
import com.digitalasset.daml.lf.archive.DamlLf.Archive
import com.digitalasset.daml.lf.archive.Decode
import com.digitalasset.daml.lf.data.Ref
import com.digitalasset.daml.lf.language.Ast
import com.digitalasset.daml.lf.language.Util.dependenciesInTopologicalOrder
import com.digitalasset.canton.platform.apiserver.services.admin.PackageUpgradeValidator.PackageMap
import com.digitalasset.daml.lf.language.{Ast, LanguageVersion}
import com.digitalasset.daml.lf.validation.{TypecheckUpgrades, UpgradeError}
import com.digitalasset.canton.util.EitherTUtil
import scalaz.std.either.*
import scalaz.std.option.*
import scalaz.std.scalaFuture.futureInstance
import scalaz.syntax.traverse.*

import scala.concurrent.{ExecutionContext, Future}
import scala.math.Ordering.Implicits.infixOrderingOps

object PackageUpgradeValidator {
/** Maps a package id to its (package name, package version) metadata.
  * Used by the upgrade validation to locate the version-ordering neighbours
  * of an uploaded package — presumably spanning both already-stored and
  * currently-uploading packages; confirm against `getPackageMap` callers.
  */
type PackageMap = Map[Ref.PackageId, (Ref.PackageName, Ref.PackageVersion)]
}

class PackageUpgradeValidator(
getPackageMap: LoggingContextWithTrace => Map[
Ref.PackageId,
(Ref.PackageName, Ref.PackageVersion),
],
getPackageMap: LoggingContextWithTrace => PackageMap,
getLfArchive: LoggingContextWithTrace => Ref.PackageId => Future[Option[Archive]],
val loggerFactory: NamedLoggerFactory,
)(implicit executionContext: ExecutionContext)
extends NamedLogging {

def validateUpgrade(uploadedPackage: (Ref.PackageId, Ast.Package))(implicit
def validateUpgrade(
upgradingPackages: List[(Ref.PackageId, Ast.Package)]
)(implicit
loggingContext: LoggingContextWithTrace
): EitherT[Future, DamlError, Unit] = {
val upgradingPackagesMap = upgradingPackages.toMap
val packagesInTopologicalOrder =
dependenciesInTopologicalOrder(upgradingPackages.map(_._1), upgradingPackagesMap)
val packageMap = getPackageMap(loggingContext)

def go(
packageMap: PackageMap,
deps: List[Ref.PackageId],
): EitherT[Future, DamlError, PackageMap] = deps match {
case Nil => EitherT.pure[Future, DamlError](packageMap)
case pkgId :: rest =>
val pkg = upgradingPackagesMap(pkgId)
val supportsUpgrades = LanguageVersion.supportsPackageUpgrades(pkg.languageVersion) && !pkg.isUtilityPackage
for {
_ <- EitherTUtil.ifThenET(supportsUpgrades)(
// This check will look for the closest neighbors of pkgId for the package versioning ordering and
// will load them from the DB and decode them. If one were to upload many packages that upgrade each
// other, we will end up decoding the same package many times. Some of these cases could be sped up
// by a cache depending on the order in which the packages are uploaded.
validatePackageUpgrade((pkgId, pkg), packageMap)
)
res <- go(packageMap + ((pkgId, (pkg.metadata.name, pkg.metadata.version))), rest)
} yield res
}
go(packageMap, packagesInTopologicalOrder).map(_ => ())
}

private def validatePackageUpgrade(
uploadedPackage: (Ref.PackageId, Ast.Package),
packageMap: PackageMap,
)(implicit
loggingContext: LoggingContextWithTrace
): EitherT[Future, DamlError, Unit] = {
val (uploadedPackageId, uploadedPackageAst) = uploadedPackage
val optUpgradingDar = Some(uploadedPackage)
logger.info(
Expand All @@ -49,6 +88,9 @@ class PackageUpgradeValidator(
)
EitherT.rightT[Future, DamlError](())
} else {
logger.info(
s"Bad version of package $uploadedPackageId as it has been previously uploaded $existingPackageId"
)
EitherT.leftT[Future, Unit](
Validation.UpgradeVersion
.Error(
Expand All @@ -69,6 +111,7 @@ class PackageUpgradeValidator(
)
_ <- typecheckUpgrades(
TypecheckUpgrades.MaximalDarCheck,
packageMap,
optUpgradingDar,
optMaximalDar,
)
Expand All @@ -77,6 +120,7 @@ class PackageUpgradeValidator(
)
_ <- typecheckUpgrades(
TypecheckUpgrades.MinimalDarCheck,
packageMap,
optMinimalDar,
optUpgradingDar,
)
Expand Down Expand Up @@ -144,6 +188,7 @@ class PackageUpgradeValidator(

private def strictTypecheckUpgrades(
phase: TypecheckUpgrades.UploadPhaseCheck,
packageMap: PackageMap,
optNewDar1: Option[(Ref.PackageId, Ast.Package)],
oldPkgId2: Ref.PackageId,
optOldPkg2: Option[Ast.Package],
Expand All @@ -161,7 +206,7 @@ class PackageUpgradeValidator(
EitherT(
Future(
TypecheckUpgrades
.typecheckUpgrades((newPkgId1, newPkg1), oldPkgId2, optOldPkg2)
.typecheckUpgrades(packageMap, (newPkgId1, newPkg1), oldPkgId2, optOldPkg2)
.toEither
)
).leftMap[DamlError] {
Expand All @@ -183,6 +228,7 @@ class PackageUpgradeValidator(

private def typecheckUpgrades(
typecheckPhase: TypecheckUpgrades.UploadPhaseCheck,
packageMap: PackageMap,
optNewDar1: Option[(Ref.PackageId, Ast.Package)],
optOldDar2: Option[(Ref.PackageId, Ast.Package)],
)(implicit
Expand All @@ -195,6 +241,7 @@ class PackageUpgradeValidator(
case (Some((newPkgId1, newPkg1)), Some((oldPkgId2, oldPkg2))) =>
strictTypecheckUpgrades(
typecheckPhase,
packageMap,
Some((newPkgId1, newPkg1)),
oldPkgId2,
Some(oldPkg2),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,9 @@ import com.digitalasset.canton.{LedgerSubmissionId, LfPackageId}
import com.digitalasset.daml.lf.archive.{DamlLf, Dar as LfDar, DarParser, Decode}
import com.digitalasset.daml.lf.data.Ref
import com.digitalasset.daml.lf.engine.Engine
import com.digitalasset.daml.lf.language.Ast
import com.digitalasset.daml.lf.language.{Ast, LanguageVersion}
import com.google.protobuf.ByteString
import scala.math.Ordering.Implicits.infixOrderingOps

import java.nio.file.Paths
import java.util.zip.ZipInputStream
Expand Down Expand Up @@ -88,7 +89,7 @@ class PackageUploader(
dependencies <- dar.dependencies.parTraverse(archive =>
catchUpstreamErrors(Decode.decodeArchive(archive))
)
_ <- validatePackages(mainPackage, dependencies)
_ <- validatePackages(mainPackage :: dependencies)
} yield hash
}

Expand All @@ -98,7 +99,7 @@ class PackageUploader(
submissionId: LedgerSubmissionId,
)(implicit
traceContext: TraceContext
): EitherT[FutureUnlessShutdown, DamlError, (List[Ref.PackageId], Hash)] =
): EitherT[FutureUnlessShutdown, DamlError, (List[LfPackageId], Hash)] =
performUnlessClosingEitherUSF("upload DAR") {
val darNameO =
fileNameO.map(fn => PathUtils.getFilenameWithoutExtension(Paths.get(fn).getFileName))
Expand All @@ -118,29 +119,28 @@ class PackageUploader(
dependencies <- dar.dependencies.parTraverse(archive =>
catchUpstreamErrors(Decode.decodeArchive(archive)).map(archive -> _)
)
allPackages = mainPackage :: dependencies
hash <- EitherT(
uploadDarExecutionQueue.executeUS(
uploadDarSequentialStep(
darPayload = darPayload,
mainPackage = mainPackage,
dependencies = dependencies,
packages = allPackages,
// TODO(#17635): Allow more generic source descriptions or rename source description to DAR name
lengthValidatedDarName = lengthValidatedNameO,
submissionId = submissionId,
),
description = "store DAR",
)
)
} yield (mainPackage._2._1 :: dependencies.map(_._2._1)) -> hash
} yield allPackages.map(_._2._1) -> hash
}

// This stage must be run sequentially to exclude the possibility
// that a package validation against the current package metadata view
// is happening concurrently with an update of the package metadata view.
private def uploadDarSequentialStep(
darPayload: ByteString,
mainPackage: (DamlLf.Archive, (LfPackageId, Ast.Package)),
dependencies: List[(DamlLf.Archive, (LfPackageId, Ast.Package))],
packages: List[(DamlLf.Archive, (LfPackageId, Ast.Package))],
lengthValidatedDarName: Option[String255],
submissionId: LedgerSubmissionId,
)(implicit traceContext: TraceContext): FutureUnlessShutdown[Either[DamlError, Hash]] = {
Expand All @@ -165,7 +165,9 @@ class PackageUploader(
s"Managed to upload one or more archives for submissionId $submissionId"
)
_ = allPackages.foreach { case (_, (pkgId, pkg)) =>
packageMetadataView.update(PackageMetadata.from(pkgId, pkg))
if (pkg.languageVersion >= LanguageVersion.Features.packageUpgrades && !pkg.isUtilityPackage) {
packageMetadataView.update(PackageMetadata.from(pkgId, pkg))
}
}
} yield ()

Expand All @@ -178,10 +180,8 @@ class PackageUploader(
DarDescriptor(hash, persistedDarName),
darPayload.toByteArray,
)
validatePackages(mainPackage._2, dependencies.map(_._2))
.semiflatMap { _ =>
val allPackages = mainPackage :: dependencies
val result = persist(darDescriptor, uploadTime, allPackages)
validatePackages(packages.map(_._2)).semiflatMap { _ =>
val result = persist(darDescriptor, uploadTime, packages)
handleUploadResult(result, submissionId)
}
.map(_ => hash)
Expand Down Expand Up @@ -213,26 +213,27 @@ class PackageUploader(
}

private def validatePackages(
mainPackage: (LfPackageId, Ast.Package),
dependencies: List[(LfPackageId, Ast.Package)],
packages: List[(LfPackageId, Ast.Package)]
)(implicit
traceContext: TraceContext
): EitherT[FutureUnlessShutdown, DamlError, Unit] =
for {
_ <- EitherT.fromEither[FutureUnlessShutdown](
engine
.validatePackages((mainPackage :: dependencies).toMap)
.validatePackages(packages.toMap)
.leftMap(
PackageServiceErrors.Validation.handleLfEnginePackageError(_): DamlError
)
)
_ <-
if (enableUpgradeValidation) {
packageUpgradeValidator
.validateUpgrade(mainPackage)(LoggingContextWithTrace(loggerFactory))
.validateUpgrade(packages)(LoggingContextWithTrace(loggerFactory))
.mapK(FutureUnlessShutdown.outcomeK)
} else {
logger.info(s"Skipping upgrade validation for package ${mainPackage._1}.")
logger.info(
s"Skipping upgrade validation for packages ${packages.map(_._1).sorted.mkString(", ")}"
)
EitherT.pure[FutureUnlessShutdown, DamlError](())
}
} yield ()
Expand Down
45 changes: 27 additions & 18 deletions sdk/compiler/daml-lf-tools/src/DA/Daml/LF/InferSerializability.hs
Original file line number Diff line number Diff line change
Expand Up @@ -15,21 +15,30 @@ import DA.Daml.LF.Ast
import DA.Daml.LF.TypeChecker.Serializability (CurrentModule(..), serializabilityConditionsDataType)

inferModule :: World -> Module -> Either String Module
inferModule world0 mod0 = do
let modName = moduleName mod0
let dataTypes = moduleDataTypes mod0
let interfaces = NM.namesSet (moduleInterfaces mod0)
let eqs =
[ (dataTypeCon dataType, serializable, deps)
| dataType <- NM.toList dataTypes
, let (serializable, deps) =
case serializabilityConditionsDataType world0 (Just $ CurrentModule modName interfaces) dataType of
Left _ -> (False, [])
Right deps0 -> (True, HS.toList deps0)
]
case leastFixedPointBy (&&) eqs of
Left name -> throwError ("Reference to unknown data type: " ++ show name)
Right serializabilities -> do
let updateDataType dataType =
dataType{dataSerializable = IsSerializable (HMS.lookupDefault False (dataTypeCon dataType) serializabilities)}
pure mod0{moduleDataTypes = NM.map updateDataType dataTypes}
inferModule world0 mod0 =
case moduleName mod0 of
-- Unstable parts of stdlib mustn't contain serializable types, because if they are
-- serializable, then the upgrading checks run on the datatypes and this causes problems.
-- Therefore, we mark the datatypes as not-serializable, so that upgrades checks don't trigger.
-- For more information on this issue, refer to issue
-- https://github.com/digital-asset/daml/issues/19338
ModuleName ["GHC", "Stack", "Types"] -> pure mod0
ModuleName ["DA", "Numeric"] -> pure mod0
_ -> do
let modName = moduleName mod0
let dataTypes = moduleDataTypes mod0
let interfaces = NM.namesSet (moduleInterfaces mod0)
let eqs =
[ (dataTypeCon dataType, serializable, deps)
| dataType <- NM.toList dataTypes
, let (serializable, deps) =
case serializabilityConditionsDataType world0 (Just $ CurrentModule modName interfaces) dataType of
Left _ -> (False, [])
Right deps0 -> (True, HS.toList deps0)
]
case leastFixedPointBy (&&) eqs of
Left name -> throwError ("Reference to unknown data type: " ++ show name)
Right serializabilities -> do
let updateDataType dataType =
dataType{dataSerializable = IsSerializable (HMS.lookupDefault False (dataTypeCon dataType) serializabilities)}
pure mod0{moduleDataTypes = NM.map updateDataType dataTypes}
2 changes: 1 addition & 1 deletion sdk/compiler/damlc/tests/BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,7 @@ da_haskell_test(
"//test-common:upgrades-FailsWhenOldFieldIsDeletedFromTemplate-files",
"//test-common:upgrades-FailsWhenOldFieldIsDeletedFromTemplateChoice-files",
"//test-common:upgrades-FailsWhenTemplateAddsKeyType-files",
"//test-common:upgrades-FailsWhenTemplateChangesKeyType-files",
"//test-common:upgrades-FailsWhenTemplateChangesKeyTypeSuperficially-files",
"//test-common:upgrades-FailsWhenTemplateChoiceChangesItsReturnType-files",
"//test-common:upgrades-FailsWhenTemplateRemovesKeyType-files",
"//test-common:upgrades-FailsWhenTwoDeeplyNestedTypeSynonymsResolveToDifferentDatatypes-files",
Expand Down
Loading
Loading