From b540e616f4be42c0e90c746c339993ba8ae2901a Mon Sep 17 00:00:00 2001
From: James Judd
Date: Thu, 4 Jul 2024 21:26:30 -0600
Subject: [PATCH] Get the consistent format commit to work better

Bring back our read/write mappers, so the analysis output is
machine-independent and timestamps don't cause problems.
---
 .../workers/common/AnalysisUtil.scala         |   9 +-
 .../workers/common/AnnexMapper.scala          | 152 ++++++++++++++++++
 .../rules_scala/workers/common/BUILD          |   1 +
 .../workers/zinc/compile/ZincRunner.scala     |  13 +-
 .../workers/zinc/test/TestRunner.scala        |  10 +-
 5 files changed, 179 insertions(+), 6 deletions(-)
 create mode 100644 src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala

diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala b/src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala
index 97b0e7596..b2fb9f123 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala
@@ -2,23 +2,26 @@ package higherkindness.rules_scala
 package workers.common
 
 import java.io.File
+import java.nio.file.Paths
 import sbt.internal.inc.Analysis
 import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
 import xsbti.compile.AnalysisStore
 import xsbti.compile.analysis.ReadWriteMappers
 
 object AnalysisUtil {
-  def getAnalysisStore(analysisStoreFile: File, debug: Boolean): AnalysisStore = {
+  def getAnalysisStore(analysisStoreFile: File, debug: Boolean, isIncremental: Boolean): AnalysisStore = {
+    val readWriteMappers = AnnexMapper.mappers(Paths.get(""), isIncremental)
+
     if (debug) {
       ConsistentFileAnalysisStore.text(
         analysisStoreFile,
-        ReadWriteMappers.getEmptyMappers,
+        readWriteMappers,
         sort = true,
       )
     } else {
       ConsistentFileAnalysisStore.binary(
         analysisStoreFile,
-        ReadWriteMappers.getEmptyMappers,
+        readWriteMappers,
         sort = true,
       )
     }
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala b/src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala
new file mode 100644
index 000000000..e573d9a8d
--- /dev/null
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala
@@ -0,0 +1,152 @@
+package higherkindness.rules_scala
+package workers.common
+
+import com.google.devtools.build.buildjar.jarhelper.JarHelper
+import java.io.{File, InputStream, OutputStream, OutputStreamWriter}
+import java.nio.charset.StandardCharsets
+import java.nio.file.{Files, NoSuchFileException, Path, Paths}
+import java.nio.file.attribute.FileTime
+import java.util
+import java.util.concurrent.ConcurrentHashMap
+import java.util.LinkedHashMap
+import java.util.zip.{GZIPInputStream, GZIPOutputStream}
+import java.util.Optional
+import sbt.internal.inc.binary.converters.{ProtobufReaders, ProtobufWriters}
+import sbt.internal.inc.Schema.Type.{Projection, Structure}
+import sbt.internal.inc.{APIs, Analysis, FarmHash, Hash, LastModified, PlainVirtualFile, PlainVirtualFileConverter, Relations, Schema, SourceInfos, Stamp => StampImpl, Stamper, Stamps}
+import sbt.internal.inc.Schema.{Access, AnalyzedClass, Annotation, AnnotationArgument, ClassDefinition, ClassDependencies, ClassLike, Companions, MethodParameter, NameHash, ParameterList, Path => SchemaPath, Qualifier, Type, TypeParameter, UsedName, UsedNames, Values}
+import sbt.internal.shaded.com.google.protobuf.GeneratedMessageV3
+import sbt.io.IO
+import scala.collection.immutable.TreeMap
+import xsbti.compile.analysis.{GenericMapper, ReadMapper,
 ReadWriteMappers, Stamp, WriteMapper}
+import xsbti.compile.{AnalysisContents, AnalysisStore, MiniSetup}
+import scala.jdk.CollectionConverters._
+import xsbti.VirtualFileRef
+import java.util.Objects
+
+// TODO: fix git for this file. Make it a mv to keep history.
+
+object AnnexMapper {
+  val rootPlaceholder = Paths.get("_ROOT_")
+  def mappers(root: Path, isIncremental: Boolean) = {
+    new ReadWriteMappers(new AnxReadMapper(root, isIncremental), new AnxWriteMapper(root))
+  }
+
+  /**
+   * Gets a reproducible/consistent stamp that we can write to the analysis file and end up with reproducible output
+   * across machines, JVMs, builds, etc.
+   *
+   * Practically speaking, all we're doing is setting the timestamp in LastModified stamps to a constant value.
+   */
+  final def getConsistentWriteStamp(stamp: Stamp): Stamp = {
+    stamp match {
+      case farmHash: FarmHash => farmHash
+      case hash: Hash => hash
+      case _: LastModified => new LastModified(JarHelper.DEFAULT_TIMESTAMP)
+      case _ => throw new Exception("Unexpected Stamp type encountered when writing.")
+    }
+  }
+
+  final def getReadStamp(file: VirtualFileRef, stamp: Stamp, isIncremental: Boolean): Stamp = {
+    if (isIncremental) {
+      getIncrementalModeReadStamp(file, stamp)
+    } else {
+      stamp
+    }
+  }
+
+  /**
+   * When in incremental mode, we do not want to rely on the timestamp from the AnalysisStore because we're assuming
+   * it was set to a constant value when written to the AnalysisStore.
+   *
+   * Instead, for any LastModified stamps, we read the file's timestamp from disk.
+   */
+  final def getIncrementalModeReadStamp(file: VirtualFileRef, stamp: Stamp): Stamp = {
+    stamp match {
+      case farmHash: FarmHash => farmHash
+      case hash: Hash => hash
+      case _: LastModified => {
+        Stamper.forLastModifiedP(PlainVirtualFileConverter.converter.toPath(file))
+      }
+      case _ => throw new Exception("Unexpected Stamp type encountered when reading.")
+    }
+  }
+}
+
+final class AnxWriteMapper(root: Path) extends WriteMapper {
+  private[this] val rootAbs = root.toAbsolutePath
+
+  private[this] def mapFile(path: Path): Path = {
+    if (path.startsWith(rootAbs)) {
+      AnnexMapper.rootPlaceholder.resolve(rootAbs.relativize(path))
+    } else {
+      path
+    }
+  }
+
+  private[this] def mapFile(virtualFileRef: VirtualFileRef): Path = {
+    mapFile(PlainVirtualFileConverter.converter.toPath(virtualFileRef))
+  }
+
+  override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(sourceFile))
+  override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(binaryFile))
+  override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(productFile))
+
+  override def mapClasspathEntry(classpathEntry: Path): Path = mapFile(classpathEntry)
+  override def mapJavacOption(javacOption: String): String = javacOption
+  override def mapScalacOption(scalacOption: String): String = scalacOption
+
+  override def mapOutputDir(outputDir: Path): Path = mapFile(outputDir)
+  override def mapSourceDir(sourceDir: Path): Path = mapFile(sourceDir)
+
+  override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp = {
+    AnnexMapper.getConsistentWriteStamp(sourceStamp)
+  }
+  override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp = {
+    AnnexMapper.getConsistentWriteStamp(binaryStamp)
+  }
+  override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp = {
+    AnnexMapper.getConsistentWriteStamp(productStamp)
+  }
+
+  override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
+}
+
+final class AnxReadMapper(root: Path, isIncremental: Boolean) extends ReadMapper {
+  private[this] val rootAbs = root.toAbsolutePath
+
+  private[this] def mapFile(virtualFileRef: VirtualFileRef): Path = {
+    mapFile(PlainVirtualFileConverter.converter.toPath(virtualFileRef))
+  }
+
+  private[this] def mapFile(path: Path): Path = {
+    if (path.startsWith(AnnexMapper.rootPlaceholder)) {
+      rootAbs.resolve(AnnexMapper.rootPlaceholder.relativize(path))
+    } else {
+      path
+    }
+  }
+
+  override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(sourceFile))
+  override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(binaryFile))
+  override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(productFile))
+
+  override def mapClasspathEntry(classpathEntry: Path): Path = mapFile(classpathEntry)
+  override def mapJavacOption(javacOption: String): String = javacOption
+  override def mapScalacOption(scalacOption: String): String = scalacOption
+
+  override def mapOutputDir(outputDir: Path): Path = mapFile(outputDir)
+  override def mapSourceDir(sourceDir: Path): Path = mapFile(sourceDir)
+
+  override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp = {
+    AnnexMapper.getReadStamp(file, sourceStamp, isIncremental)
+  }
+  override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp = {
+    AnnexMapper.getReadStamp(file, binaryStamp, isIncremental)
+  }
+  override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp = {
+    AnnexMapper.getReadStamp(file, productStamp, isIncremental)
+  }
+
+  override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
+}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/BUILD b/src/main/scala/higherkindness/rules_scala/workers/common/BUILD
index 74635ee1b..973bc7a1d 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/BUILD
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/BUILD
@@ -8,6 +8,7 @@ scala_library(
     visibility = ["//visibility:public"],
     deps = [
         "//src/main/scala/higherkindness/rules_scala/common/args",
+        "//third_party/bazel/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/jarhelper",
         "@annex//:net_sourceforge_argparse4j_argparse4j",
         "@annex//:org_scala_sbt_zinc_2_13",
     ],
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala
index c4b6b4b5d..ac3277ea8 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala
@@ -141,7 +141,7 @@ object ZincRunner extends WorkerMain[Namespace] {
     val debug = namespace.getBoolean("debug")
     val analysisStoreFile = namespace.get[File]("output_analysis_store")
-    val analysisStore: AnalysisStore = AnalysisUtil.getAnalysisStore(analysisStoreFile, debug)
+    val analysisStore: AnalysisStore = AnalysisUtil.getAnalysisStore(analysisStoreFile, debug, usePersistence)
 
     val persistence = persistenceDir.fold[ZincPersistence](NullPersistence) { rootDir =>
       val path = namespace.getString("label").replaceAll("^/+", "").replaceAll(raw"[^\w/]", "_")
@@ -211,7 +211,13 @@ depMap
           .get(file)
           .map { analysisStorePath =>
-            val analysis = 
AnalysisUtil.getAnalysis(AnalysisUtil.getAnalysisStore(analysisStorePath.toFile, debug)) + val analysis = AnalysisUtil.getAnalysis( + AnalysisUtil.getAnalysisStore( + analysisStorePath.toFile, + debug, + isIncremental = usePersistence, + ), + ) Analysis.Empty.copy( apis = analysis.apis, relations = analysis.relations, @@ -333,6 +339,9 @@ final class AnxPerClasspathEntryLookup(analyses: Path => Option[CompileAnalysis] /** * We create this to deterministically set the hash code of directories otherwise they get set to the * System.identityHashCode() of an object created during compilation. That results in non-determinism. + * + * TODO: Get rid of this once the upstream fix is released: + * https://github.com/sbt/zinc/commit/b4db1476d7fdb2c530a97c543ec9710c13ac58e3 */ final class DeterministicDirectoryHashExternalHooks extends ExternalHooks.Lookup { // My understanding is that setting all these to None is the same as the diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala index 75e56a6fc..cb650e0ec 100644 --- a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala +++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala @@ -126,7 +126,15 @@ object TestRunner { val analysisStoreFile = runPath.resolve(testNamespace.get[File]("analysis_store").toPath) val apis = try { - AnalysisUtil.getAnalysis(AnalysisUtil.getAnalysisStore(analysisStoreFile.toFile, false)).apis + AnalysisUtil + .getAnalysis( + AnalysisUtil.getAnalysisStore( + analysisStoreFile.toFile, + debug = false, + isIncremental = false, + ), + ) + .apis } catch { case NonFatal(e) => throw new Exception(s"Failed to load APIs from analysis store: $analysisStoreFile", e) }
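
-- 

Reviewer note, not part of the patch: the sketch below is a hypothetical
example of how the new pieces compose, using only APIs that appear in this
change; the object name and the analysis file path are made up. On write,
AnxWriteMapper rewrites paths under the root to the _ROOT_ placeholder and
pins LastModified stamps to JarHelper.DEFAULT_TIMESTAMP, so the same inputs
should produce identical analysis files across machines and sandboxes. On
read, AnxReadMapper resolves the placeholder against the local root and,
when isIncremental = true, re-stamps LastModified entries from disk.

  import java.io.File
  import java.nio.file.Paths
  import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
  import xsbti.compile.{AnalysisContents, AnalysisStore}

  object AnnexMapperExample {
    def main(args: Array[String]): Unit = {
      // Relativize against the working directory, as AnalysisUtil does with
      // Paths.get(""). isIncremental = false keeps the constant timestamps
      // stored in the file instead of re-reading them from disk.
      val mappers = AnnexMapper.mappers(Paths.get(""), isIncremental = false)
      val store: AnalysisStore =
        ConsistentFileAnalysisStore.binary(new File("example.analysis"), mappers, sort = true)

      // Reading applies AnxReadMapper: _ROOT_ becomes this machine's root,
      // so an analysis file written on another machine still resolves here.
      store.get().ifPresent((contents: AnalysisContents) => println(contents.getAnalysis))
    }
  }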