Get the consistent format commit to work better
Bring back our read/write mappers, so things are machine-independent and
timestamps don't cause problems.
James Judd committed Jul 5, 2024
1 parent 2bf252a commit b540e61
Showing 5 changed files with 179 additions and 6 deletions.
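
The core idea behind those mappers, as a minimal self-contained sketch (PathMappingSketch and its method names are illustrative, not part of this commit; the real logic lives in AnnexMapper below): machine-specific absolute paths are rewritten against a _ROOT_ placeholder on write and resolved back to local paths on read, so the serialized analysis is byte-identical across machines.

import java.nio.file.{Path, Paths}

object PathMappingSketch {
  val rootPlaceholder: Path = Paths.get("_ROOT_")

  // On write: strip the machine-specific root, leaving a portable path.
  def relativizeForWrite(root: Path, p: Path): Path =
    if (p.startsWith(root)) rootPlaceholder.resolve(root.relativize(p)) else p

  // On read: resolve the portable path back against this machine's root.
  def resolveForRead(root: Path, p: Path): Path =
    if (p.startsWith(rootPlaceholder)) root.resolve(rootPlaceholder.relativize(p)) else p

  def main(args: Array[String]): Unit = {
    val root = Paths.get("/home/alice/workspace")
    val written = relativizeForWrite(root, root.resolve("src/Foo.scala"))
    println(written)                       // _ROOT_/src/Foo.scala on every machine
    println(resolveForRead(root, written)) // back to the local absolute path
  }
}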
workers/common/AnalysisUtil.scala
@@ -2,23 +2,26 @@ package higherkindness.rules_scala
package workers.common

import java.io.File
+import java.nio.file.Paths
import sbt.internal.inc.Analysis
import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
import xsbti.compile.AnalysisStore
import xsbti.compile.analysis.ReadWriteMappers

object AnalysisUtil {
-  def getAnalysisStore(analysisStoreFile: File, debug: Boolean): AnalysisStore = {
+  def getAnalysisStore(analysisStoreFile: File, debug: Boolean, isIncremental: Boolean): AnalysisStore = {
+    val readWriteMappers = AnnexMapper.mappers(Paths.get(""), isIncremental)
+
    if (debug) {
      ConsistentFileAnalysisStore.text(
        analysisStoreFile,
-        ReadWriteMappers.getEmptyMappers,
+        readWriteMappers,
        sort = true,
      )
    } else {
      ConsistentFileAnalysisStore.binary(
        analysisStoreFile,
-        ReadWriteMappers.getEmptyMappers,
+        readWriteMappers,
        sort = true,
      )
    }
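
For orientation, the shape of a call under the new signature (a hedged sketch, assuming it compiles alongside AnalysisUtil in workers.common; the file name is made up):

import java.io.File

object GetAnalysisStoreUsage {
  def main(args: Array[String]): Unit = {
    // isIncremental = true makes reads re-stamp LastModified entries from
    // disk; false trusts the constant timestamps the write mapper recorded.
    val store = AnalysisUtil.getAnalysisStore(
      new File("out.analysis"),
      debug = false,
      isIncremental = true,
    )
    println(store) // an xsbti.compile.AnalysisStore backed by the consistent format
  }
}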
workers/common/AnnexMapper.scala (new file)
@@ -0,0 +1,152 @@
package higherkindness.rules_scala
package workers.common

import com.google.devtools.build.buildjar.jarhelper.JarHelper
import java.io.{File, InputStream, OutputStream, OutputStreamWriter}
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, NoSuchFileException, Path, Paths}
import java.nio.file.attribute.FileTime
import java.util
import java.util.concurrent.ConcurrentHashMap
import java.util.LinkedHashMap
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
import java.util.Optional
import sbt.internal.inc.binary.converters.{ProtobufReaders, ProtobufWriters}
import sbt.internal.inc.Schema.Type.{Projection, Structure}
import sbt.internal.inc.{APIs, Analysis, FarmHash, Hash, LastModified, PlainVirtualFile, PlainVirtualFileConverter, Relations, Schema, SourceInfos, Stamp => StampImpl, Stamper, Stamps}
import sbt.internal.inc.Schema.{Access, AnalyzedClass, Annotation, AnnotationArgument, ClassDefinition, ClassDependencies, ClassLike, Companions, MethodParameter, NameHash, ParameterList, Path => SchemaPath, Qualifier, Type, TypeParameter, UsedName, UsedNames, Values}
import sbt.internal.shaded.com.google.protobuf.GeneratedMessageV3
import sbt.io.IO
import scala.collection.immutable.TreeMap
import xsbti.compile.analysis.{GenericMapper, ReadMapper, ReadWriteMappers, Stamp, WriteMapper}
import xsbti.compile.{AnalysisContents, AnalysisStore, MiniSetup}
import scala.jdk.CollectionConverters._
import xsbti.VirtualFileRef
import java.util.Objects

// TODO: fix git for this file. Make it a mv to keep history.

object AnnexMapper {
  val rootPlaceholder = Paths.get("_ROOT_")
  def mappers(root: Path, isIncremental: Boolean) = {
    new ReadWriteMappers(new AnxReadMapper(root, isIncremental), new AnxWriteMapper(root))
  }

  /**
   * Gets a reproducible/consistent stamp that we can write to the analysis file and end up with reproducible output
   * across machines, jvms, builds, etc.
   *
   * Practically speaking, all we're doing is setting the timestamp in LastModified stamps to a constant value.
   */
  final def getConsistentWriteStamp(stamp: Stamp): Stamp = {
    stamp match {
      case farmHash: FarmHash => farmHash
      case hash: Hash => hash
      case lastModified: LastModified => new LastModified(JarHelper.DEFAULT_TIMESTAMP)
      case _ => throw new Exception("Unexpected Stamp type encountered when writing.")
    }
  }

  final def getReadStamp(file: VirtualFileRef, stamp: Stamp, isIncremental: Boolean): Stamp = {
    if (isIncremental) {
      getIncrementalModeReadStamp(file, stamp)
    } else {
      stamp
    }
  }

  /**
   * When in incremental mode we do not want to rely on the timestamp from the AnalysisStore because we're assuming it
   * was set to a constant value when written to the AnalysisStore.
   *
   * Instead, for any LastModified stamps, we read the file's time stamp from disk.
   */
  final def getIncrementalModeReadStamp(file: VirtualFileRef, stamp: Stamp): Stamp = {
    stamp match {
      case farmHash: FarmHash => farmHash
      case hash: Hash => hash
      case lastModified: LastModified => {
        Stamper.forLastModifiedP(PlainVirtualFileConverter.converter.toPath(file))
      }
      case _ => throw new Exception("Unexpected Stamp type encountered when reading")
    }
  }
}

final class AnxWriteMapper(root: Path) extends WriteMapper {
  private[this] val rootAbs = root.toAbsolutePath

  private[this] def mapFile(path: Path): Path = {
    if (path.startsWith(rootAbs)) {
      AnnexMapper.rootPlaceholder.resolve(rootAbs.relativize(path))
    } else {
      path
    }
  }

  private[this] def mapFile(virtualFileRef: VirtualFileRef): Path = {
    mapFile(PlainVirtualFileConverter.converter.toPath(virtualFileRef))
  }

  override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(sourceFile))
  override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(binaryFile))
  override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(productFile))

  override def mapClasspathEntry(classpathEntry: Path): Path = mapFile(classpathEntry)
  override def mapJavacOption(javacOption: String): String = javacOption
  override def mapScalacOption(scalacOption: String): String = scalacOption

  override def mapOutputDir(outputDir: Path): Path = mapFile(outputDir)
  override def mapSourceDir(sourceDir: Path): Path = mapFile(sourceDir)

  override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp = {
    AnnexMapper.getConsistentWriteStamp(sourceStamp)
  }
  override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp = {
    AnnexMapper.getConsistentWriteStamp(binaryStamp)
  }
  override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp = {
    AnnexMapper.getConsistentWriteStamp(productStamp)
  }

  override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}

final class AnxReadMapper(root: Path, isIncremental: Boolean) extends ReadMapper {
  private[this] val rootAbs = root.toAbsolutePath

  private[this] def mapFile(virtualFileRef: VirtualFileRef): Path = {
    mapFile(PlainVirtualFileConverter.converter.toPath(virtualFileRef))
  }

  private[this] def mapFile(path: Path): Path = {
    if (path.startsWith(AnnexMapper.rootPlaceholder)) {
      rootAbs.resolve(AnnexMapper.rootPlaceholder.relativize(path))
    } else {
      path
    }
  }

  override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(sourceFile))
  override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(binaryFile))
  override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(productFile))

  override def mapClasspathEntry(classpathEntry: Path): Path = mapFile(classpathEntry)
  override def mapJavacOption(javacOption: String): String = javacOption
  override def mapScalacOption(scalacOption: String): String = scalacOption

  override def mapOutputDir(outputDir: Path): Path = mapFile(outputDir)
  override def mapSourceDir(sourceDir: Path): Path = mapFile(sourceDir)

  override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp = {
    AnnexMapper.getReadStamp(file, sourceStamp, isIncremental)
  }
  override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp = {
    AnnexMapper.getReadStamp(file, binaryStamp, isIncremental)
  }
  override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp = {
    AnnexMapper.getReadStamp(file, productStamp, isIncremental)
  }

  override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}
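
And a hedged end-to-end sketch of how these mappers plug into the consistent analysis store (the store file path is made up, and the snippet assumes it lives in the same package as AnnexMapper and that the file was produced by a previous compile):

import java.io.File
import java.nio.file.Paths
import sbt.internal.inc.consistent.ConsistentFileAnalysisStore

object MapperRoundTrip {
  def main(args: Array[String]): Unit = {
    // Writes go through AnxWriteMapper (constant stamps, _ROOT_-relative
    // paths); reads go through AnxReadMapper (paths resolved locally, stamps
    // re-read from disk only when incremental).
    val store = ConsistentFileAnalysisStore.binary(
      new File("example.analysis"),
      AnnexMapper.mappers(Paths.get(""), isIncremental = false),
      sort = true,
    )
    store.get().ifPresent(contents => println(contents.getAnalysis))
  }
}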
workers/common/BUILD
@@ -8,6 +8,7 @@ scala_library(
    visibility = ["//visibility:public"],
    deps = [
        "//src/main/scala/higherkindness/rules_scala/common/args",
+        "//third_party/bazel/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/jarhelper",
        "@annex//:net_sourceforge_argparse4j_argparse4j",
        "@annex//:org_scala_sbt_zinc_2_13",
    ],
ZincRunner.scala
@@ -141,7 +141,7 @@ object ZincRunner extends WorkerMain[Namespace] {

    val debug = namespace.getBoolean("debug")
    val analysisStoreFile = namespace.get[File]("output_analysis_store")
-    val analysisStore: AnalysisStore = AnalysisUtil.getAnalysisStore(analysisStoreFile, debug)
+    val analysisStore: AnalysisStore = AnalysisUtil.getAnalysisStore(analysisStoreFile, debug, usePersistence)

    val persistence = persistenceDir.fold[ZincPersistence](NullPersistence) { rootDir =>
      val path = namespace.getString("label").replaceAll("^/+", "").replaceAll(raw"[^\w/]", "_")
@@ -211,7 +211,13 @@ object ZincRunner extends WorkerMain[Namespace] {
      depMap
        .get(file)
        .map { analysisStorePath =>
-          val analysis = AnalysisUtil.getAnalysis(AnalysisUtil.getAnalysisStore(analysisStorePath.toFile, debug))
+          val analysis = AnalysisUtil.getAnalysis(
+            AnalysisUtil.getAnalysisStore(
+              analysisStorePath.toFile,
+              debug,
+              isIncremental = usePersistence,
+            ),
+          )
          Analysis.Empty.copy(
            apis = analysis.apis,
            relations = analysis.relations,
@@ -333,6 +339,9 @@ final class AnxPerClasspathEntryLookup(analyses: Path => Option[CompileAnalysis]
/**
 * We create this to deterministically set the hash code of directories otherwise they get set to the
 * System.identityHashCode() of an object created during compilation. That results in non-determinism.
+ *
+ * TODO: Get rid of this once the upstream fix is released:
+ * https://github.com/sbt/zinc/commit/b4db1476d7fdb2c530a97c543ec9710c13ac58e3
 */
final class DeterministicDirectoryHashExternalHooks extends ExternalHooks.Lookup {
  // My understanding is that setting all these to None is the same as the
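
The non-determinism that comment describes is easy to reproduce in isolation; a minimal illustration (not from the commit):

object IdentityHashDemo {
  def main(args: Array[String]): Unit = {
    // identityHashCode is tied to the object's identity within this JVM run,
    // so it changes from build to build -> non-reproducible analysis output.
    println(System.identityHashCode(new Object))
  }
}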
TestRunner.scala
@@ -126,7 +126,15 @@ object TestRunner {
    val analysisStoreFile = runPath.resolve(testNamespace.get[File]("analysis_store").toPath)
    val apis =
      try {
-        AnalysisUtil.getAnalysis(AnalysisUtil.getAnalysisStore(analysisStoreFile.toFile, false)).apis
+        AnalysisUtil
+          .getAnalysis(
+            AnalysisUtil.getAnalysisStore(
+              analysisStoreFile.toFile,
+              debug = false,
+              isIncremental = false,
+            ),
+          )
+          .apis
      } catch {
        case NonFatal(e) => throw new Exception(s"Failed to load APIs from analysis store: $analysisStoreFile", e)
      }
