Skip to content

Commit b540e61

Browse files
author
James Judd
committed
Make the consistent-format analysis-store commit work better
Bring back our read/write mappers, so analysis files are machine-independent and timestamps don't cause problems.
1 parent 2bf252a commit b540e61

File tree

5 files changed

+179
-6
lines changed

5 files changed

+179
-6
lines changed

src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,23 +2,26 @@ package higherkindness.rules_scala
22
package workers.common
33

44
import java.io.File
5+
import java.nio.file.Paths
56
import sbt.internal.inc.Analysis
67
import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
78
import xsbti.compile.AnalysisStore
89
import xsbti.compile.analysis.ReadWriteMappers
910

1011
object AnalysisUtil {
11-
def getAnalysisStore(analysisStoreFile: File, debug: Boolean): AnalysisStore = {
12+
def getAnalysisStore(analysisStoreFile: File, debug: Boolean, isIncremental: Boolean): AnalysisStore = {
13+
val readWriteMappers = AnnexMapper.mappers(Paths.get(""), isIncremental)
14+
1215
if (debug) {
1316
ConsistentFileAnalysisStore.text(
1417
analysisStoreFile,
15-
ReadWriteMappers.getEmptyMappers,
18+
readWriteMappers,
1619
sort = true,
1720
)
1821
} else {
1922
ConsistentFileAnalysisStore.binary(
2023
analysisStoreFile,
21-
ReadWriteMappers.getEmptyMappers,
24+
readWriteMappers,
2225
sort = true,
2326
)
2427
}
Lines changed: 152 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,152 @@
1+
package higherkindness.rules_scala
2+
package workers.common
3+
4+
import com.google.devtools.build.buildjar.jarhelper.JarHelper
5+
import java.io.{File, InputStream, OutputStream, OutputStreamWriter}
6+
import java.nio.charset.StandardCharsets
7+
import java.nio.file.{Files, NoSuchFileException, Path, Paths}
8+
import java.nio.file.attribute.FileTime
9+
import java.util
10+
import java.util.concurrent.ConcurrentHashMap
11+
import java.util.LinkedHashMap
12+
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
13+
import java.util.Optional
14+
import sbt.internal.inc.binary.converters.{ProtobufReaders, ProtobufWriters}
15+
import sbt.internal.inc.Schema.Type.{Projection, Structure}
16+
import sbt.internal.inc.{APIs, Analysis, FarmHash, Hash, LastModified, PlainVirtualFile, PlainVirtualFileConverter, Relations, Schema, SourceInfos, Stamp => StampImpl, Stamper, Stamps}
17+
import sbt.internal.inc.Schema.{Access, AnalyzedClass, Annotation, AnnotationArgument, ClassDefinition, ClassDependencies, ClassLike, Companions, MethodParameter, NameHash, ParameterList, Path => SchemaPath, Qualifier, Type, TypeParameter, UsedName, UsedNames, Values}
18+
import sbt.internal.shaded.com.google.protobuf.GeneratedMessageV3
19+
import sbt.io.IO
20+
import scala.collection.immutable.TreeMap
21+
import xsbti.compile.analysis.{GenericMapper, ReadMapper, ReadWriteMappers, Stamp, WriteMapper}
22+
import xsbti.compile.{AnalysisContents, AnalysisStore, MiniSetup}
23+
import scala.jdk.CollectionConverters._
24+
import xsbti.VirtualFileRef
25+
import java.util.Objects
26+
27+
// TODO: fix git history for this file. Recreate it as a `git mv` so history is preserved.
28+
29+
object AnnexMapper {
  // Placeholder that stands in for the absolute workspace root in written analysis
  // files, so stored paths are machine independent.
  val rootPlaceholder = Paths.get("_ROOT_")

  /**
   * Creates the read/write mappers used when (de)serializing the analysis store.
   *
   * @param root the workspace root that is swapped with [[rootPlaceholder]] on write and restored on read
   * @param isIncremental whether Zinc runs incrementally; controls how LastModified stamps are restored on read
   */
  def mappers(root: Path, isIncremental: Boolean): ReadWriteMappers = {
    new ReadWriteMappers(new AnxReadMapper(root, isIncremental), new AnxWriteMapper(root))
  }

  /**
   * Gets a reproducible/consistent stamp that we can write to the analysis file and end up with reproducible output
   * across machines, jvms, builds, etc.
   *
   * Practically speaking, all we're doing is setting the timestamp in LastModified stamps to a constant value.
   */
  final def getConsistentWriteStamp(stamp: Stamp): Stamp = {
    stamp match {
      case farmHash: FarmHash => farmHash
      case hash: Hash => hash
      // The original timestamp is deliberately discarded, so don't bind it.
      case _: LastModified => new LastModified(JarHelper.DEFAULT_TIMESTAMP)
      case _ => throw new Exception("Unexpected Stamp type encountered when writing.")
    }
  }

  /**
   * Maps a stamp read from the analysis store. In incremental mode LastModified stamps are re-read from disk;
   * otherwise the stored stamp is returned unchanged.
   */
  final def getReadStamp(file: VirtualFileRef, stamp: Stamp, isIncremental: Boolean): Stamp = {
    if (isIncremental) {
      getIncrementalModeReadStamp(file, stamp)
    } else {
      stamp
    }
  }

  /**
   * When in incremental mode we do not want to rely on the timestamp from the AnalysisStore because we're assuming it
   * was set to a constant value when written to the AnalysisStore.
   *
   * Instead, for any LastModified stamps, we read the file's time stamp from disk.
   */
  final def getIncrementalModeReadStamp(file: VirtualFileRef, stamp: Stamp): Stamp = {
    stamp match {
      case farmHash: FarmHash => farmHash
      case hash: Hash => hash
      case _: LastModified =>
        Stamper.forLastModifiedP(PlainVirtualFileConverter.converter.toPath(file))
      case _ => throw new Exception("Unexpected Stamp type encountered when reading.")
    }
  }
}
75+
76+
/**
 * Write-side mapper: before an analysis file is persisted, any path under the workspace root
 * has that root prefix swapped for the machine-independent placeholder, and LastModified
 * stamps are rewritten to a constant timestamp for reproducibility.
 */
final class AnxWriteMapper(root: Path) extends WriteMapper {
  private[this] val absRoot = root.toAbsolutePath

  /** Swaps the absolute root prefix for the placeholder; paths outside the root pass through untouched. */
  private[this] def relativizeToPlaceholder(path: Path): Path =
    if (path.startsWith(absRoot)) AnnexMapper.rootPlaceholder.resolve(absRoot.relativize(path))
    else path

  private[this] def relativizeToPlaceholder(virtualFileRef: VirtualFileRef): Path =
    relativizeToPlaceholder(PlainVirtualFileConverter.converter.toPath(virtualFileRef))

  override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(relativizeToPlaceholder(sourceFile))
  override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(relativizeToPlaceholder(binaryFile))
  override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(relativizeToPlaceholder(productFile))

  override def mapClasspathEntry(classpathEntry: Path): Path = relativizeToPlaceholder(classpathEntry)

  // Compiler options carry no paths we rewrite; pass them through unchanged.
  override def mapJavacOption(javacOption: String): String = javacOption
  override def mapScalacOption(scalacOption: String): String = scalacOption

  override def mapOutputDir(outputDir: Path): Path = relativizeToPlaceholder(outputDir)
  override def mapSourceDir(sourceDir: Path): Path = relativizeToPlaceholder(sourceDir)

  // All stamps get the consistent (constant-timestamp) treatment on write.
  override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp =
    AnnexMapper.getConsistentWriteStamp(sourceStamp)
  override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp =
    AnnexMapper.getConsistentWriteStamp(binaryStamp)
  override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp =
    AnnexMapper.getConsistentWriteStamp(productStamp)

  override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}
114+
115+
/**
 * Read-side mapper: when an analysis file is loaded, any path starting with the
 * machine-independent placeholder has that prefix replaced with this machine's
 * absolute workspace root. Stamps are restored via [[AnnexMapper.getReadStamp]],
 * which re-reads LastModified stamps from disk in incremental mode.
 */
final class AnxReadMapper(root: Path, isIncremental: Boolean) extends ReadMapper {
  private[this] val absRoot = root.toAbsolutePath

  private[this] def resolvePlaceholder(virtualFileRef: VirtualFileRef): Path =
    resolvePlaceholder(PlainVirtualFileConverter.converter.toPath(virtualFileRef))

  /** Replaces the placeholder prefix with the absolute root; other paths pass through untouched. */
  private[this] def resolvePlaceholder(path: Path): Path =
    if (path.startsWith(AnnexMapper.rootPlaceholder))
      absRoot.resolve(AnnexMapper.rootPlaceholder.relativize(path))
    else path

  override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(resolvePlaceholder(sourceFile))
  override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(resolvePlaceholder(binaryFile))
  override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(resolvePlaceholder(productFile))

  override def mapClasspathEntry(classpathEntry: Path): Path = resolvePlaceholder(classpathEntry)

  // Compiler options carry no paths we rewrite; pass them through unchanged.
  override def mapJavacOption(javacOption: String): String = javacOption
  override def mapScalacOption(scalacOption: String): String = scalacOption

  override def mapOutputDir(outputDir: Path): Path = resolvePlaceholder(outputDir)
  override def mapSourceDir(sourceDir: Path): Path = resolvePlaceholder(sourceDir)

  override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp =
    AnnexMapper.getReadStamp(file, sourceStamp, isIncremental)
  override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp =
    AnnexMapper.getReadStamp(file, binaryStamp, isIncremental)
  override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp =
    AnnexMapper.getReadStamp(file, productStamp, isIncremental)

  override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}

src/main/scala/higherkindness/rules_scala/workers/common/BUILD

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ scala_library(
88
visibility = ["//visibility:public"],
99
deps = [
1010
"//src/main/scala/higherkindness/rules_scala/common/args",
11+
"//third_party/bazel/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/jarhelper",
1112
"@annex//:net_sourceforge_argparse4j_argparse4j",
1213
"@annex//:org_scala_sbt_zinc_2_13",
1314
],

src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,7 @@ object ZincRunner extends WorkerMain[Namespace] {
141141

142142
val debug = namespace.getBoolean("debug")
143143
val analysisStoreFile = namespace.get[File]("output_analysis_store")
144-
val analysisStore: AnalysisStore = AnalysisUtil.getAnalysisStore(analysisStoreFile, debug)
144+
val analysisStore: AnalysisStore = AnalysisUtil.getAnalysisStore(analysisStoreFile, debug, usePersistence)
145145

146146
val persistence = persistenceDir.fold[ZincPersistence](NullPersistence) { rootDir =>
147147
val path = namespace.getString("label").replaceAll("^/+", "").replaceAll(raw"[^\w/]", "_")
@@ -211,7 +211,13 @@ object ZincRunner extends WorkerMain[Namespace] {
211211
depMap
212212
.get(file)
213213
.map { analysisStorePath =>
214-
val analysis = AnalysisUtil.getAnalysis(AnalysisUtil.getAnalysisStore(analysisStorePath.toFile, debug))
214+
val analysis = AnalysisUtil.getAnalysis(
215+
AnalysisUtil.getAnalysisStore(
216+
analysisStorePath.toFile,
217+
debug,
218+
isIncremental = usePersistence,
219+
),
220+
)
215221
Analysis.Empty.copy(
216222
apis = analysis.apis,
217223
relations = analysis.relations,
@@ -333,6 +339,9 @@ final class AnxPerClasspathEntryLookup(analyses: Path => Option[CompileAnalysis]
333339
/**
334340
* We create this to deterministically set the hash code of directories otherwise they get set to the
335341
* System.identityHashCode() of an object created during compilation. That results in non-determinism.
342+
*
343+
* TODO: Get rid of this once the upstream fix is released:
344+
* https://github.com/sbt/zinc/commit/b4db1476d7fdb2c530a97c543ec9710c13ac58e3
336345
*/
337346
final class DeterministicDirectoryHashExternalHooks extends ExternalHooks.Lookup {
338347
// My understanding is that setting all these to None is the same as the

src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,15 @@ object TestRunner {
126126
val analysisStoreFile = runPath.resolve(testNamespace.get[File]("analysis_store").toPath)
127127
val apis =
128128
try {
129-
AnalysisUtil.getAnalysis(AnalysisUtil.getAnalysisStore(analysisStoreFile.toFile, false)).apis
129+
AnalysisUtil
130+
.getAnalysis(
131+
AnalysisUtil.getAnalysisStore(
132+
analysisStoreFile.toFile,
133+
debug = false,
134+
isIncremental = false,
135+
),
136+
)
137+
.apis
130138
} catch {
131139
case NonFatal(e) => throw new Exception(s"Failed to load APIs from analysis store: $analysisStoreFile", e)
132140
}

0 commit comments

Comments
 (0)