[2.x] Replace Compilation Timestamp with content hashes #1430

Merged 2 commits on Oct 6, 2024
@@ -24,6 +24,12 @@ public static AnalyzedClass create(long _compilationTimestamp, String _name, xsb
public static AnalyzedClass of(long _compilationTimestamp, String _name, xsbti.api.Lazy<Companions> _api, int _apiHash, NameHash[] _nameHashes, boolean _hasMacro, int _extraHash, String _provenance) {
return new AnalyzedClass(_compilationTimestamp, _name, _api, _apiHash, _nameHashes, _hasMacro, _extraHash, _provenance);
}
public static AnalyzedClass create(long _compilationTimestamp, String _name, xsbti.api.Lazy<Companions> _api, int _apiHash, NameHash[] _nameHashes, boolean _hasMacro, int _extraHash, String _provenance, long _bytecodeHash, long _transitiveBytecodeHash) {
return new AnalyzedClass(_compilationTimestamp, _name, _api, _apiHash, _nameHashes, _hasMacro, _extraHash, _provenance, _bytecodeHash, _transitiveBytecodeHash);
}
public static AnalyzedClass of(long _compilationTimestamp, String _name, xsbti.api.Lazy<Companions> _api, int _apiHash, NameHash[] _nameHashes, boolean _hasMacro, int _extraHash, String _provenance, long _bytecodeHash, long _transitiveBytecodeHash) {
return new AnalyzedClass(_compilationTimestamp, _name, _api, _apiHash, _nameHashes, _hasMacro, _extraHash, _provenance, _bytecodeHash, _transitiveBytecodeHash);
}
private long compilationTimestamp;
private String name;
private xsbti.api.Lazy<Companions> api;
@@ -32,6 +38,8 @@ public static AnalyzedClass of(long _compilationTimestamp, String _name, xsbti.a
private boolean hasMacro;
private int extraHash;
private String provenance;
private long bytecodeHash;
private long transitiveBytecodeHash;
protected AnalyzedClass(long _compilationTimestamp, String _name, xsbti.api.Lazy<Companions> _api, int _apiHash, NameHash[] _nameHashes, boolean _hasMacro) {
super();
compilationTimestamp = _compilationTimestamp;
@@ -42,6 +50,8 @@ protected AnalyzedClass(long _compilationTimestamp, String _name, xsbti.api.Lazy
hasMacro = _hasMacro;
extraHash = apiHash;
provenance = "";
bytecodeHash = 0;
transitiveBytecodeHash = 0;
}
protected AnalyzedClass(long _compilationTimestamp, String _name, xsbti.api.Lazy<Companions> _api, int _apiHash, NameHash[] _nameHashes, boolean _hasMacro, int _extraHash) {
super();
@@ -53,6 +63,8 @@ protected AnalyzedClass(long _compilationTimestamp, String _name, xsbti.api.Lazy
hasMacro = _hasMacro;
extraHash = _extraHash;
provenance = "";
bytecodeHash = 0;
transitiveBytecodeHash = 0;
}
protected AnalyzedClass(long _compilationTimestamp, String _name, xsbti.api.Lazy<Companions> _api, int _apiHash, NameHash[] _nameHashes, boolean _hasMacro, int _extraHash, String _provenance) {
super();
@@ -64,6 +76,21 @@ protected AnalyzedClass(long _compilationTimestamp, String _name, xsbti.api.Lazy
hasMacro = _hasMacro;
extraHash = _extraHash;
provenance = _provenance;
bytecodeHash = 0;
transitiveBytecodeHash = 0;
}
protected AnalyzedClass(long _compilationTimestamp, String _name, xsbti.api.Lazy<Companions> _api, int _apiHash, NameHash[] _nameHashes, boolean _hasMacro, int _extraHash, String _provenance, long _bytecodeHash, long _transitiveBytecodeHash) {
super();
compilationTimestamp = _compilationTimestamp;
name = _name;
api = _api;
apiHash = _apiHash;
nameHashes = _nameHashes;
hasMacro = _hasMacro;
extraHash = _extraHash;
provenance = _provenance;
bytecodeHash = _bytecodeHash;
transitiveBytecodeHash = _transitiveBytecodeHash;
}

public long compilationTimestamp() {
@@ -95,29 +122,43 @@ public int extraHash() {
public String provenance() {
return this.provenance;
}
/** A hash of the generated bytecode of the source file hosting the class */
public long bytecodeHash() {
return this.bytecodeHash;
}
/** A hash of the generated bytecode of all upstream dependencies */
public long transitiveBytecodeHash() {
return this.transitiveBytecodeHash;
}
public AnalyzedClass withCompilationTimestamp(long compilationTimestamp) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance);
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withName(String name) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance);
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withApi(xsbti.api.Lazy<Companions> api) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance);
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withApiHash(int apiHash) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance);
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withNameHashes(NameHash[] nameHashes) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance);
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withHasMacro(boolean hasMacro) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance);
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withExtraHash(int extraHash) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance);
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withProvenance(String provenance) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance);
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withBytecodeHash(long bytecodeHash) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public AnalyzedClass withTransitiveBytecodeHash(long transitiveBytecodeHash) {
return new AnalyzedClass(compilationTimestamp, name, api, apiHash, nameHashes, hasMacro, extraHash, provenance, bytecodeHash, transitiveBytecodeHash);
}
public boolean equals(Object obj) {
return this == obj; // We have lazy members, so use object identity to avoid circularity.
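The rest of the generated class threads the two new fields through every copy method. A minimal sketch (not part of the diff) of attaching the hashes to an existing instance, using only the accessors added above:

// Hedged sketch, not from the PR: each call returns a fresh AnalyzedClass
// with the corresponding hash replaced.
def withHashes(ac: xsbti.api.AnalyzedClass, perSource: Long, transitive: Long): xsbti.api.AnalyzedClass =
  ac.withBytecodeHash(perSource).withTransitiveBytecodeHash(transitive)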
18 changes: 18 additions & 0 deletions internal/compiler-interface/src/main/contraband/other.json
@@ -28,6 +28,24 @@
"Combined with a way to tell if the provenance has changed,",
"it can be used to short-circuit the 'lookupAnalyzedClass' operation."
]
},
{
"name": "bytecodeHash",
"type": "long",
"default": "0",
"since": "2.0.0",
"doc": [
"A hash of generated bytecode of source file hosting the class"
]
},
{
"name": "transitiveBytecodeHash",
"type": "long",
"default": "0",
"since": "2.0.0",
"doc": [
"A hash of generated bytecode of all upstream dependencies"
]
}
]
},
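Both new fields are declared with default 0 and since 2.0.0, which is why the generated AnalyzedClass.java above keeps the older constructors and factories and fills the hashes with 0. A hypothetical helper (not part of the change) for telling entries that carry real hashes apart from ones read back with the defaults:

// Hedged sketch: entries read from an analysis produced before this change
// come back with the contraband defaults, i.e. both hashes equal to 0.
def hasBytecodeHashes(ac: xsbti.api.AnalyzedClass): Boolean =
  ac.bytecodeHash() != 0 || ac.transitiveBytecodeHash() != 0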
@@ -99,6 +99,15 @@ object Analysis {

case class Sources(java: Set[String], scala: Set[String])

def computeBytecodeHash(
localProducts: scala.collection.Set[LocalProduct],
nonLocalProduct: scala.collection.Set[NonLocalProduct]
): Int = {
val hashes =
localProducts.map(_.classFileStamp.getHash) ++ nonLocalProduct.map(_.classFileStamp.getHash)
hashes.hashCode()
}

def sources(a: Analysis): Sources = {
def sourceFileForClass(className: String): VirtualFileRef =
a.relations.definesClass(className).headOption.getOrElse {
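computeBytecodeHash folds the class-file stamp hashes of every local and non-local product of a source file into a single Int (the set's hashCode, so the result does not depend on iteration order). The intended call pattern, mirroring the call site added later in AnalysisCallback, where localProds, nonLocalProds and classesInSrc are already in scope per source file:

// Sketch of the call pattern (names match the AnalysisCallback diff below).
val bytecodeHash = Analysis.computeBytecodeHash(localProds, nonLocalProds)
val analyzedApis = classesInSrc.map(analyzeClass(_, bytecodeHash))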
@@ -18,7 +18,7 @@ import java.nio.file.{ Files, Path, Paths }
import java.{ util => ju }
import ju.{ EnumSet, Optional, UUID }
import ju.concurrent.atomic.AtomicBoolean
import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct }
import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct, computeBytecodeHash }
import sbt.internal.inc.JavaInterfaceUtil.EnrichOption
import sbt.util.{ InterfaceUtil, Level, Logger }
import sbt.util.InterfaceUtil.{ jl2l, jo2o, l2jl, t2 }
@@ -946,7 +946,7 @@ private final class AnalysisCallback(

private def getAnalysis: Analysis = {
val analysis0 = addProductsAndDeps(Analysis.empty)
addUsedNames(addCompilation(analysis0))
addUsedNames(addCompilation(addTransitiveBytecodeHash(analysis0)))
}

def getPostJavaAnalysis: Analysis = {
@@ -965,6 +965,24 @@
)
}

private def addTransitiveBytecodeHash(base: Analysis): Analysis = {
import base.{ apis, relations }
val findUpstream = relations.memberRef.internal.forward _
val internalAPIs = apis.internal.map { case (className, analyzedClass) =>
if (!analyzedClass.hasMacro) {
(className, analyzedClass)
} else {
val upstreamClasses =
IncrementalCommon.transitiveDeps(Set(className), log, logging = false)(findUpstream)
val upstreamAnalyzedClasses = upstreamClasses.map(apis.internalAPI)
val upstreamHashes = upstreamAnalyzedClasses.map(_.bytecodeHash())
(className, analyzedClass.withTransitiveBytecodeHash(upstreamHashes.hashCode()))
}
}
val APIs = new MAPIs(internalAPIs, apis.external)
base.copy(apis = APIs)
}

private def companionsWithHash(className: String): (Companions, HashAPI.Hash, HashAPI.Hash) = {
val emptyHash = -1
val emptyClass =
@@ -992,7 +1010,7 @@ }
}
}

private def analyzeClass(name: String): AnalyzedClass = {
private def analyzeClass(name: String, bytecodeHash: Int): AnalyzedClass = {
val hasMacro: Boolean = macroClasses.contains(name)
val (companions, apiHash, extraHash) = companionsWithHash(name)
val nameHashes = nameHashesForCompanions(name)
@@ -1005,7 +1023,9 @@ private final class AnalysisCallback(
nameHashes,
hasMacro,
extraHash,
provenance
provenance,
bytecodeHash,
0,
)
}

@@ -1018,7 +1038,6 @@ private final class AnalysisCallback(
.getOrElse(src, ConcurrentHashMap.newKeySet[(String, String)]())
.asScala
.map(_._1)
val analyzedApis = classesInSrc.map(analyzeClass)
val info = SourceInfos.makeInfo(
getOrNil(reporteds.iterator.map { case (k, v) => k -> v.asScala.toSeq }.toMap, src),
getOrNil(unreporteds.iterator.map { case (k, v) => k -> v.asScala.toSeq }.toMap, src),
@@ -1053,6 +1072,9 @@
)
val libDeps = libraries.map(d => (d, binaryClassName(d), stampReader.library(d)))

val bytecodeHash = computeBytecodeHash(localProds, nonLocalProds)
val analyzedApis = classesInSrc.map(analyzeClass(_, bytecodeHash))

a.addSource(
src,
analyzedApis,
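addTransitiveBytecodeHash only enriches classes that define macros: macro expansion runs compiled upstream code, so a change in upstream bytecode can change a macro's output without any API change. For those classes it walks the internal member-reference graph transitively and folds the upstream per-class bytecode hashes into one value; every other class keeps the default of 0. The core of that branch restated in isolation, with names as in the diff above:

// Hedged restatement of the macro branch in addTransitiveBytecodeHash.
val upstreamClasses = IncrementalCommon.transitiveDeps(Set(className), log, logging = false)(findUpstream)
val transitiveHash  = upstreamClasses.map(apis.internalAPI(_).bytecodeHash()).hashCode()
val updated         = analyzedClass.withTransitiveBytecodeHash(transitiveHash)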
@@ -41,8 +41,6 @@ private[inc] abstract class IncrementalCommon(
options: IncOptions,
profiler: RunProfiler
) extends InvalidationProfilerUtils {
private final val TIMESTAMP_2020 = 1577854800000L

// Work around bugs in classpath handling such as the "currently" problematic -javabootclasspath
private[this] def enableShallowLookup: Boolean =
java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup")
@@ -308,18 +306,18 @@
oldAPI: String => AnalyzedClass,
newAPI: String => AnalyzedClass
): APIChanges = {
// ignore timestamp pre-2020 since that likely means that we have a hardcoded 2010 timestamp
def timeStampIsSame(ts1: Long, ts2: Long): Boolean = {
(ts1 < TIMESTAMP_2020) || (ts2 < TIMESTAMP_2020) || (ts1 == ts2)
def hashesMatch(a: AnalyzedClass, b: AnalyzedClass, hasMacro: Boolean): Boolean = {
(a.bytecodeHash() == b.bytecodeHash()) &&
(a.apiHash == b.apiHash) &&
(a.extraHash == b.extraHash) &&
(!hasMacro || a.transitiveBytecodeHash() == b.transitiveBytecodeHash())
}
// log.debug(s"[zinc] detectAPIChanges(recompiledClasses = $recompiledClasses)")
def classDiff(className: String, a: AnalyzedClass, b: AnalyzedClass): Option[APIChange] = {
// log.debug(s"[zinc] classDiff($className, ${a.name}, ${b.name})")
if (
timeStampIsSame(a.compilationTimestamp(), b.compilationTimestamp()) && (a.apiHash == b.apiHash)
) None
val hasMacro = a.hasMacro || b.hasMacro
if (hashesMatch(a, b, hasMacro)) None
else {
val hasMacro = a.hasMacro || b.hasMacro
if (hasMacro && IncOptions.getRecompileOnMacroDef(options)) {
Some(APIChangeDueToMacroDefinition(className))
} else if (
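With the TIMESTAMP_2020 workaround gone, change detection is purely content based: two AnalyzedClass values compare as unchanged when their bytecode hash, API hash and extra hash all match, plus the transitive bytecode hash when either side defines a macro. The predicate in isolation, equivalent to hashesMatch above:

import xsbti.api.AnalyzedClass

// No timestamps are consulted, so a rebuild that reproduces identical output
// compares as unchanged even though compilationTimestamp() differs.
def sameContent(a: AnalyzedClass, b: AnalyzedClass): Boolean = {
  val hasMacro = a.hasMacro || b.hasMacro
  a.bytecodeHash() == b.bytecodeHash() &&
  a.apiHash == b.apiHash &&
  a.extraHash == b.extraHash &&
  (!hasMacro || a.transitiveBytecodeHash() == b.transitiveBytecodeHash())
}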
@@ -145,6 +145,9 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
out.int(ac.apiHash())
out.bool(ac.hasMacro)
out.string(ac.provenance())
out.int(ac.extraHash())
out.long(ac.bytecodeHash())
out.long(ac.transitiveBytecodeHash())
val nh0 = ac.nameHashes()
val nh = if (nh0.length > 1 && sort) {
val nh = nh0.clone()
@@ -169,6 +172,9 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
val ah = in.int()
val hm = in.bool()
val p = in.string()
val eh = in.int()
val bh = in.long()
val ebh = in.long()
val nhNames = in.readStringArray()
val nhScopes = in.readArray[UseScope]() { UseScope.values()(in.byte().toInt) }
val nhHashes = in.readArray[Int]() { in.int() }
@@ -181,7 +187,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
val comp =
if (storeApis) Companions.of(readClassLike(in), readClassLike(in))
else APIs.emptyCompanions
AnalyzedClass.of(ts, name, SafeLazyProxy.strict(comp), ah, nameHashes, hm, ah, p)
AnalyzedClass.of(ts, name, SafeLazyProxy.strict(comp), ah, nameHashes, hm, eh, p, bh, ebh)
}
}

Expand Down