diff --git a/build.sbt b/build.sbt index c685ebcca0..a05ef0b97d 100644 --- a/build.sbt +++ b/build.sbt @@ -680,6 +680,7 @@ lazy val actionsProj = (project in file("main-actions")) stdTaskProj, taskProj, testingProj, + utilCacheResolver, utilLogging, utilRelation, utilTracking, diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/Cont.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/Cont.scala index d237962399..4325452e5a 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/Cont.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/Cont.scala @@ -95,7 +95,7 @@ trait Cont: given qctx.type = qctx Expr .summon[HashWriter[A]] - .getOrElse(sys.error(s"HashWriter[A] not found for ${TypeRepr.of[A].typeSymbol}")) + .getOrElse(sys.error(s"HashWriter[A] not found for ${TypeRepr.of[A].show}")) def summonJsonFormat[A: Type]: Expr[JsonFormat[A]] = import conv.qctx @@ -103,7 +103,7 @@ trait Cont: given qctx.type = qctx Expr .summon[JsonFormat[A]] - .getOrElse(sys.error(s"JsonFormat[A] not found for ${TypeRepr.of[A].typeSymbol}")) + .getOrElse(sys.error(s"JsonFormat[A] not found for ${TypeRepr.of[A].show}")) def summonClassTag[A: Type]: Expr[ClassTag[A]] = import conv.qctx @@ -111,7 +111,7 @@ trait Cont: given qctx.type = qctx Expr .summon[ClassTag[A]] - .getOrElse(sys.error(s"ClassTag[A] not found for ${TypeRepr.of[A].typeSymbol}")) + .getOrElse(sys.error(s"ClassTag[A] not found for ${TypeRepr.of[A].show}")) /** * Implementation of a macro that provides a direct syntax for applicative functors and monads. 
diff --git a/main-actions/src/main/scala/sbt/Pkg.scala b/main-actions/src/main/scala/sbt/Pkg.scala index 6123b67e06..67f0ffe0fe 100644 --- a/main-actions/src/main/scala/sbt/Pkg.scala +++ b/main-actions/src/main/scala/sbt/Pkg.scala @@ -13,17 +13,25 @@ import java.util.jar.{ Attributes, Manifest } import scala.collection.JavaConverters._ import sbt.io.IO -import sjsonnew.JsonFormat +import sjsonnew.{ + :*:, + Builder, + IsoLList, + JsonFormat, + LList, + LNil, + Unbuilder, + deserializationError, + flatUnionFormat4 +} import sbt.util.Logger - import sbt.util.{ CacheStoreFactory, FilesInfo, ModifiedFileInfo, PlainFileInfo } import sbt.util.FileInfo.{ exists, lastModified } import sbt.util.CacheImplicits._ import sbt.util.Tracked.{ inputChanged, outputChanged } import scala.sys.process.Process - -sealed trait PackageOption +import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFile, VirtualFileRef } /** * == Package == @@ -33,18 +41,17 @@ sealed trait PackageOption * @see [[https://docs.oracle.com/javase/tutorial/deployment/jar/index.html]] */ object Pkg: - final case class JarManifest(m: Manifest) extends PackageOption { - assert(m != null) - } - final case class MainClass(mainClassName: String) extends PackageOption - final case class ManifestAttributes(attributes: (Attributes.Name, String)*) extends PackageOption - def ManifestAttributes(attributes: (String, String)*): ManifestAttributes = { + def JarManifest(m: Manifest) = PackageOption.JarManifest(m) + def MainClass(mainClassName: String) = PackageOption.MainClass(mainClassName) + def ManifestAttributes(attributes: (Attributes.Name, String)*) = + PackageOption.ManifestAttributes(attributes: _*) + def ManifestAttributes(attributes: (String, String)*) = { val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value) - new ManifestAttributes(converted: _*) + PackageOption.ManifestAttributes(converted: _*) } // 2010-01-01 private val default2010Timestamp: Long = 1262304000000L - final 
case class FixedTimestamp(value: Option[Long]) extends PackageOption + def FixedTimestamp(value: Option[Long]) = PackageOption.FixedTimestamp(value) val keepTimestamps: Option[Long] = None val fixed2010Timestamp: Option[Long] = Some(default2010Timestamp) def gitCommitDateTimestamp: Option[Long] = @@ -72,10 +79,9 @@ object Pkg: .orElse(Some(default2010Timestamp)) def timeFromConfiguration(config: Configuration): Option[Long] = - (config.options.collect { case t: FixedTimestamp => t }).headOption match { - case Some(FixedTimestamp(value)) => value - case _ => defaultTimestamp - } + (config.options.collect { case t: PackageOption.FixedTimestamp => t }).headOption match + case Some(PackageOption.FixedTimestamp(value)) => value + case _ => defaultTimestamp def mergeAttributes(a1: Attributes, a2: Attributes) = a1.asScala ++= a2.asScala // merges `mergeManifest` into `manifest` (mutating `manifest` in the process) @@ -98,18 +104,34 @@ object Pkg: * @param options additional package information, e.g. jar manifest, main class or manifest attributes */ final class Configuration( - val sources: Seq[(File, String)], - val jar: File, + val sources: Seq[(HashedVirtualFileRef, String)], + val jar: VirtualFileRef, val options: Seq[PackageOption] ) + object Configuration: + given IsoLList.Aux[ + Configuration, + Vector[(HashedVirtualFileRef, String)] :*: VirtualFileRef :*: Seq[PackageOption] :*: LNil + ] = + import sbt.util.CacheImplicits.given + import sbt.util.PathHashWriters.given + LList.iso( + (c: Configuration) => + ("sources", c.sources.toVector) :*: ("jar", c.jar) :*: ("options", c.options) :*: LNil, + (in: Vector[(HashedVirtualFileRef, String)] :*: VirtualFileRef :*: Seq[PackageOption] :*: + LNil) => Configuration(in.head, in.tail.head, in.tail.tail.head), + ) + given JsonFormat[Configuration] = summon[JsonFormat[Configuration]] + end Configuration + /** * @param conf the package configuration that should be build * @param cacheStoreFactory used for jar caching. 
We try to avoid rebuilds as much as possible * @param log feedback for the user */ - def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit = - apply(conf, cacheStoreFactory, log, timeFromConfiguration(conf)) + def apply(conf: Configuration, converter: FileConverter, log: Logger): VirtualFile = + apply(conf, converter, log, timeFromConfiguration(conf)) /** * @param conf the package configuration that should be build @@ -119,42 +141,31 @@ object Pkg: */ def apply( conf: Configuration, - cacheStoreFactory: CacheStoreFactory, + converter: FileConverter, log: Logger, time: Option[Long] - ): Unit = { + ): VirtualFile = + val manifest = toManifest(conf, log) + val out = converter.toPath(conf.jar).toFile() + val sources = conf.sources.map { case (vf, path) => + converter.toPath(vf).toFile() -> path + } + makeJar(sources, out, manifest, log, time) + converter.toVirtualFile(out.toPath()) + + def toManifest(conf: Configuration, log: Logger): Manifest = val manifest = new Manifest val main = manifest.getMainAttributes - for (option <- conf.options) { - option match { - case JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); () - case MainClass(mainClassName) => main.put(Attributes.Name.MAIN_CLASS, mainClassName); () - case ManifestAttributes(attributes @ _*) => main.asScala ++= attributes; () - case FixedTimestamp(value) => () + for option <- conf.options do + option match + case PackageOption.JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); () + case PackageOption.MainClass(mainClassName) => + main.put(Attributes.Name.MAIN_CLASS, mainClassName); () + case PackageOption.ManifestAttributes(attributes @ _*) => main.asScala ++= attributes; () + case PackageOption.FixedTimestamp(value) => () case _ => log.warn("Ignored unknown package option " + option) - } - } setVersion(main) - - type Inputs = (Seq[(File, String)], FilesInfo[ModifiedFileInfo], Manifest) - val cachedMakeJar = 
inputChanged(cacheStoreFactory make "inputs") { - (inChanged, inputs: Inputs) => - import exists.format - val (sources, _, manifest) = inputs - outputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) => - if (inChanged || outChanged) { - makeJar(sources, jar.file, manifest, log, time) - jar.file - () - } else log.debug("Jar uptodate: " + jar.file) - } - } - - val inputFiles = conf.sources.map(_._1).toSet - val inputs = (conf.sources.distinct, lastModified(inputFiles), manifest) - cachedMakeJar(inputs)(() => exists(conf.jar)) - () - } + manifest /** * updates the manifest version is there is none present. @@ -172,7 +183,7 @@ object Pkg: import Attributes.Name._ val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR) val attribVals = Seq(name, version, orgName) - ManifestAttributes(attribKeys zip attribVals: _*) + PackageOption.ManifestAttributes(attribKeys.zip(attribVals): _*) } def addImplManifestAttributes( name: String, @@ -195,7 +206,7 @@ object Pkg: IMPLEMENTATION_VENDOR_ID, ) val attribVals = Seq(name, version, orgName, org) - ManifestAttributes((attribKeys zip attribVals) ++ { + PackageOption.ManifestAttributes(attribKeys.zip(attribVals) ++ { homepage map (h => (IMPLEMENTATION_URL, h.toString)) }: _*) } @@ -221,7 +232,7 @@ object Pkg: def sourcesDebugString(sources: Seq[(File, String)]): String = "Input file mappings:\n\t" + (sources map { case (f, s) => s + "\n\t " + f } mkString ("\n\t")) - implicit def manifestFormat: JsonFormat[Manifest] = projectFormat[Manifest, Array[Byte]]( + given manifestFormat: JsonFormat[Manifest] = projectFormat[Manifest, Array[Byte]]( m => { val bos = new java.io.ByteArrayOutputStream() m write bos @@ -230,3 +241,98 @@ object Pkg: bs => new Manifest(new java.io.ByteArrayInputStream(bs)) ) end Pkg + +enum PackageOption: + case JarManifest(m: Manifest) + case MainClass(mainClassName: String) + case ManifestAttributes(attributes: (Attributes.Name, String)*) + case 
FixedTimestamp(value: Option[Long]) + +object PackageOption: + import Pkg.manifestFormat + + private given jarManifestFormat: JsonFormat[PackageOption.JarManifest] = + new JsonFormat[PackageOption.JarManifest]: + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): PackageOption.JarManifest = + jsOpt match + case Some(js) => + unbuilder.beginObject(js) + val m = unbuilder.readField[Manifest]("m") + unbuilder.endObject() + PackageOption.JarManifest(m) + case None => deserializationError("Expected JsObject but found None") + override def write[J](obj: PackageOption.JarManifest, builder: Builder[J]): Unit = + builder.beginObject() + builder.addField("m", obj.m) + builder.endObject() + + private given mainClassFormat: JsonFormat[PackageOption.MainClass] = + new JsonFormat[PackageOption.MainClass]: + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): PackageOption.MainClass = + jsOpt match + case Some(js) => + unbuilder.beginObject(js) + val mainClassName = unbuilder.readField[String]("mainClassName") + unbuilder.endObject() + PackageOption.MainClass(mainClassName) + case None => deserializationError("Expected JsObject but found None") + override def write[J](obj: PackageOption.MainClass, builder: Builder[J]): Unit = + builder.beginObject() + builder.addField("mainClassName", obj.mainClassName) + builder.endObject() + + private given manifestAttributesFormat: JsonFormat[PackageOption.ManifestAttributes] = + new JsonFormat[PackageOption.ManifestAttributes]: + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): PackageOption.ManifestAttributes = + jsOpt match + case Some(js) => + unbuilder.beginObject(js) + val attributes = unbuilder.readField[Vector[(String, String)]]("attributes") + unbuilder.endObject() + PackageOption.ManifestAttributes(attributes.map { case (k, v) => + Attributes.Name(k) -> v + }: _*) + case None => deserializationError("Expected JsObject but found None") + override def write[J](obj: 
PackageOption.ManifestAttributes, builder: Builder[J]): Unit = + builder.beginObject() + builder.addField( + "attributes", + obj.attributes.toVector.map { case (k, v) => k.toString -> v } + ) + builder.endObject() + + private given fixedTimeStampFormat: JsonFormat[PackageOption.FixedTimestamp] = + new JsonFormat[PackageOption.FixedTimestamp]: + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): PackageOption.FixedTimestamp = + jsOpt match + case Some(js) => + unbuilder.beginObject(js) + val value = unbuilder.readField[Option[Long]]("value") + unbuilder.endObject() + PackageOption.FixedTimestamp(value) + case None => deserializationError("Expected JsObject but found None") + override def write[J](obj: PackageOption.FixedTimestamp, builder: Builder[J]): Unit = + builder.beginObject() + builder.addField("value", obj.value) + builder.endObject() + + given JsonFormat[PackageOption] = flatUnionFormat4[ + PackageOption, + PackageOption.JarManifest, + PackageOption.MainClass, + PackageOption.ManifestAttributes, + PackageOption.FixedTimestamp, + ]("type") +end PackageOption diff --git a/main-command/src/main/scala/sbt/BasicKeys.scala b/main-command/src/main/scala/sbt/BasicKeys.scala index bf91e10e06..e616f4d256 100644 --- a/main-command/src/main/scala/sbt/BasicKeys.scala +++ b/main-command/src/main/scala/sbt/BasicKeys.scala @@ -8,7 +8,7 @@ package sbt import java.io.File - +import java.nio.file.Path import sbt.internal.inc.classpath.{ ClassLoaderCache => IncClassLoaderCache } import sbt.internal.classpath.ClassLoaderCache import sbt.internal.server.ServerHandler @@ -113,6 +113,13 @@ object BasicKeys { 10000 ) + val rootOutputDirectory = + AttributeKey[Path]( + "rootOutputDirectory", + "Build-wide output directory", + 10000 + ) + // Unlike other BasicKeys, this is not used directly as a setting key, // and severLog / logLevel is used instead. 
private[sbt] val serverLogLevel = diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/CompileRemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/CompileRemoteCacheArtifact.scala index 982fdfd629..9dfa76af8d 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/CompileRemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/CompileRemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache final class CompileRemoteCacheArtifact private ( artifact: sbt.librarymanagement.Artifact, - packaged: sbt.TaskKey[java.io.File], + packaged: sbt.TaskKey[xsbti.VirtualFileRef], val extractDirectory: java.io.File, val analysisFile: java.io.File) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { @@ -22,13 +22,13 @@ final class CompileRemoteCacheArtifact private ( override def toString: String = { "CompileRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + analysisFile + ")" } - private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile): CompileRemoteCacheArtifact = { + private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.VirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile): CompileRemoteCacheArtifact = { new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile) } def withArtifact(artifact: sbt.librarymanagement.Artifact): CompileRemoteCacheArtifact = { copy(artifact = artifact) } - def withPackaged(packaged: sbt.TaskKey[java.io.File]): CompileRemoteCacheArtifact = { + def withPackaged(packaged: sbt.TaskKey[xsbti.VirtualFileRef]): CompileRemoteCacheArtifact = { copy(packaged = packaged) } def 
withExtractDirectory(extractDirectory: java.io.File): CompileRemoteCacheArtifact = { @@ -40,5 +40,5 @@ final class CompileRemoteCacheArtifact private ( } object CompileRemoteCacheArtifact { - def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, analysisFile: java.io.File): CompileRemoteCacheArtifact = new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile) + def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.VirtualFileRef], extractDirectory: java.io.File, analysisFile: java.io.File): CompileRemoteCacheArtifact = new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile) } diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/CustomRemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/CustomRemoteCacheArtifact.scala index 81ade63a26..8b7431b5a9 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/CustomRemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/CustomRemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache final class CustomRemoteCacheArtifact private ( artifact: sbt.librarymanagement.Artifact, - packaged: sbt.TaskKey[java.io.File], + packaged: sbt.TaskKey[xsbti.VirtualFileRef], val extractDirectory: java.io.File, val preserveLastModified: Boolean) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { @@ -22,13 +22,13 @@ final class CustomRemoteCacheArtifact private ( override def toString: String = { "CustomRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + preserveLastModified + ")" } - private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, preserveLastModified: Boolean = preserveLastModified): 
CustomRemoteCacheArtifact = { + private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.VirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, preserveLastModified: Boolean = preserveLastModified): CustomRemoteCacheArtifact = { new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified) } def withArtifact(artifact: sbt.librarymanagement.Artifact): CustomRemoteCacheArtifact = { copy(artifact = artifact) } - def withPackaged(packaged: sbt.TaskKey[java.io.File]): CustomRemoteCacheArtifact = { + def withPackaged(packaged: sbt.TaskKey[xsbti.VirtualFileRef]): CustomRemoteCacheArtifact = { copy(packaged = packaged) } def withExtractDirectory(extractDirectory: java.io.File): CustomRemoteCacheArtifact = { @@ -40,5 +40,5 @@ final class CustomRemoteCacheArtifact private ( } object CustomRemoteCacheArtifact { - def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, preserveLastModified: Boolean): CustomRemoteCacheArtifact = new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified) + def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.VirtualFileRef], extractDirectory: java.io.File, preserveLastModified: Boolean): CustomRemoteCacheArtifact = new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified) } diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/PomRemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/PomRemoteCacheArtifact.scala index 7e14d1f73b..b89b5099ee 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/PomRemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/PomRemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache final class PomRemoteCacheArtifact private ( artifact: 
sbt.librarymanagement.Artifact, - packaged: sbt.TaskKey[java.io.File]) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { + packaged: sbt.TaskKey[xsbti.VirtualFileRef]) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { @@ -20,17 +20,17 @@ final class PomRemoteCacheArtifact private ( override def toString: String = { "PomRemoteCacheArtifact(" + artifact + ", " + packaged + ")" } - private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged): PomRemoteCacheArtifact = { + private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.VirtualFileRef] = packaged): PomRemoteCacheArtifact = { new PomRemoteCacheArtifact(artifact, packaged) } def withArtifact(artifact: sbt.librarymanagement.Artifact): PomRemoteCacheArtifact = { copy(artifact = artifact) } - def withPackaged(packaged: sbt.TaskKey[java.io.File]): PomRemoteCacheArtifact = { + def withPackaged(packaged: sbt.TaskKey[xsbti.VirtualFileRef]): PomRemoteCacheArtifact = { copy(packaged = packaged) } } object PomRemoteCacheArtifact { - def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File]): PomRemoteCacheArtifact = new PomRemoteCacheArtifact(artifact, packaged) + def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.VirtualFileRef]): PomRemoteCacheArtifact = new PomRemoteCacheArtifact(artifact, packaged) } diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/RemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/RemoteCacheArtifact.scala index 01c286e248..d66381ae46 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/RemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/RemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache abstract class RemoteCacheArtifact( 
val artifact: sbt.librarymanagement.Artifact, - val packaged: sbt.TaskKey[java.io.File]) extends Serializable { + val packaged: sbt.TaskKey[xsbti.VirtualFileRef]) extends Serializable { diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/TestRemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/TestRemoteCacheArtifact.scala index 7fb49f6933..26908ac194 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/TestRemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/TestRemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache final class TestRemoteCacheArtifact private ( artifact: sbt.librarymanagement.Artifact, - packaged: sbt.TaskKey[java.io.File], + packaged: sbt.TaskKey[xsbti.VirtualFileRef], val extractDirectory: java.io.File, val analysisFile: java.io.File, val testResult: java.io.File) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { @@ -23,13 +23,13 @@ final class TestRemoteCacheArtifact private ( override def toString: String = { "TestRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + analysisFile + ", " + testResult + ")" } - private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile, testResult: java.io.File = testResult): TestRemoteCacheArtifact = { + private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.VirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile, testResult: java.io.File = testResult): TestRemoteCacheArtifact = { new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult) } def withArtifact(artifact: sbt.librarymanagement.Artifact): 
TestRemoteCacheArtifact = { copy(artifact = artifact) } - def withPackaged(packaged: sbt.TaskKey[java.io.File]): TestRemoteCacheArtifact = { + def withPackaged(packaged: sbt.TaskKey[xsbti.VirtualFileRef]): TestRemoteCacheArtifact = { copy(packaged = packaged) } def withExtractDirectory(extractDirectory: java.io.File): TestRemoteCacheArtifact = { @@ -44,5 +44,5 @@ final class TestRemoteCacheArtifact private ( } object TestRemoteCacheArtifact { - def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, analysisFile: java.io.File, testResult: java.io.File): TestRemoteCacheArtifact = new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult) + def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.VirtualFileRef], extractDirectory: java.io.File, analysisFile: java.io.File, testResult: java.io.File): TestRemoteCacheArtifact = new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult) } diff --git a/main/src/main/contraband/remotecache.json b/main/src/main/contraband/remotecache.json index 486d25f9be..3bd48c0a61 100644 --- a/main/src/main/contraband/remotecache.json +++ b/main/src/main/contraband/remotecache.json @@ -14,7 +14,7 @@ }, { "name": "packaged", - "type": "sbt.TaskKey[java.io.File]" + "type": "sbt.TaskKey[xsbti.VirtualFileRef]" } ], "types": [ diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 421915910e..da12487666 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -84,7 +84,7 @@ import sbt.nio.file.{ FileTreeView, Glob, RecursiveGlob } import sbt.nio.Watch import sbt.std.TaskExtra.* import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprint } -import sbt.util.CacheImplicits._ +import sbt.util.CacheImplicits.given import sbt.util.InterfaceUtil.{ t2, toJavaFunction => f1 } import sbt.util._ 
import sjsonnew._ @@ -420,7 +420,9 @@ object Defaults extends BuildCommon { val ih = app.provider.scalaProvider.launcher.ivyHome val coursierCache = csrCacheDirectory.value val javaHome = Paths.get(sys.props("java.home")) - Map( + val out = rootOutputDirectory.value + ListMap( + "OUT" -> out, "BASE" -> base.toPath, "SBT_BOOT" -> boot.toPath, "CSR_CACHE" -> coursierCache.toPath, @@ -556,7 +558,7 @@ object Defaults extends BuildCommon { // Appended to JvmPlugin.projectSettings def paths: Seq[Setting[_]] = Seq( baseDirectory := thisProject.value.base, - target := baseDirectory.value / "target", + target := rootOutputDirectory.value.resolve(outputPath.value).toFile(), // Use a different history path for jline3 because the jline2 format is // incompatible. By sbt 1.4.0, we should consider revering this to t / ".history" // and possibly rewriting the jline2 history in a jline3 compatible format if the @@ -663,9 +665,8 @@ object Defaults extends BuildCommon { }, earlyOutput / artifactPath := configArtifactPathSetting(artifact, "early").value, earlyOutput := { - val converter = fileConverter.value - val jar = (earlyOutput / artifactPath).value - converter.toVirtualFile(jar.toPath) + (earlyOutput / artifactPath).value match + case vf: VirtualFile => vf }, semanticdbTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "meta"), compileAnalysisTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "zinc"), @@ -1751,47 +1752,91 @@ object Defaults extends BuildCommon { packageTaskSettings(packageDoc, packageDocMappings) ++ Seq(Keys.`package` := packageBin.value) - def packageBinMappings: Initialize[Task[Seq[(File, String)]]] = - products.map { _ flatMap Path.allSubpaths } - def packageDocMappings: Initialize[Task[Seq[(File, String)]]] = - doc.map { x => Path.allSubpaths(x).toSeq } - def packageSrcMappings: Initialize[Task[Seq[(File, String)]]] = + def packageBinMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = + Def.task { + val 
converter = fileConverter.value + val xs = products.value + xs + .flatMap(Path.allSubpaths) + .filter(_._1.isFile()) + .map { case (p, path) => + val vf = converter.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } + } + + def packageDocMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = + Def.task { + val converter = fileConverter.value + val d = doc.value + Path + .allSubpaths(d) + .toSeq + .filter(_._1.isFile()) + .map { case (p, path) => + val vf = converter.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } + } + + def packageSrcMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = concatMappings(resourceMappings, sourceMappings) - private type Mappings = Initialize[Task[Seq[(File, String)]]] + private type Mappings = Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] def concatMappings(as: Mappings, bs: Mappings): Mappings = - as.zipWith(bs) { (a: Task[Seq[(File, String)]], b: Task[Seq[(File, String)]]) => - (a, b).mapN { case (seq1: Seq[(File, String)], seq2: Seq[(File, String)]) => - seq1 ++ seq2 - } + as.zipWith(bs) { + ( + a: Task[Seq[(HashedVirtualFileRef, String)]], + b: Task[Seq[(HashedVirtualFileRef, String)]] + ) => + (a, b).mapN { + case ( + seq1: Seq[(HashedVirtualFileRef, String)], + seq2: Seq[(HashedVirtualFileRef, String)] + ) => + seq1 ++ seq2 + } } // drop base directories, since there are no valid mappings for these - def sourceMappings: Initialize[Task[Seq[(File, String)]]] = + def sourceMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = Def.task { + val converter = fileConverter.value val sdirs = unmanagedSourceDirectories.value val base = baseDirectory.value val relative = (f: File) => relativeTo(sdirs)(f).orElse(relativeTo(base)(f)).orElse(flat(f)) val exclude = Set(sdirs, base) - unmanagedSources.value.flatMap { - case s if !exclude(s) => relative(s).map(s -> _) - case _ => None - } + unmanagedSources.value + .flatMap { + case s if !exclude(s) => 
relative(s).map(s -> _) + case _ => None + } + .map { case (p, path) => + val vf = converter.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } } - def resourceMappings = relativeMappings(unmanagedResources, unmanagedResourceDirectories) + def resourceMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = + relativeMappings(unmanagedResources, unmanagedResourceDirectories) def relativeMappings( files: Taskable[Seq[File]], dirs: Taskable[Seq[File]] - ): Initialize[Task[Seq[(File, String)]]] = + ): Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = Def.task { + val converter = fileConverter.value val rdirs = dirs.toTask.value.toSet val relative = (f: File) => relativeTo(rdirs)(f).orElse(flat(f)) - files.toTask.value.flatMap { - case r if !rdirs(r) => relative(r).map(r -> _) - case _ => None - } + files.toTask.value + .flatMap { + case r if !rdirs(r) => relative(r).map(r -> _) + case _ => None + } + .map { case (p, path) => + val vf = converter.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } } def collectFiles( @@ -1806,7 +1851,8 @@ object Defaults extends BuildCommon { def relativeMappings( // forward to widened variant files: ScopedTaskable[Seq[File]], dirs: ScopedTaskable[Seq[File]] - ): Initialize[Task[Seq[(File, String)]]] = relativeMappings(files: Taskable[Seq[File]], dirs) + ): Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = + relativeMappings(files: Taskable[Seq[File]], dirs) def collectFiles( // forward to widened variant dirs: ScopedTaskable[Seq[File]], @@ -1817,10 +1863,11 @@ object Defaults extends BuildCommon { private[sbt] def configArtifactPathSetting( art: SettingKey[Artifact], extraPrefix: String - ): Initialize[File] = + ): Initialize[VirtualFile] = Def.setting { val f = artifactName.value - crossTarget.value / + val converter = fileConverter.value + val p = crossTarget.value / (prefix(configuration.value.name) + extraPrefix) / f( ScalaVersion( (artifactName / scalaVersion).value, @@ -1829,15 
+1876,17 @@ object Defaults extends BuildCommon { projectID.value, art.value ) + converter.toVirtualFile(p.toPath()) } private[sbt] def prefixArtifactPathSetting( art: SettingKey[Artifact], extraPrefix: String - ): Initialize[File] = + ): Initialize[VirtualFileRef] = Def.setting { val f = artifactName.value - crossTarget.value / extraPrefix / f( + val converter = fileConverter.value + val p = crossTarget.value / extraPrefix / f( ScalaVersion( (artifactName / scalaVersion).value, (artifactName / scalaBinaryVersion).value @@ -1845,12 +1894,13 @@ object Defaults extends BuildCommon { projectID.value, art.value ) + converter.toVirtualFile(p.toPath()) } - def artifactPathSetting(art: SettingKey[Artifact]): Initialize[File] = + def artifactPathSetting(art: SettingKey[Artifact]): Initialize[VirtualFileRef] = Def.setting { val f = artifactName.value - crossTarget.value / f( + val p = crossTarget.value / f( ScalaVersion( (artifactName / scalaVersion).value, (artifactName / scalaBinaryVersion).value @@ -1858,6 +1908,8 @@ object Defaults extends BuildCommon { projectID.value, art.value ) + val converter = fileConverter.value + converter.toVirtualFile(p.toPath()) } def artifactSetting: Initialize[Artifact] = @@ -1896,29 +1948,34 @@ object Defaults extends BuildCommon { case None => scope :: Nil } - def packageTaskSettings(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) = + def packageTaskSettings( + key: TaskKey[VirtualFileRef], + mappingsTask: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] + ) = inTask(key)( Seq( (TaskZero / key) := packageTask.value, packageConfiguration := packageConfigurationTask.value, mappings := mappingsTask.value, - packagedArtifact := (artifact.value -> key.value), + packagedArtifact := artifact.value -> key.value, artifact := artifactSetting.value, artifactPath := artifactPathSetting(artifact).value ) ) - def packageTask: Initialize[Task[File]] = - Def.task { + def packageTask: Initialize[Task[VirtualFileRef]] = + 
Def.cachedTask { val config = packageConfiguration.value val s = streams.value - Pkg( + val converter = fileConverter.value + val out = Pkg( config, - s.cacheStoreFactory, + converter, s.log, Pkg.timeFromConfiguration(config) ) - config.jar + Def.declareOutput(out) + out } def packageConfigurationTask: Initialize[Task[Pkg.Configuration]] = @@ -2856,9 +2913,12 @@ object Classpaths { } def defaultPackageKeys = Seq(packageBin, packageSrc, packageDoc) - lazy val defaultPackages: Seq[TaskKey[File]] = - for (task <- defaultPackageKeys; conf <- Seq(Compile, Test)) yield (conf / task) - lazy val defaultArtifactTasks: Seq[TaskKey[File]] = makePom +: defaultPackages + lazy val defaultPackages: Seq[TaskKey[VirtualFileRef]] = + for + task <- defaultPackageKeys + conf <- Seq(Compile, Test) + yield (conf / task) + lazy val defaultArtifactTasks: Seq[TaskKey[VirtualFileRef]] = makePom +: defaultPackages def findClasspathConfig( map: Configuration => Configuration, @@ -2877,13 +2937,18 @@ object Classpaths { } getOrElse notFound } - def packaged(pkgTasks: Seq[TaskKey[File]]): Initialize[Task[Map[Artifact, File]]] = + def packaged( + pkgTasks: Seq[TaskKey[VirtualFileRef]] + ): Initialize[Task[Map[Artifact, VirtualFileRef]]] = enabledOnly(packagedArtifact.toSettingKey, pkgTasks) apply (_.join.map(_.toMap)) - def artifactDefs(pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[Artifact]] = + def artifactDefs(pkgTasks: Seq[TaskKey[VirtualFileRef]]): Initialize[Seq[Artifact]] = enabledOnly(artifact, pkgTasks) - def enabledOnly[T](key: SettingKey[T], pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[T]] = + def enabledOnly[T]( + key: SettingKey[T], + pkgTasks: Seq[TaskKey[VirtualFileRef]] + ): Initialize[Seq[T]] = (forallIn(key, pkgTasks) zipWith forallIn(publishArtifact, pkgTasks))(_ zip _ collect { case (a, true) => a }) @@ -2914,10 +2979,11 @@ object Classpaths { packagedArtifacts :== Map.empty, crossTarget := target.value, makePom := { + val converter = fileConverter.value val config = 
makePomConfiguration.value val publisher = Keys.publisher.value publisher.makePomFile(ivyModule.value, config, streams.value.log) - config.file.get + converter.toVirtualFile(config.file.get.toPath()) }, (makePom / packagedArtifact) := ((makePom / artifact).value -> makePom.value), deliver := deliverTask(makeIvyXmlConfiguration).value, @@ -3066,6 +3132,12 @@ object Classpaths { } }).value, moduleName := normalizedName.value, + outputPath := { + val p = platform.value + val m = moduleName.value + val sv = scalaVersion.value + s"$p/$sv/$m" + }, ivyPaths := IvyPaths( baseDirectory.value.toString, bootIvyHome(appConfiguration.value).map(_.toString) @@ -3197,14 +3269,18 @@ object Classpaths { else confs }, moduleSettings := moduleSettings0.value, - makePomConfiguration := MakePomConfiguration() - .withFile((makePom / artifactPath).value) - .withModuleInfo(projectInfo.value) - .withExtra(pomExtra.value) - .withProcess(pomPostProcess.value) - .withFilterRepositories(pomIncludeRepository.value) - .withAllRepositories(pomAllRepositories.value) - .withConfigurations(Configurations.defaultMavenConfigurations), + makePomConfiguration := { + val converter = fileConverter.value + val out = converter.toPath((makePom / artifactPath).value) + MakePomConfiguration() + .withFile(out.toFile()) + .withModuleInfo(projectInfo.value) + .withExtra(pomExtra.value) + .withProcess(pomPostProcess.value) + .withFilterRepositories(pomIncludeRepository.value) + .withAllRepositories(pomAllRepositories.value) + .withConfigurations(Configurations.defaultMavenConfigurations) + }, makeIvyXmlConfiguration := { makeIvyXmlConfig( publishMavenStyle.value, @@ -3225,12 +3301,16 @@ object Classpaths { |so tooling can use it for eviction errors etc - https://www.scala-sbt.org/1.x/docs/Publishing.html""".stripMargin ) else () + val converter = fileConverter.value + val artifacts = (publish / packagedArtifacts).value.toVector.map { (a, vf) => + a -> converter.toPath(vf).toFile + } publishConfig( 
publishMavenStyle.value, deliverPattern(crossTarget.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (publish / packagedArtifacts).value.toVector, + artifacts, (publish / checksums).value.toVector, getPublishTo(publishTo.value).name, ivyLoggingLevel.value, @@ -3249,27 +3329,39 @@ object Classpaths { optResolverName = Some("local") ) }, - publishLocalConfiguration := publishConfig( - false, // publishMavenStyle.value, - deliverPattern(crossTarget.value), - if (isSnapshot.value) "integration" else "release", - ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (publishLocal / packagedArtifacts).value.toVector, - (publishLocal / checksums).value.toVector, - logging = ivyLoggingLevel.value, - overwrite = isSnapshot.value - ), - publishM2Configuration := publishConfig( - true, - deliverPattern(crossTarget.value), - if (isSnapshot.value) "integration" else "release", - ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (publishM2 / packagedArtifacts).value.toVector, - checksums = (publishM2 / checksums).value.toVector, - resolverName = Resolver.publishMavenLocal.name, - logging = ivyLoggingLevel.value, - overwrite = isSnapshot.value - ), + publishLocalConfiguration := { + val converter = fileConverter.value + val artifacts = (publishLocal / packagedArtifacts).value.toVector.map { (a, vf) => + a -> converter.toPath(vf).toFile + } + publishConfig( + false, // publishMavenStyle.value, + deliverPattern(crossTarget.value), + if (isSnapshot.value) "integration" else "release", + ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, + artifacts, + (publishLocal / checksums).value.toVector, + logging = ivyLoggingLevel.value, + overwrite = isSnapshot.value + ) + }, + publishM2Configuration := { + val converter = fileConverter.value + val artifacts = (publishM2 / packagedArtifacts).value.toVector.map { (a, vf) => + a -> converter.toPath(vf).toFile + } + publishConfig( + 
true, + deliverPattern(crossTarget.value), + if (isSnapshot.value) "integration" else "release", + ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, + artifacts, + checksums = (publishM2 / checksums).value.toVector, + resolverName = Resolver.publishMavenLocal.name, + logging = ivyLoggingLevel.value, + overwrite = isSnapshot.value + ) + }, ivySbt := ivySbt0.value, ivyModule := { val is = ivySbt.value; new is.Module(moduleSettings.value) }, allCredentials := LMCoursier.allCredentialsTask.value, @@ -4489,7 +4581,7 @@ trait BuildExtra extends BuildCommon with DefExtra { libraryDependencies += compilerPlugin(dependency) /** Constructs a setting that declares a new artifact `a` that is generated by `taskDef`. */ - def addArtifact(a: Artifact, taskDef: TaskKey[File]): SettingsDefinition = { + def addArtifact(a: Artifact, taskDef: TaskKey[VirtualFile]): SettingsDefinition = { val pkgd = packagedArtifacts := packagedArtifacts.value.updated(a, taskDef.value) Seq(artifacts += a, pkgd) } @@ -4497,10 +4589,10 @@ trait BuildExtra extends BuildCommon with DefExtra { /** Constructs a setting that declares a new artifact `artifact` that is generated by `taskDef`. 
*/ def addArtifact( artifact: Initialize[Artifact], - taskDef: Initialize[Task[File]] + taskDef: Initialize[Task[VirtualFile]] ): SettingsDefinition = { val artLocal = SettingKey.local[Artifact] - val taskLocal = TaskKey.local[File] + val taskLocal = TaskKey.local[VirtualFile] val art = artifacts := artLocal.value +: artifacts.value val pkgd = packagedArtifacts := packagedArtifacts.value.updated(artLocal.value, taskLocal.value) Seq(artLocal := artifact.value, taskLocal := taskDef.value, art, pkgd) diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index 3bc09f8545..f75f05c1af 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -36,8 +36,8 @@ import sbt.librarymanagement._ import sbt.librarymanagement.ivy.{ Credentials, IvyConfiguration, IvyPaths, UpdateOptions } import sbt.nio.file.Glob import sbt.testing.Framework -import sbt.util.{ ActionCacheStore, Level, Logger, LoggerContext } -import xsbti.{ FileConverter, VirtualFile } +import sbt.util.{ cacheOptOut, ActionCacheStore, Level, Logger, LoggerContext } +import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFile, VirtualFileRef } import xsbti.compile._ import xsbti.compile.analysis.ReadStamps @@ -111,6 +111,7 @@ object Keys { val fullServerHandlers = SettingKey(BasicKeys.fullServerHandlers) val serverHandlers = settingKey[Seq[ServerHandler]]("User-defined server handlers.") val cacheStores = settingKey[Seq[ActionCacheStore]]("Cache backends") + val rootOutputDirectory = SettingKey(BasicKeys.rootOutputDirectory) // val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting) val analysis = StringAttributeKey("analysis") @@ -257,6 +258,7 @@ object Keys { val sourcePositionMappers = taskKey[Seq[xsbti.Position => Option[xsbti.Position]]]("Maps positions in generated source files to the original source it was generated from").withRank(DTask) private[sbt] val 
externalHooks = taskKey[ExternalHooks]("The external hooks used by zinc.") val auxiliaryClassFiles = taskKey[Seq[AuxiliaryClassFiles]]("The auxiliary class files that must be managed by Zinc (for instance the TASTy files)") + @cacheOptOut val fileConverter = settingKey[FileConverter]("The file converter used to convert between Path and VirtualFile") val allowMachinePath = settingKey[Boolean]("Allow machine-specific paths during conversion.") val reportAbsolutePath = settingKey[Boolean]("Report absolute paths during compilation.") @@ -265,21 +267,21 @@ object Keys { private[sbt] val reusableStamper = taskKey[ReadStamps]("The stamper can be reused across subprojects and sessions.") // package keys - val packageBin = taskKey[File]("Produces a main artifact, such as a binary jar.").withRank(ATask) - val `package` = taskKey[File]("Produces the main artifact, such as a binary jar. This is typically an alias for the task that actually does the packaging.").withRank(APlusTask) - val packageDoc = taskKey[File]("Produces a documentation artifact, such as a jar containing API documentation.").withRank(AMinusTask) - val packageSrc = taskKey[File]("Produces a source artifact, such as a jar containing sources and resources.").withRank(AMinusTask) - val packageCache = taskKey[File]("Produces the main artifact for caching.") + val packageBin = taskKey[VirtualFileRef]("Produces a main artifact, such as a binary jar.").withRank(ATask) + val `package` = taskKey[VirtualFileRef]("Produces the main artifact, such as a binary jar. 
This is typically an alias for the task that actually does the packaging.").withRank(APlusTask) + val packageDoc = taskKey[VirtualFileRef]("Produces a documentation artifact, such as a jar containing API documentation.").withRank(AMinusTask) + val packageSrc = taskKey[VirtualFileRef]("Produces a source artifact, such as a jar containing sources and resources.").withRank(AMinusTask) + val packageCache = taskKey[VirtualFileRef]("Produces the main artifact for caching.") val packageOptions = taskKey[Seq[PackageOption]]("Options for packaging.").withRank(BTask) val packageTimestamp = settingKey[Option[Long]]("Overwrites timestamps in JAR file to make the build reproducible; None keeps the existing timestamps (useful for web resources)").withRank(CSetting) val packageConfiguration = taskKey[Pkg.Configuration]("Collects all inputs needed for packaging.").withRank(DTask) - val artifactPath = settingKey[File]("The location of a generated artifact.").withRank(BPlusSetting) + val artifactPath = settingKey[VirtualFileRef]("The location of a generated artifact.").withRank(BPlusSetting) val artifactStr = StringAttributeKey("artifact") val artifact = settingKey[Artifact]("Describes an artifact.").withRank(BMinusSetting) val artifactClassifier = settingKey[Option[String]]("Sets the classifier used by the default artifact definition.").withRank(BSetting) val artifactName = settingKey[(ScalaVersion, ModuleID, Artifact) => String]("Function that produces the artifact name from its definition.").withRank(CSetting) - val mappings = taskKey[Seq[(File, String)]]("Defines the mappings from a file to a path, used by packaging, for example.").withRank(BTask) + val mappings = taskKey[Seq[(HashedVirtualFileRef, String)]]("Defines the mappings from a file to a path, used by packaging, for example.").withRank(BTask) val fileMappings = taskKey[Seq[(File, File)]]("Defines the mappings from a file to a file, used for copying files, for example.").withRank(BMinusTask) // Run Keys @@ -399,7 +401,7 
@@ object Keys { val pushRemoteCacheConfiguration = taskKey[PublishConfiguration]("") val pushRemoteCacheTo = settingKey[Option[Resolver]]("The resolver to publish remote cache to.") val remoteCacheResolvers = settingKey[Seq[Resolver]]("Resolvers for remote cache.") - val remoteCachePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.") + val remoteCachePom = taskKey[VirtualFileRef]("Generates a pom for publishing when publishing Maven-style.") val localCacheDirectory = settingKey[File]("Operating system specific cache directory.") val usePipelining = settingKey[Boolean]("Use subproject pipelining for compilation.").withRank(BSetting) val exportPipelining = settingKey[Boolean]("Product early output so downstream subprojects can do pipelining.").withRank(BSetting) @@ -496,12 +498,12 @@ object Keys { val makePomConfiguration = settingKey[MakePomConfiguration]("Configuration for generating a pom.").withRank(DSetting) val makeIvyXmlConfiguration = taskKey[PublishConfiguration]("Configuration for generating ivy.xml.").withRank(DSetting) val makeIvyXmlLocalConfiguration = taskKey[PublishConfiguration]("Configuration for generating ivy.xml.").withRank(DSetting) - val packagedArtifacts = taskKey[Map[Artifact, File]]("Packages all artifacts for publishing and maps the Artifact definition to the generated file.").withRank(CTask) + val packagedArtifacts = taskKey[Map[Artifact, VirtualFileRef]]("Packages all artifacts for publishing and maps the Artifact definition to the generated file.").withRank(CTask) val publishMavenStyle = settingKey[Boolean]("Configures whether to generate and publish a pom (true) or Ivy file (false).").withRank(BSetting) val credentials = taskKey[Seq[Credentials]]("The credentials to use for updating and publishing.").withRank(BMinusTask) val allCredentials = taskKey[Seq[Credentials]]("Aggregated credentials across current and root subprojects. 
Do not rewire this task.").withRank(DTask) - val makePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.").withRank(BPlusTask) + val makePom = taskKey[VirtualFileRef]("Generates a pom for publishing when publishing Maven-style.").withRank(BPlusTask) val deliver = taskKey[File]("Generates the Ivy file for publishing to a repository.").withRank(BTask) val deliverLocal = taskKey[File]("Generates the Ivy file for publishing to the local repository.").withRank(BTask) // makeIvyXml is currently identical to the confusingly-named "deliver", which may be deprecated in the future @@ -516,6 +518,7 @@ object Keys { val pomAllRepositories = settingKey[Boolean]("If true, includes repositories used in module configurations in the pom repositories section. If false, only the common repositories are included.").withRank(BMinusSetting) val moduleName = settingKey[String]("The name of the current module, used for dependency management.").withRank(BSetting) + val outputPath = settingKey[String]("Path of the output directory relative from the rootOutputDirectory.").withRank(DSetting) val version = settingKey[String]("The version/revision of the current module.").withRank(APlusSetting) val isSnapshot = settingKey[Boolean]("True if the version of the project is a snapshot version.").withRank(BPlusSetting) val moduleIDStr = StringAttributeKey("moduleID") @@ -558,7 +561,7 @@ object Keys { val managedDirectory = settingKey[File]("Directory to which managed dependencies are retrieved.").withRank(BSetting) val classpathTypes = settingKey[Set[String]]("Artifact types that are included on the classpath.").withRank(BSetting) val publishArtifact = settingKey[Boolean]("Enables (true) or disables (false) publishing an artifact.").withRank(AMinusSetting) - val packagedArtifact = taskKey[(Artifact, File)]("Generates a packaged artifact, returning the Artifact and the produced File.").withRank(CTask) + val packagedArtifact = taskKey[(Artifact, VirtualFileRef)]("Generates 
a packaged artifact, returning the Artifact and the produced File.").withRank(CTask) val checksums = settingKey[Seq[String]]("The list of checksums to generate and to verify for dependencies.").withRank(BSetting) val forceUpdatePeriod = settingKey[Option[FiniteDuration]]("Duration after which to force a full update to occur").withRank(CSetting) val versionScheme = settingKey[Option[String]]("""Version scheme used for the subproject: Supported values are Some("early-semver"), Some("pvp"), and Some("semver-spec")""").withRank(BSetting) @@ -592,6 +595,7 @@ object Keys { val forcegc = settingKey[Boolean]("Enables (true) or disables (false) forcing garbage collection after task run when needed.").withRank(BMinusSetting) val minForcegcInterval = settingKey[Duration]("Minimal interval to check for forcing garbage collection.") val settingsData = std.FullInstance.settingsData + @cacheOptOut val streams = taskKey[TaskStreams]("Provides streams for logging and persisting data.").withRank(DTask) val taskDefinitionKey = Def.taskDefinitionKey val (executionRoots, dummyRoots) = Def.dummy[Seq[ScopedKey[_]]]("executionRoots", "The list of root tasks for this task execution. 
Roots are the top-level tasks that were directly requested to be run.") diff --git a/main/src/main/scala/sbt/RemoteCache.scala b/main/src/main/scala/sbt/RemoteCache.scala index a31b5925b1..c878df2ee4 100644 --- a/main/src/main/scala/sbt/RemoteCache.scala +++ b/main/src/main/scala/sbt/RemoteCache.scala @@ -49,7 +49,7 @@ import sbt.util.{ Logger } import sjsonnew.JsonFormat -import xsbti.HashedVirtualFileRef +import xsbti.{ HashedVirtualFileRef, VirtualFileRef } import xsbti.compile.{ AnalysisContents, CompileAnalysis, MiniSetup, MiniOptions } import scala.annotation.nowarn @@ -143,6 +143,12 @@ object RemoteCache { // base is used only to resolve relative paths, which should never happen IvyPaths(base.toString, localCacheDirectory.value.toString) }, + rootOutputDirectory := { + appConfiguration.value.baseDirectory + .toPath() + .resolve("target") + .resolve("out") + }, cacheStores := { List( DiskActionCacheStore(localCacheDirectory.value.toPath()) @@ -198,17 +204,20 @@ object RemoteCache { remoteCachePom / pushRemoteCacheArtifact := true, remoteCachePom := { val s = streams.value + val converter = fileConverter.value val config = (remoteCachePom / makePomConfiguration).value val publisher = Keys.publisher.value publisher.makePomFile((pushRemoteCache / ivyModule).value, config, s.log) - config.file.get + converter.toVirtualFile(config.file.get.toPath) }, remoteCachePom / artifactPath := { Defaults.prefixArtifactPathSetting(makePom / artifact, "remote-cache").value }, remoteCachePom / makePomConfiguration := { + val converter = fileConverter.value val config = makePomConfiguration.value - config.withFile((remoteCachePom / artifactPath).value) + val out = converter.toPath((remoteCachePom / artifactPath).value) + config.withFile(out.toFile()) }, remoteCachePom / remoteCacheArtifact := { PomRemoteCacheArtifact((makePom / artifact).value, remoteCachePom) @@ -259,17 +268,20 @@ object RemoteCache { inTask(packageCache)( Seq( packageCache.in(Defaults.TaskZero) := { + val 
converter = fileConverter.value val original = packageBin.in(Defaults.TaskZero).value + val originalFile = converter.toPath(original) val artp = artifactPath.value + val artpFile = converter.toPath(artp) val af = compileAnalysisFile.value - IO.copyFile(original, artp) + IO.copyFile(originalFile.toFile(), artpFile.toFile()) // skip zip manipulation if the artp is a blank file - if (af.exists && artp.length() > 0) { - JarUtils.includeInJar(artp, Vector(af -> s"META-INF/inc_compile.zip")) + if (af.exists && artpFile.toFile().length() > 0) { + JarUtils.includeInJar(artpFile.toFile(), Vector(af -> s"META-INF/inc_compile.zip")) } val rf = getResourceFilePaths().value if (rf.exists) { - JarUtils.includeInJar(artp, Vector(rf -> s"META-INF/copy-resources.txt")) + JarUtils.includeInJar(artpFile.toFile(), Vector(rf -> s"META-INF/copy-resources.txt")) } // val testStream = (test / streams).?.value // testStream foreach { s => @@ -319,12 +331,17 @@ object RemoteCache { combineHash(extractHash(inputs) ++ extractHash(cp) ++ extraInc) }, pushRemoteCacheConfiguration := { + val converter = fileConverter.value + val artifacts = (pushRemoteCacheConfiguration / packagedArtifacts).value.toVector.map { + case (a, vf) => + a -> converter.toPath(vf).toFile + } Classpaths.publishConfig( (pushRemoteCacheConfiguration / publishMavenStyle).value, Classpaths.deliverPattern(crossTarget.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (pushRemoteCacheConfiguration / packagedArtifacts).value.toVector, + artifacts, (pushRemoteCacheConfiguration / checksums).value.toVector, Classpaths.getPublishTo(pushRemoteCacheTo.value).name, ivyLoggingLevel.value, @@ -536,12 +553,12 @@ object RemoteCache { // } } - private def defaultArtifactTasks: Seq[TaskKey[File]] = + private def defaultArtifactTasks: Seq[TaskKey[VirtualFileRef]] = Seq(Compile / packageCache, Test / packageCache) private def enabledOnly[A]( key: SettingKey[A], - 
pkgTasks: Seq[TaskKey[File]] + pkgTasks: Seq[TaskKey[VirtualFileRef]] ): Def.Initialize[Seq[A]] = (Classpaths.forallIn(key, pkgTasks) zipWith Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) => diff --git a/main/src/main/scala/sbt/internal/ClasspathImpl.scala b/main/src/main/scala/sbt/internal/ClasspathImpl.scala index 44d66dc0a4..51e296ea51 100644 --- a/main/src/main/scala/sbt/internal/ClasspathImpl.scala +++ b/main/src/main/scala/sbt/internal/ClasspathImpl.scala @@ -23,6 +23,7 @@ import sbt.librarymanagement.Configurations.names import sbt.std.TaskExtra._ import sbt.util._ import scala.collection.JavaConverters._ +import xsbti.VirtualFileRef import xsbti.compile.CompileAnalysis private[sbt] object ClasspathImpl { @@ -55,8 +56,9 @@ private[sbt] object ClasspathImpl { val art = (packageBin / artifact).value val module = projectID.value val config = configuration.value + val converter = fileConverter.value for { (f, analysis) <- trackedExportedProductsImplTask(track).value } yield APIMappings - .store(analyzed(f, analysis), apiURL.value) + .store(analyzed(converter.toPath(f).toFile(), analysis), apiURL.value) .put(Keys.artifactStr, RemoteCache.artifactToStr(art)) .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module)) .put(Keys.configurationStr, config.name) @@ -68,8 +70,9 @@ private[sbt] object ClasspathImpl { val art = (packageBin / artifact).value val module = projectID.value val config = configuration.value + val converter = fileConverter.value for { (f, analysis) <- trackedJarProductsImplTask(track).value } yield APIMappings - .store(analyzed(f, analysis), apiURL.value) + .store(analyzed(converter.toPath(f).toFile(), analysis), apiURL.value) .put(Keys.artifactStr, RemoteCache.artifactToStr(art)) .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module)) .put(Keys.configurationStr, config.name) @@ -77,7 +80,7 @@ private[sbt] object ClasspathImpl { private[this] def 
trackedExportedProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + ): Initialize[Task[Seq[(VirtualFileRef, CompileAnalysis)]]] = Def.taskIf { if { val _ = (packageBin / dynamicDependency).value @@ -88,7 +91,7 @@ private[sbt] object ClasspathImpl { private[this] def trackedNonJarProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + ): Initialize[Task[Seq[(VirtualFileRef, CompileAnalysis)]]] = (Def .task { val dirs = productDirectories.value @@ -98,41 +101,54 @@ private[sbt] object ClasspathImpl { .flatMapTask { case (TrackLevel.TrackAlways, _, _) => Def.task { - products.value map { (_, compile.value) } + val converter = fileConverter.value + val a = compile.value + products.value + .map { x => converter.toVirtualFile(x.toPath()) } + .map { (_, a) } } case (TrackLevel.TrackIfMissing, dirs, view) if view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).isEmpty => Def.task { - products.value map { (_, compile.value) } + val converter = fileConverter.value + val a = compile.value + products.value + .map { x => converter.toVirtualFile(x.toPath()) } + .map { (_, a) } } case (_, dirs, _) => Def.task { + val converter = fileConverter.value val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty) - dirs.map(_ -> analysis) + dirs + .map { x => converter.toVirtualFile(x.toPath()) } + .map(_ -> analysis) } } private[this] def trackedJarProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + ): Initialize[Task[Seq[(VirtualFileRef, CompileAnalysis)]]] = (Def .task { - val jar = (packageBin / artifactPath).value - (TrackLevel.intersection(track, exportToInternal.value), jar) + val converter = fileConverter.value + val vf = (packageBin / artifactPath).value + val jar = converter.toPath(vf) + (TrackLevel.intersection(track, exportToInternal.value), vf, jar) }) .flatMapTask { - case (TrackLevel.TrackAlways, _) => + case (TrackLevel.TrackAlways, 
_, _) => Def.task { Seq((packageBin.value, compile.value)) } - case (TrackLevel.TrackIfMissing, jar) if !jar.exists => + case (TrackLevel.TrackIfMissing, _, jar) if !jar.toFile().exists => Def.task { Seq((packageBin.value, compile.value)) } - case (_, jar) => + case (_, vf, _) => Def.task { val analysisOpt = previousCompile.value.analysis.toOption - Seq(jar) map { x => + Seq(vf) map { x => ( x, if (analysisOpt.isDefined) analysisOpt.get