diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
index 44498082c697..36e95c788086 100644
--- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
+++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
@@ -1,6 +1,6 @@
 package dotty.tools.backend.jvm
 
-import java.io.{DataOutputStream, IOException, BufferedOutputStream, FileOutputStream}
+import java.io.{DataOutputStream, File, IOException, BufferedOutputStream, FileOutputStream}
 import java.nio.ByteBuffer
 import java.nio.channels.{ClosedByInterruptException, FileChannel}
 import java.nio.charset.StandardCharsets.UTF_8
@@ -12,7 +12,7 @@ import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream}
 
 import dotty.tools.dotc.core.Contexts.*
 import dotty.tools.dotc.core.Decorators.em
-import dotty.tools.io.{AbstractFile, PlainFile}
+import dotty.tools.io.{AbstractFile, PlainFile, VirtualFile}
 import dotty.tools.io.PlainFile.toPlainFile
 import BTypes.InternalName
 import scala.util.chaining.*
@@ -26,7 +26,6 @@ import scala.language.unsafeNulls
  * Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well.
  */
 class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
-  type NullableFile = AbstractFile | Null
   import frontendAccess.{compilerSettings, backendReporting}
 
   sealed trait TastyWriter {
@@ -46,7 +45,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
     /**
      * Write a classfile
      */
-    def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile
+    def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile
 
 
     /**
@@ -91,7 +90,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
   }
 
   private final class SingleClassWriter(underlying: FileWriter) extends ClassfileWriter {
-    override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile = {
+    override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile = {
       underlying.writeFile(classRelativePath(className), bytes)
     }
     override def writeTasty(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = {
@@ -103,7 +102,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
   }
 
   private final class DebugClassWriter(basic: ClassfileWriter, dump: FileWriter) extends ClassfileWriter {
-    override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile = {
+    override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile = {
       val outFile = basic.writeClass(className, bytes, sourceFile)
       dump.writeFile(classRelativePath(className), bytes)
       outFile
@@ -121,7 +120,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
   }
 
   sealed trait FileWriter {
-    def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile
+    def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile
     def close(): Unit
   }
 
@@ -165,7 +164,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
 
     lazy val crc = new CRC32
 
-    override def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile = this.synchronized {
+    override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = this.synchronized {
       val entry = new ZipEntry(relativePath)
       if (storeOnly) {
         // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/
@@ -182,7 +181,13 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
       jarWriter.putNextEntry(entry)
       try jarWriter.write(bytes, 0, bytes.length)
      finally jarWriter.flush()
-      null
+      // important detail here, even on Windows, Zinc expects the separator within the jar
+      // to be the system default, (even if in the actual jar file the entry always uses '/').
+      // see https://github.com/sbt/zinc/blob/dcddc1f9cfe542d738582c43f4840e17c053ce81/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala#L47
+      val pathInJar =
+        if File.separatorChar == '/' then relativePath
+        else relativePath.replace('/', File.separatorChar)
+      PlainFile.toPlainFile(Paths.get(s"${file.absolutePath}!$pathInJar"))
     }
 
     override def close(): Unit = this.synchronized(jarWriter.close())
@@ -230,7 +235,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
     private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)
     private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)
 
-    override def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile = {
+    override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = {
       val path = base.resolve(relativePath)
       try {
         ensureDirForPath(base, path)
@@ -279,7 +284,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
       finally out.close()
     }
 
-    override def writeFile(relativePath: String, bytes: Array[Byte]):NullableFile = {
+    override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = {
       val outFile = getFile(base, relativePath)
       writeBytes(outFile, bytes)
       outFile
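
The hunks above make the jar-backed `FileWriter` report each written classfile as a `<jar>!<entry>` path whose part after the `!` uses the platform separator, matching what Zinc's `JarUtils` expects. A minimal standalone sketch of that convention (the names `entryPathReportedToZinc`, `outputJar`, `relativeEntry` and `demo` are illustrative, not part of the patch):

```scala
import java.io.File
import java.nio.file.{Path, Paths}

// Standalone sketch of the `jar!entry` convention: the entry inside the jar is
// always stored with '/', but the path reported back to Zinc uses the platform
// separator after the '!'.
def entryPathReportedToZinc(outputJar: Path, relativeEntry: String): Path =
  val pathInJar =
    if File.separatorChar == '/' then relativeEntry
    else relativeEntry.replace('/', File.separatorChar)
  Paths.get(s"${outputJar.toAbsolutePath}!$pathInJar")

@main def demo(): Unit =
  // e.g. on Linux: /tmp/out/classes.jar!example/A.class
  println(entryPathReportedToZinc(Paths.get("/tmp/out/classes.jar"), "example/A.class"))
```

Inside the jar itself the entry keeps '/' (that is what the `ZipEntry` records); only the path handed back to the incremental compiler is rewritten.
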
diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala
index 45c6d6ecad44..06c3c7f1cb4f 100644
--- a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala
+++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala
@@ -44,11 +44,11 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes:
         backendReporting.error(em"Error while emitting $internalName\n${ex.getMessage}")
         null
 
-    if bytes != null then
-      if (AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern))
-        AsmUtils.traceClass(bytes)
-      val clsFile = classfileWriter.writeClass(internalName, bytes, sourceFile)
-      if clsFile != null then clazz.onFileCreated(clsFile)
+    if bytes != null then
+      if AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern) then
+        AsmUtils.traceClass(bytes)
+      val clsFile = classfileWriter.writeClass(internalName, bytes, sourceFile)
+      clazz.onFileCreated(clsFile)
   }
 
   def sendToDisk(tasty: GeneratedTasty, sourceFile: AbstractFile): Unit = {
diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala
index 728f89966af0..c396699f93b3 100644
--- a/compiler/src/dotty/tools/io/JarArchive.scala
+++ b/compiler/src/dotty/tools/io/JarArchive.scala
@@ -10,11 +10,13 @@ import scala.jdk.CollectionConverters.*
  * This class implements an [[AbstractFile]] backed by a jar
  * that be can used as the compiler's output directory.
  */
-class JarArchive private (root: Directory) extends PlainDirectory(root) {
+class JarArchive private (val jarPath: Path, root: Directory) extends PlainDirectory(root) {
   def close(): Unit = this.synchronized(jpath.getFileSystem().close())
   override def exists: Boolean = jpath.getFileSystem().isOpen() && super.exists
   def allFileNames(): Iterator[String] =
     java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString)
+
+  override def toString: String = jarPath.toString
 }
 
 object JarArchive {
@@ -40,6 +42,6 @@ object JarArchive {
       }
     }
     val root = fs.getRootDirectories().iterator.next()
-    new JarArchive(Directory(root))
+    new JarArchive(path, Directory(root))
   }
 }
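
With the extra constructor parameter above, a `JarArchive` now remembers which jar it wraps, and `toString` reports that path instead of the virtual root of the underlying zip filesystem. A hedged sketch of the observable difference, assuming the existing `JarArchive.open(path, create)` factory and an illustrative `classes.jar` path:

```scala
import dotty.tools.io.{JarArchive, Path}

// Sketch only: shows the new `jarPath` field and `toString` behaviour.
// `JarArchive.open` and the jar path used here are assumptions for illustration.
@main def showJarArchivePath(): Unit =
  val archive = JarArchive.open(Path("classes.jar"), create = true)
  try
    println(archive.jarPath)  // classes.jar
    println(archive)          // same string, handy when the backend reports output locations
  finally archive.close()
```
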
diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala
index e47371175de6..0abefe2985c3 100644
--- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala
+++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala
@@ -1,7 +1,6 @@
 package xsbt
 
 import xsbti.UseScope
-import ScalaCompilerForUnitTesting.Callbacks
 
 import org.junit.{ Test, Ignore }
 import org.junit.Assert._
@@ -227,9 +226,9 @@ class ExtractUsedNamesSpecification {
 
   def findPatMatUsages(in: String): Set[String] = {
     val compilerForTesting = new ScalaCompilerForUnitTesting
-    val (_, Callbacks(callback, _)) =
+    val output =
       compilerForTesting.compileSrcs(List(List(sealedClass, in)))
-    val clientNames = callback.usedNamesAndScopes.view.filterKeys(!_.startsWith("base."))
+    val clientNames = output.analysis.usedNamesAndScopes.view.filterKeys(!_.startsWith("base."))
 
     val names: Set[String] = clientNames.flatMap {
       case (_, usages) =>
diff --git a/sbt-bridge/test/xsbt/ProductsSpecification.scala b/sbt-bridge/test/xsbt/ProductsSpecification.scala
new file mode 100644
index 000000000000..b13defecc4cc
--- /dev/null
+++ b/sbt-bridge/test/xsbt/ProductsSpecification.scala
@@ -0,0 +1,34 @@
+package xsbt
+
+import org.junit.Assert.*
+import org.junit.Ignore
+import org.junit.Test
+
+import java.io.File
+import java.nio.file.Path
+import java.nio.file.Paths
+
+class ProductsSpecification {
+
+  @Test
+  def extractProductsFromJar = {
+    val src =
+      """package example
+        |
+        |class A {
+        |  class B
+        |  def foo =
+        |    class C
+        |}""".stripMargin
+    val output = compiler.compileSrcsToJar(src)
+    val srcFile = output.srcFiles.head
+    val products = output.analysis.productClassesToSources.filter(_._2 == srcFile).keys.toSet
+
+    def toPathInJar(className: String): Path =
+      Paths.get(s"${output.classesOutput}!${className.replace('.', File.separatorChar)}.class")
+    val expected = Set("example.A", "example.A$B", "example.A$C$1").map(toPathInJar)
+    assertEquals(products, expected)
+  }
+
+  private def compiler = new ScalaCompilerForUnitTesting
+}
\ No newline at end of file
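
The new test above drives the jar-output path end to end through `compileSrcsToJar`, which is added to `ScalaCompilerForUnitTesting` in the diff below. A hedged usage sketch of that helper and the `CompileOutput` it returns (the class name and the assertion here are illustrative, not part of the patch):

```scala
package xsbt

import org.junit.Assert.assertTrue
import org.junit.Test

// Illustrative sketch only: exercises the CompileOutput API introduced below.
// `compileSrcsToJar` compiles into a temporary `classes.jar`, so every product
// path is expected to take the `<jar>!<entry>` form.
class JarOutputUsageExample {
  @Test
  def productsPointIntoTheJar(): Unit = {
    val output = new ScalaCompilerForUnitTesting().compileSrcsToJar("class Foo")
    val products = output.analysis.productClassesToSources.keys
    assertTrue(products.nonEmpty && products.forall(_.toString.contains("classes.jar!")))
  }
}
```

Returning a single `CompileOutput` value instead of the old `(files, Callbacks)` tuple is what lets tests reach the `classesOutput` location alongside the analysis callback.
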
diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
index f17be692ee50..fd125f25560b 100644
--- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
+++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
@@ -1,22 +1,19 @@
 /** Adapted from https://github.com/sbt/sbt/blob/0.13/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala */
 package xsbt
 
-import xsbti.compile.{CompileProgress, SingleOutput}
-import java.io.File
-import xsbti._
-import sbt.io.IO
-import xsbti.api.{ ClassLike, Def, DependencyContext }
-import DependencyContext._
-import xsbt.api.SameAPI
-import sbt.internal.util.ConsoleLogger
-import dotty.tools.io.PlainFile.toPlainFile
 import dotty.tools.xsbt.CompilerBridge
+import sbt.io.IO
+import xsbti.*
+import xsbti.api.ClassLike
+import xsbti.api.DependencyContext.*
+import xsbti.compile.SingleOutput
+
+import java.io.File
+import java.nio.file.Path
 
 import TestCallback.ExtractedClassDependencies
-import ScalaCompilerForUnitTesting.Callbacks
 
-object ScalaCompilerForUnitTesting:
-  case class Callbacks(analysis: TestCallback, progress: TestCompileProgress)
+case class CompileOutput(srcFiles: Seq[VirtualFileRef], classesOutput: Path, analysis: TestCallback, progress: TestCompileProgress)
 
 /**
  * Provides common functionality needed for unit tests that require compiling
@@ -25,29 +22,24 @@ object ScalaCompilerForUnitTesting:
 class ScalaCompilerForUnitTesting {
 
   def extractEnteredPhases(srcs: String*): Seq[List[String]] = {
-    val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(srcs*)
-    val run = testProgress.runs.head
-    tempSrcFiles.map(src => run.unitPhases(src.id))
+    val output = compileSrcs(srcs*)
+    val run = output.progress.runs.head
+    output.srcFiles.map(src => run.unitPhases(src.id))
   }
 
-  def extractTotal(srcs: String*)(extraSourcePath: String*): Int = {
-    val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(List(srcs.toList), extraSourcePath.toList)
-    val run = testProgress.runs.head
-    run.total
-  }
+  def extractTotal(srcs: String*)(extraSourcePath: String*): Int =
+    compileSrcs(List(srcs.toList), extraSourcePath.toList).progress.runs.head.total
 
-  def extractProgressPhases(srcs: String*): List[String] = {
-    val (_, Callbacks(_, testProgress)) = compileSrcs(srcs*)
-    testProgress.runs.head.phases
-  }
+  def extractProgressPhases(srcs: String*): List[String] =
+    compileSrcs(srcs*).progress.runs.head.phases
 
   /**
    * Compiles given source code using Scala compiler and returns API representation
    * extracted by ExtractAPI class.
    */
   def extractApiFromSrc(src: String): Seq[ClassLike] = {
-    val (Seq(tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(src)
-    analysisCallback.apis(tempSrcFile)
+    val output = compileSrcs(src)
+    output.analysis.apis(output.srcFiles.head)
   }
 
   /**
@@ -55,8 +47,8 @@ class ScalaCompilerForUnitTesting {
    * extracted by ExtractAPI class.
    */
   def extractApisFromSrcs(srcs: List[String]*): Seq[Seq[ClassLike]] = {
-    val (tempSrcFiles, Callbacks(analysisCallback, _)) = compileSrcs(srcs.toList)
-    tempSrcFiles.map(analysisCallback.apis)
+    val output = compileSrcs(srcs.toList)
+    output.srcFiles.map(output.analysis.apis)
   }
 
   /**
@@ -73,15 +65,16 @@ class ScalaCompilerForUnitTesting {
     assertDefaultScope: Boolean = true
   ): Map[String, Set[String]] = {
     // we drop temp src file corresponding to the definition src file
-    val (Seq(_, tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(definitionSrc, actualSrc)
+    val output = compileSrcs(definitionSrc, actualSrc)
+    val analysis = output.analysis
     if (assertDefaultScope) for {
-      (className, used) <- analysisCallback.usedNamesAndScopes
-      analysisCallback.TestUsedName(name, scopes) <- used
+      (className, used) <- analysis.usedNamesAndScopes
+      analysis.TestUsedName(name, scopes) <- used
     } assert(scopes.size() == 1 && scopes.contains(UseScope.Default), s"$className uses $name in $scopes")
 
-    val classesInActualSrc = analysisCallback.classNames(tempSrcFile).map(_._1)
-    classesInActualSrc.map(className => className -> analysisCallback.usedNames(className)).toMap
+    val classesInActualSrc = analysis.classNames(output.srcFiles.head).map(_._1)
+    classesInActualSrc.map(className => className -> analysis.usedNames(className)).toMap
   }
 
   /**
@@ -91,11 +84,11 @@ class ScalaCompilerForUnitTesting {
    * Only the names used in the last src file are returned.
    */
   def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = {
-    val (srcFiles, Callbacks(analysisCallback, _)) = compileSrcs(sources*)
-    srcFiles
+    val output = compileSrcs(sources*)
+    output.srcFiles
       .map { srcFile =>
-        val classesInSrc = analysisCallback.classNames(srcFile).map(_._1)
-        classesInSrc.map(className => className -> analysisCallback.usedNames(className)).toMap
+        val classesInSrc = output.analysis.classNames(srcFile).map(_._1)
+        classesInSrc.map(className => className -> output.analysis.usedNames(className)).toMap
       }
       .reduce(_ ++ _)
   }
@@ -113,15 +106,15 @@ class ScalaCompilerForUnitTesting {
    * file system-independent way of testing dependencies between source code "files".
    */
   def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = {
-    val (_, Callbacks(testCallback, _)) = compileSrcs(srcs)
+    val analysis = compileSrcs(srcs).analysis
 
-    val memberRefDeps = testCallback.classDependencies collect {
+    val memberRefDeps = analysis.classDependencies collect {
      case (target, src, DependencyByMemberRef) => (src, target)
     }
-    val inheritanceDeps = testCallback.classDependencies collect {
+    val inheritanceDeps = analysis.classDependencies collect {
      case (target, src, DependencyByInheritance) => (src, target)
     }
-    val localInheritanceDeps = testCallback.classDependencies collect {
+    val localInheritanceDeps = analysis.classDependencies collect {
      case (target, src, LocalDependencyByInheritance) => (src, target)
     }
     ExtractedClassDependencies.fromPairs(memberRefDeps, inheritanceDeps, localInheritanceDeps)
@@ -142,12 +135,20 @@ class ScalaCompilerForUnitTesting {
    * The sequence of temporary files corresponding to passed snippets and analysis
    * callback is returned as a result.
    */
-  def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil): (Seq[VirtualFile], Callbacks) = {
+  def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil, compileToJar: Boolean = false): CompileOutput = {
     val temp = IO.createTemporaryDirectory
     val analysisCallback = new TestCallback
     val testProgress = new TestCompileProgress
-    val classesDir = new File(temp, "classes")
-    classesDir.mkdir()
+    val classesOutput =
+      if (compileToJar) {
+        val jar = new File(temp, "classes.jar")
+        jar.createNewFile()
+        jar
+      } else {
+        val dir = new File(temp, "classes")
+        dir.mkdir()
+        dir
+      }
 
     val bridge = new CompilerBridge
@@ -164,16 +165,16 @@ class ScalaCompilerForUnitTesting {
     }
 
     val virtualSrcFiles = srcFiles.toArray
-    val classesDirPath = classesDir.getAbsolutePath.toString
+    val classesOutputPath = classesOutput.getAbsolutePath()
     val output = new SingleOutput:
-      def getOutputDirectory() = classesDir
+      def getOutputDirectory() = classesOutput
 
     val maybeSourcePath = if extraFiles.isEmpty then Nil else List("-sourcepath", temp.getAbsolutePath.toString)
 
     bridge.run(
       virtualSrcFiles,
       new TestDependencyChanges,
-      Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath) ++ maybeSourcePath,
+      Array("-Yforce-sbt-phases", "-classpath", classesOutputPath, "-usejavacp", "-d", classesOutputPath) ++ maybeSourcePath,
       output,
       analysisCallback,
       new TestReporter,
@@ -185,13 +186,16 @@ class ScalaCompilerForUnitTesting {
       srcFiles
     }
 
-    (files.flatten.toSeq, Callbacks(analysisCallback, testProgress))
+    CompileOutput(files.flatten.toSeq, classesOutput.toPath, analysisCallback, testProgress)
   }
 
-  def compileSrcs(srcs: String*): (Seq[VirtualFile], Callbacks) = {
+  def compileSrcs(srcs: String*): CompileOutput = {
     compileSrcs(List(srcs.toList))
   }
 
+  def compileSrcsToJar(srcs: String*): CompileOutput =
+    compileSrcs(List(srcs.toList), compileToJar = true)
+
   private def prepareSrcFile(baseDir: File, fileName: String, src: String): VirtualFile = {
     val srcFile = new File(baseDir, fileName)
     IO.write(srcFile, src)