From ae8c08a8c395406fbb4517b05e30c1d2bdffa906 Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Tue, 27 Jun 2023 18:00:55 +0200
Subject: [PATCH] Remove unnecessary stdlib-bootstrapped overridden files

---
 project/MiMaFilters.scala | 27 +
 project/TastyMiMaFilters.scala | 48 +-
 .../src/scala/collection/View.scala | 535 ----------
 .../convert/JavaCollectionWrappers.scala | 606 ------------
 .../scala/collection/immutable/BitSet.scala | 357 -------
 .../scala/collection/immutable/TreeSet.scala | 296 ------
 .../src/scala/collection/mutable/BitSet.scala | 392 --------
 .../collection/mutable/LinkedHashMap.scala | 264 -----
 .../collection/mutable/LinkedHashSet.scala | 177 ----
 .../scala/collection/mutable/TreeMap.scala | 257 -----
 .../scala/concurrent/duration/Duration.scala | 742 --------------
 .../src/scala/math/BigDecimal.scala | 721 --------------
 .../src/scala/math/Ordering.scala | 927 ------------------
 .../sys/process/ProcessBuilderImpl.scala | 274 ------
 14 files changed, 62 insertions(+), 5561 deletions(-)
 delete mode 100644 stdlib-bootstrapped/src/scala/collection/View.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/collection/convert/JavaCollectionWrappers.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/collection/immutable/BitSet.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/collection/immutable/TreeSet.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/collection/mutable/BitSet.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/collection/mutable/LinkedHashMap.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/collection/mutable/LinkedHashSet.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/collection/mutable/TreeMap.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/concurrent/duration/Duration.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/math/BigDecimal.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/math/Ordering.scala
 delete mode 100644 stdlib-bootstrapped/src/scala/sys/process/ProcessBuilderImpl.scala

diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala
index 40bd7c1fce39..ae09c3ad93e3 100644
--- a/project/MiMaFilters.scala
+++ b/project/MiMaFilters.scala
@@ -26,6 +26,27 @@ object MiMaFilters {
   // Files that are not compiled in the bootstrapped library
   ProblemFilters.exclude[MissingClassProblem]("scala.AnyVal"),
+ // Inferred result type of non-private member differs (fix in Scala 2)
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#IteratorWrapper.remove"),
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JCollectionWrapper.iterableFactory"),
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JDictionaryWrapper.mapFactory"),
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JIterableWrapper.iterableFactory"),
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JListWrapper.iterableFactory"),
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JPropertiesWrapper.mapFactory"),
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.BitSet.bitSetFactory"),
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.TreeSet.sortedIterableFactory"),
+ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.mutable.BitSet.bitSetFactory"),
+ 
ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.mutable.TreeMap.sortedMapFactory"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.View#LeftPartitionMapped.iterator"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.View#RightPartitionMapped.iterator"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.concurrent.duration.FiniteDuration.unary_-"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.math.BigDecimal.underlying"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.math.Ordering.tryCompare"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.empty"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.convert.JavaCollectionWrappers#JMapWrapper.empty"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.convert.JavaCollectionWrappers#JPropertiesWrapper.empty"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.LinkedHashMap.newBuilder"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.LinkedHashSet.newBuilder"), // Scala language features ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language."), @@ -124,6 +145,12 @@ object MiMaFilters { ProblemFilters.exclude[MissingFieldProblem]("scala.language.experimental"), ProblemFilters.exclude[MissingFieldProblem]("scala.languageFeature*"), + // Inferred result type of non-private member differs (fix in Scala 2) + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.empty"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.convert.JavaCollectionWrappers#JMapWrapper.empty"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.convert.JavaCollectionWrappers#JPropertiesWrapper.empty"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.math.Ordering.tryCompare"), + // https://github.com/scala/scala/blob/v2.13.10/src/library/scala/collection/immutable/Range.scala#LL155C1-L156C1 // Issue #17519: we do not set final on the default methods of final copy method. 
ProblemFilters.exclude[FinalMethodProblem]("scala.collection.immutable.Range.copy$default$*"), diff --git a/project/TastyMiMaFilters.scala b/project/TastyMiMaFilters.scala index f8fc34116a52..12d22563dd93 100644 --- a/project/TastyMiMaFilters.scala +++ b/project/TastyMiMaFilters.scala @@ -3,6 +3,41 @@ import tastymima.intf._ object TastyMiMaFilters { val StdlibBootstrapped: java.util.List[ProblemMatcher] = asList( + // Ok (needs library from 2.13.12): Inferred result type of non-private member differs + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.*.elemTag"), // Fix in https://github.com/scala/scala/pull/10444 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.convert.JavaCollectionWrappers.JCollectionWrapper.iterableFactory"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.convert.JavaCollectionWrappers.JConcurrentMapWrapper.empty"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.convert.JavaCollectionWrappers.JDictionaryWrapper.mapFactory"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.convert.JavaCollectionWrappers.JIterableWrapper.iterableFactory"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.convert.JavaCollectionWrappers.JListWrapper.iterableFactory"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.convert.JavaCollectionWrappers.JMapWrapper.empty"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.convert.JavaCollectionWrappers.JPropertiesWrapper.empty"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.convert.JavaCollectionWrappers.JPropertiesWrapper.mapFactory"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.immutable.TreeSet.sortedIterableFactory"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.TreeMap.sortedMapFactory"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.UnrolledBuffer.classTagCompanion"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.View.LeftPartitionMapped.iterator"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.View.RightPartitionMapped.iterator"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.concurrent.duration.FiniteDuration.unary_-"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.reflect.ManifestFactory.*.runtimeClass"), // Fix in https://github.com/scala/scala/pull/10444 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.sys.process.ProcessBuilderImpl.AbstractBuilder.toSink"), // Fix in https://github.com/scala/scala/pull/10435 + 
ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.sys.process.ProcessBuilderImpl.AbstractBuilder.toSource"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.sys.process.ProcessBuilderImpl.FileImpl.toSink"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.sys.process.ProcessBuilderImpl.FileImpl.toSource"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.sys.process.ProcessBuilderImpl.URLImpl.toSource"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.concurrent.FailedNode.string"), // Fix in https://github.com/scala/scala/pull/10444 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.convert.JavaCollectionWrappers.IteratorWrapper.remove"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.mutable.LinkedHashMap.newBuilder"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.mutable.LinkedHashSet.newBuilder"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.math.Ordering.tryCompare"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.sys.process.ProcessBuilderImpl.AndBuilder.createProcess"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.sys.process.ProcessBuilderImpl.OrBuilder.createProcess"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.sys.process.ProcessBuilderImpl.PipedBuilder.createProcess"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.sys.process.ProcessBuilderImpl.SequenceBuilder.createProcess"), // Fix in https://github.com/scala/scala/pull/10435 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.immutable.BitSet.bitSetFactory"), // Fix in https://github.com/scala/scala/pull/10444 + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.BitSet.bitSetFactory"), // Fix in https://github.com/scala/scala/pull/10444 + // Probably OK ProblemMatcher.make(ProblemKind.IncompatibleSelfTypeChange, "scala.*"), @@ -12,10 +47,6 @@ object TastyMiMaFilters { // Problem: Missing Serializable in companions of serializable classes ProblemMatcher.make(ProblemKind.MissingParent, "scala.*$"), - // Problem: Class[T] or ClassTag[T] with `T` equal to wildcard `_ >: Nothing <: AnyVal` instead of a specific primitive type `T` - ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.reflect.ManifestFactory.*.runtimeClass"), - ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.*.elemTag"), - // Problem: ConstantType for `null` versus `scala.Null` ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.UnrolledBuffer.Unrolled.$default$4"), @@ -30,11 +61,6 @@ object TastyMiMaFilters { ProblemMatcher.make(ProblemKind.MissingTermMember, 
"scala.math.Big*.underlying"), ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.math.ScalaNumericConversions.underlying"), - // Problem: Inferred result type of non-private member differs - ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator"), - ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.UnrolledBuffer.classTagCompanion"), - ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.concurrent.FailedNode.string"), - // Problem: super accessors ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.*.superscala$*$*$$*"), // The member scala.collection.mutable.Cloneable.superscala$collection$mutable$Cloneable$$clone was concrete or did not exist but is abstract in current version @@ -47,10 +73,6 @@ object TastyMiMaFilters { // TASTy-MiMa bug? Wildcards in self type ProblemMatcher.make(ProblemKind.MissingTypeMember, "scala.collection.generic.DefaultSerializable._$1"), - // TASTy-MiMa bug? module classes - ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.immutable.BitSet.bitSetFactory"), // The symbol scala.collection.immutable.BitSet.bitSetFactory has an incompatible type in current version: before: scala.collection.immutable.BitSet$; after: scala.collection.immutable.BitSet.type - ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.BitSet.bitSetFactory"), // The symbol scala.collection.mutable.BitSet.bitSetFactory has an incompatible type in current version: before: scala.collection.mutable.BitSet$; after: scala.collection.mutable.BitSet.type - // TASTy-MiMa bugs ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.appendedAll"), ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.concat"), diff --git a/stdlib-bootstrapped/src/scala/collection/View.scala b/stdlib-bootstrapped/src/scala/collection/View.scala deleted file mode 100644 index 87849744a5dc..000000000000 --- a/stdlib-bootstrapped/src/scala/collection/View.scala +++ /dev/null @@ -1,535 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.annotation.{nowarn, tailrec} -import scala.collection.mutable.{ArrayBuffer, Builder} -import scala.collection.immutable.LazyList - -/** Views are collections whose transformation operations are non strict: the resulting elements - * are evaluated only when the view is effectively traversed (e.g. using `foreach` or `foldLeft`), - * or when the view is converted to a strict collection type (using the `to` operation). 
- * @define coll view - * @define Coll `View` - */ -trait View[+A] extends Iterable[A] with IterableOps[A, View, View[A]] with IterableFactoryDefaults[A, View] with Serializable { - - override def view: View[A] = this - - override def iterableFactory: IterableFactory[View] = View - - override def empty: scala.collection.View[A] = iterableFactory.empty - - override def toString: String = className + "()" - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "View" - - @deprecated("Views no longer know about their underlying collection type; .force always returns an IndexedSeq", "2.13.0") - @`inline` def force: IndexedSeq[A] = toIndexedSeq -} - -/** This object reifies operations on views as case classes - * - * @define Coll View - * @define coll view - */ -@SerialVersionUID(3L) -object View extends IterableFactory[View] { - - /** - * @return A `View[A]` whose underlying iterator is provided by the `it` parameter-less function. - * - * @param it Function creating the iterator to be used by the view. This function must always return - * a fresh `Iterator`, otherwise the resulting view will be effectively iterable only once. - * - * @tparam A View element type - */ - def fromIteratorProvider[A](it: () => Iterator[A]): View[A] = new AbstractView[A] { - def iterator = it() - } - - /** - * @return A view iterating over the given `Iterable` - * - * @param it The `IterableOnce` to view. A proper `Iterable` is used directly. If it is really only - * `IterableOnce` it gets memoized on the first traversal. - * - * @tparam E View element type - */ - def from[E](it: IterableOnce[E]): View[E] = it match { - case it: View[E] => it - case it: Iterable[E] => View.fromIteratorProvider(() => it.iterator) - case _ => LazyList.from(it).view - } - - def empty[A]: View[A] = Empty - - def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) - - override def apply[A](xs: A*): View[A] = new Elems(xs: _*) - - /** The empty view */ - @SerialVersionUID(3L) - case object Empty extends AbstractView[Nothing] { - def iterator = Iterator.empty - override def knownSize = 0 - override def isEmpty: Boolean = true - } - - /** A view with exactly one element */ - @SerialVersionUID(3L) - class Single[A](a: A) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.single(a) - override def knownSize: Int = 1 - override def isEmpty: Boolean = false - } - - /** A view with given elements */ - @SerialVersionUID(3L) - class Elems[A](xs: A*) extends AbstractView[A] { - def iterator = xs.iterator - override def knownSize = xs.knownSize - override def isEmpty: Boolean = xs.isEmpty - } - - /** A view containing the results of some element computation a number of times. */ - @SerialVersionUID(3L) - class Fill[A](n: Int)(elem: => A) extends AbstractView[A] { - def iterator = Iterator.fill(n)(elem) - override def knownSize: Int = 0 max n - override def isEmpty: Boolean = n <= 0 - } - - /** A view containing values of a given function over a range of integer values starting from 0. 
*/ - @SerialVersionUID(3L) - class Tabulate[A](n: Int)(f: Int => A) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.tabulate(n)(f) - override def knownSize: Int = 0 max n - override def isEmpty: Boolean = n <= 0 - } - - /** A view containing repeated applications of a function to a start value */ - @SerialVersionUID(3L) - class Iterate[A](start: A, len: Int)(f: A => A) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.iterate(start)(f).take(len) - override def knownSize: Int = 0 max len - override def isEmpty: Boolean = len <= 0 - } - - /** A view that uses a function `f` to produce elements of type `A` and update - * an internal state `S`. - */ - @SerialVersionUID(3L) - class Unfold[A, S](initial: S)(f: S => Option[(A, S)]) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.unfold(initial)(f) - } - - /** An `IterableOps` whose collection type and collection type constructor are unknown */ - type SomeIterableOps[A] = IterableOps[A, AnyConstr, _] - - /** A view that filters an underlying collection. */ - @SerialVersionUID(3L) - class Filter[A](val underlying: SomeIterableOps[A], val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { - def iterator = underlying.iterator.filterImpl(p, isFlipped) - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - object Filter { - def apply[A](underlying: Iterable[A], p: A => Boolean, isFlipped: Boolean): Filter[A] = - underlying match { - case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) - case _ => new Filter(underlying, p, isFlipped) - } - } - - /** A view that removes the duplicated elements as determined by the transformation function `f` */ - @SerialVersionUID(3L) - class DistinctBy[A, B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.distinctBy(f) - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A1] { - def iterator: AbstractIterator[A1] = new AbstractIterator[A1] { - private[this] val self = underlying.iterator - private[this] var hd: A1 = _ - private[this] var hdDefined: Boolean = false - def hasNext = hdDefined || { - @tailrec - def findNext(): Boolean = - if (self.hasNext) { - f(self.next()) match { - case Left(a1) => hd = a1; hdDefined = true; true - case Right(_) => findNext() - } - } else false - findNext() - } - def next() = - if (hasNext) { - hdDefined = false - hd - } else Iterator.empty.next() - } - } - - @SerialVersionUID(3L) - class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A2] { - def iterator: AbstractIterator[A2] = new AbstractIterator[A2] { - private[this] val self = underlying.iterator - private[this] var hd: A2 = _ - private[this] var hdDefined: Boolean = false - def hasNext = hdDefined || { - @tailrec - def findNext(): Boolean = - if (self.hasNext) { - f(self.next()) match { - case Left(_) => findNext() - case Right(a2) => hd = a2; hdDefined = true; true - } - } else false - findNext() - } - def next() = - if (hasNext) { - hdDefined = false - hd - } else Iterator.empty.next() - } - } - - /** A view that drops leading elements of the 
underlying collection. */ - @SerialVersionUID(3L) - class Drop[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = underlying.iterator.drop(n) - protected val normN = n max 0 - override def knownSize = { - val size = underlying.knownSize - if (size >= 0) (size - normN) max 0 else -1 - } - override def isEmpty: Boolean = iterator.isEmpty - } - - /** A view that drops trailing elements of the underlying collection. */ - @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = dropRightIterator(underlying.iterator, n) - protected val normN = n max 0 - override def knownSize = { - val size = underlying.knownSize - if (size >= 0) (size - normN) max 0 else -1 - } - override def isEmpty: Boolean = - if(knownSize >= 0) knownSize == 0 - else iterator.isEmpty - } - - @SerialVersionUID(3L) - class DropWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { - def iterator = underlying.iterator.dropWhile(p) - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - /** A view that takes leading elements of the underlying collection. */ - @SerialVersionUID(3L) - class Take[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = underlying.iterator.take(n) - protected val normN = n max 0 - override def knownSize = { - val size = underlying.knownSize - if (size >= 0) size min normN else -1 - } - override def isEmpty: Boolean = iterator.isEmpty - } - - /** A view that takes trailing elements of the underlying collection. */ - @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = takeRightIterator(underlying.iterator, n) - protected val normN = n max 0 - override def knownSize = { - val size = underlying.knownSize - if (size >= 0) size min normN else -1 - } - override def isEmpty: Boolean = - if(knownSize >= 0) knownSize == 0 - else iterator.isEmpty - } - - @SerialVersionUID(3L) - class TakeWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.takeWhile(p) - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - @SerialVersionUID(3L) - class ScanLeft[+A, +B](underlying: SomeIterableOps[A], z: B, op: (B, A) => B) extends AbstractView[B] { - def iterator: Iterator[B] = underlying.iterator.scanLeft(z)(op) - override def knownSize: Int = { - val size = underlying.knownSize - if (size >= 0) size + 1 else -1 - } - override def isEmpty: Boolean = iterator.isEmpty - } - - /** A view that maps elements of the underlying collection. */ - @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[B] { - def iterator = underlying.iterator.map(f) - override def knownSize = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - /** A view that flatmaps elements of the underlying collection. 
*/ - @SerialVersionUID(3L) - class FlatMap[A, B](underlying: SomeIterableOps[A], f: A => IterableOnce[B]) extends AbstractView[B] { - def iterator = underlying.iterator.flatMap(f) - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - /** A view that collects elements of the underlying collection. */ - @SerialVersionUID(3L) - class Collect[+A, B](underlying: SomeIterableOps[A], pf: PartialFunction[A, B]) extends AbstractView[B] { - def iterator = underlying.iterator.collect(pf) - } - - /** A view that concatenates elements of the prefix collection or iterator with the elements - * of the suffix collection or iterator. - */ - @SerialVersionUID(3L) - class Concat[A](prefix: SomeIterableOps[A], suffix: SomeIterableOps[A]) extends AbstractView[A] { - def iterator = prefix.iterator ++ suffix.iterator - override def knownSize = { - val prefixSize = prefix.knownSize - if (prefixSize >= 0) { - val suffixSize = suffix.knownSize - if (suffixSize >= 0) prefixSize + suffixSize - else -1 - } - else -1 - } - override def isEmpty: Boolean = prefix.isEmpty && suffix.isEmpty - } - - /** A view that zips elements of the underlying collection with the elements - * of another collection. - */ - @SerialVersionUID(3L) - class Zip[A, B](underlying: SomeIterableOps[A], other: Iterable[B]) extends AbstractView[(A, B)] { - def iterator = underlying.iterator.zip(other) - override def knownSize = { - val s1 = underlying.knownSize - if (s1 == 0) 0 else { - val s2 = other.knownSize - if (s2 == 0) 0 else s1 min s2 - } - } - override def isEmpty: Boolean = underlying.isEmpty || other.isEmpty - } - - /** A view that zips elements of the underlying collection with the elements - * of another collection. If one of the two collections is shorter than the other, - * placeholder elements are used to extend the shorter collection to the length of the longer. 
- */ - @SerialVersionUID(3L) - class ZipAll[A, B](underlying: SomeIterableOps[A], other: Iterable[B], thisElem: A, thatElem: B) extends AbstractView[(A, B)] { - def iterator = underlying.iterator.zipAll(other, thisElem, thatElem) - override def knownSize = { - val s1 = underlying.knownSize - if(s1 == -1) -1 else { - val s2 = other.knownSize - if(s2 == -1) -1 else s1 max s2 - } - } - override def isEmpty: Boolean = underlying.isEmpty && other.isEmpty - } - - /** A view that appends an element to its elements */ - @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIterableOps[A], elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = new Concat(underlying, new View.Single(elem)).iterator - override def knownSize: Int = { - val size = underlying.knownSize - if (size >= 0) size + 1 else -1 - } - override def isEmpty: Boolean = false - } - - /** A view that prepends an element to its elements */ - @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIterableOps[A]) extends AbstractView[A] { - def iterator: Iterator[A] = new Concat(new View.Single(elem), underlying).iterator - override def knownSize: Int = { - val size = underlying.knownSize - if (size >= 0) size + 1 else -1 - } - override def isEmpty: Boolean = false - } - - @SerialVersionUID(3L) - class Updated[A](underlying: SomeIterableOps[A], index: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = new AbstractIterator[A] { - private[this] val it = underlying.iterator - private[this] var i = 0 - def next(): A = { - val value = if (i == index) { it.next(); elem } else it.next() - i += 1 - value - } - def hasNext: Boolean = - if(it.hasNext) true - else if(index >= i) throw new IndexOutOfBoundsException(index.toString) - else false - } - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - @SerialVersionUID(3L) - private[collection] class Patched[A](underlying: SomeIterableOps[A], from: Int, other: IterableOnce[A], replaced: Int) extends AbstractView[A] { - // we may be unable to traverse `other` more than once, so we need to cache it if that's the case - private val _other: Iterable[A] = other match { - case other: Iterable[A] => other - case other => LazyList.from(other) - } - - def iterator: Iterator[A] = underlying.iterator.patch(from, _other.iterator, replaced) - override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty - } - - @SerialVersionUID(3L) - class ZipWithIndex[A](underlying: SomeIterableOps[A]) extends AbstractView[(A, Int)] { - def iterator: Iterator[(A, Int)] = underlying.iterator.zipWithIndex - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class PadTo[A](underlying: SomeIterableOps[A], len: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.padTo(len, elem) - - override def knownSize: Int = { - val size = underlying.knownSize - if (size >= 0) size max len else -1 - } - override def isEmpty: Boolean = underlying.isEmpty && len <= 0 - } - - private[collection] def takeRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { - val k = it.knownSize - if(k == 0 || n <= 0) Iterator.empty - else if(n == Int.MaxValue) it - else if(k > 0) it.drop((k-n) max 0) - else new TakeRightIterator[A](it, n) - } - - private final class TakeRightIterator[A](private[this] var 
underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { - private[this] var len: Int = -1 - private[this] var pos: Int = 0 - private[this] var buf: ArrayBuffer[AnyRef] = _ - def init(): Unit = if(buf eq null) { - buf = new ArrayBuffer[AnyRef](maxlen min 256) - len = 0 - while(underlying.hasNext) { - val n = underlying.next().asInstanceOf[AnyRef] - if(pos >= buf.length) buf.addOne(n) - else buf(pos) = n - pos += 1 - if(pos == maxlen) pos = 0 - len += 1 - } - underlying = null - if(len > maxlen) len = maxlen - pos = pos - len - if(pos < 0) pos += maxlen - } - override def knownSize = len - def hasNext: Boolean = { - init() - len > 0 - } - def next(): A = { - init() - if(len == 0) Iterator.empty.next() - else { - val x = buf(pos).asInstanceOf[A] - pos += 1 - if(pos == maxlen) pos = 0 - len -= 1 - x - } - } - override def drop(n: Int): Iterator[A] = { - init() - if (n > 0) { - len = (len - n) max 0 - pos = (pos + n) % maxlen - } - this - } - } - - private[collection] def dropRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { - if(n <= 0) it - else { - val k = it.knownSize - if(k >= 0) it.take(k - n) - else new DropRightIterator[A](it, n) - } - } - - private final class DropRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { - private[this] var len: Int = -1 // known size or -1 if the end of `underlying` has not been seen yet - private[this] var pos: Int = 0 - private[this] var buf: ArrayBuffer[AnyRef] = _ - def init(): Unit = if(buf eq null) { - buf = new ArrayBuffer[AnyRef](maxlen min 256) - while(pos < maxlen && underlying.hasNext) { - buf.addOne(underlying.next().asInstanceOf[AnyRef]) - pos += 1 - } - if(!underlying.hasNext) len = 0 - pos = 0 - } - override def knownSize = len - def hasNext: Boolean = { - init() - len != 0 - } - def next(): A = { - if(!hasNext) Iterator.empty.next() - else { - val x = buf(pos).asInstanceOf[A] - if(len == -1) { - buf(pos) = underlying.next().asInstanceOf[AnyRef] - if(!underlying.hasNext) len = 0 - } else len -= 1 - pos += 1 - if(pos == maxlen) pos = 0 - x - } - } - } -} - -/** Explicit instantiation of the `View` trait to reduce class file size in subclasses. */ -@SerialVersionUID(3L) -abstract class AbstractView[+A] extends scala.collection.AbstractIterable[A] with View[A] diff --git a/stdlib-bootstrapped/src/scala/collection/convert/JavaCollectionWrappers.scala b/stdlib-bootstrapped/src/scala/collection/convert/JavaCollectionWrappers.scala deleted file mode 100644 index d76280fa2d15..000000000000 --- a/stdlib-bootstrapped/src/scala/collection/convert/JavaCollectionWrappers.scala +++ /dev/null @@ -1,606 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package convert - -import java.util.{concurrent => juc} -import java.util.{NavigableMap} -import java.{lang => jl, util => ju} - -import scala.jdk.CollectionConverters._ -import scala.util.Try -import scala.util.chaining._ -import scala.util.control.ControlThrowable - -/** Wrappers for exposing Scala collections as Java collections and vice-versa */ -@SerialVersionUID(3L) -// not private[convert] because `WeakHashMap` uses JMapWrapper -private[collection] object JavaCollectionWrappers extends Serializable { - @SerialVersionUID(3L) - class IteratorWrapper[A](val underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] with Serializable { - def hasNext = underlying.hasNext - def next() = underlying.next() - def hasMoreElements = underlying.hasNext - def nextElement() = underlying.next() - override def remove(): Nothing = throw new UnsupportedOperationException - } - - @SerialVersionUID(3L) - class JIteratorWrapper[A](val underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] with Serializable { - def hasNext = underlying.hasNext - def next() = underlying.next - } - - @SerialVersionUID(3L) - class JEnumerationWrapper[A](val underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] with Serializable { - def hasNext = underlying.hasMoreElements - def next() = underlying.nextElement - } - - trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { - val underlying: Iterable[A] - def size = underlying.size - override def iterator = new IteratorWrapper(underlying.iterator) - override def isEmpty = underlying.isEmpty - } - - @SerialVersionUID(3L) - class IterableWrapper[A](val underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] with Serializable - - @SerialVersionUID(3L) - class JIterableWrapper[A](val underlying: jl.Iterable[A]) - extends AbstractIterable[A] - with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] - with Serializable { - def iterator = underlying.iterator.asScala - override def iterableFactory: mutable.ArrayBuffer.type = mutable.ArrayBuffer - override def isEmpty: Boolean = !underlying.iterator().hasNext - } - - @SerialVersionUID(3L) - class JCollectionWrapper[A](val underlying: ju.Collection[A]) - extends AbstractIterable[A] - with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] - with Serializable { - def iterator = underlying.iterator.asScala - override def size = underlying.size - override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize - override def isEmpty = underlying.isEmpty - override def iterableFactory: mutable.ArrayBuffer.type = mutable.ArrayBuffer - } - - @SerialVersionUID(3L) - class SeqWrapper[A](val underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { - def get(i: Int) = underlying(i) - } - - @SerialVersionUID(3L) - class MutableSeqWrapper[A](val underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { - def get(i: Int) = underlying(i) - override def set(i: Int, elem: A) = { - val p = underlying(i) - underlying(i) = elem - p - } - } - - @SerialVersionUID(3L) - class MutableBufferWrapper[A](val underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { - def get(i: Int) = underlying(i) - override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } - override def add(elem: A) = { underlying += elem; true } - override def remove(i: Int) = 
underlying remove i - } - - @SerialVersionUID(3L) - class JListWrapper[A](val underlying: ju.List[A]) - extends mutable.AbstractBuffer[A] - with SeqOps[A, mutable.Buffer, mutable.Buffer[A]] - with StrictOptimizedSeqOps[A, mutable.Buffer, mutable.Buffer[A]] - with IterableFactoryDefaults[A, mutable.Buffer] - with Serializable { - def length = underlying.size - override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize - override def isEmpty = underlying.isEmpty - override def iterator: Iterator[A] = underlying.iterator.asScala - def apply(i: Int) = underlying.get(i) - def update(i: Int, elem: A) = underlying.set(i, elem) - def prepend(elem: A) = { underlying.subList(0, 0) add elem; this } - def addOne(elem: A): this.type = { underlying add elem; this } - def insert(idx: Int,elem: A): Unit = underlying.subList(0, idx).add(elem) - def insertAll(i: Int, elems: IterableOnce[A]) = { - val ins = underlying.subList(0, i) - elems.iterator.foreach(ins.add(_)) - } - def remove(i: Int) = underlying.remove(i) - def clear() = underlying.clear() - // Note: Clone cannot just call underlying.clone because in Java, only specific collections - // expose clone methods. Generically, they're protected. - override def clone(): JListWrapper[A] = new JListWrapper(new ju.ArrayList[A](underlying)) - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { - remove(from, replaced) - insertAll(from, patch) - this - } - def remove(from: Int, n: Int): Unit = underlying.subList(from, from+n).clear() - override def iterableFactory: mutable.ArrayBuffer.type = mutable.ArrayBuffer - override def subtractOne(elem: A): this.type = { underlying.remove(elem.asInstanceOf[AnyRef]); this } - } - - @SerialVersionUID(3L) - class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self => - // Note various overrides to avoid performance gotchas. 
- override def contains(o: Object): Boolean = { - try { underlying.contains(o.asInstanceOf[A]) } - catch { case cce: ClassCastException => false } - } - override def isEmpty = underlying.isEmpty - def size = underlying.size - def iterator = new ju.Iterator[A] { - val ui = underlying.iterator - var prev: Option[A] = None - def hasNext = ui.hasNext - def next = { val e = ui.next(); prev = Some(e); e } - override def remove() = prev match { - case Some(e) => - underlying match { - case ms: mutable.Set[a] => - ms remove e - prev = None - case _ => - throw new UnsupportedOperationException("remove") - } - case _ => - throw new IllegalStateException("next must be called at least once before remove") - } - } - } - - @SerialVersionUID(3L) - class MutableSetWrapper[A](val underlying: mutable.Set[A]) extends SetWrapper[A](underlying) with Serializable { - override def add(elem: A) = { - val sz = underlying.size - underlying += elem - sz < underlying.size - } - override def remove(elem: AnyRef) = - try underlying.remove(elem.asInstanceOf[A]) - catch { case ex: ClassCastException => false } - override def clear() = underlying.clear() - } - - @SerialVersionUID(3L) - class JSetWrapper[A](val underlying: ju.Set[A]) - extends mutable.AbstractSet[A] - with mutable.SetOps[A, mutable.Set, mutable.Set[A]] - with StrictOptimizedSetOps[A, mutable.Set, mutable.Set[A]] - with Serializable { - - override def size: Int = underlying.size - override def isEmpty: Boolean = underlying.isEmpty - override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize - def iterator: Iterator[A] = underlying.iterator.asScala - - def contains(elem: A): Boolean = underlying.contains(elem) - - def addOne(elem: A): this.type = { underlying add elem; this } - def subtractOne(elem: A): this.type = { underlying remove elem; this } - - override def remove(elem: A): Boolean = underlying remove elem - - override def clear(): Unit = { - underlying.clear() - } - - override def empty: mutable.Set[A] = new JSetWrapper(new ju.HashSet[A]) - - // Note: Clone cannot just call underlying.clone because in Java, only specific collections - // expose clone methods. Generically, they're protected. 
- override def clone(): mutable.Set[A] = new JSetWrapper[A](new ju.LinkedHashSet[A](underlying)) - - override def iterableFactory: IterableFactory[mutable.Set] = mutable.HashSet - - override def filterInPlace(p: A => Boolean): this.type = { - if (underlying.size() > 0) underlying.removeIf(!p(_)) - this - } - } - - @SerialVersionUID(3L) - class MapWrapper[K, V](underlying: Map[K, V]) extends ju.AbstractMap[K, V] with Serializable { self => - override def size = underlying.size - - override def get(key: AnyRef): V = try { - underlying get key.asInstanceOf[K] match { - case None => null.asInstanceOf[V] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[V] - } - - override def entrySet: ju.Set[ju.Map.Entry[K, V]] = new ju.AbstractSet[ju.Map.Entry[K, V]] { - def size = self.size - - def iterator = new ju.Iterator[ju.Map.Entry[K, V]] { - val ui = underlying.iterator - var prev : Option[K] = None - - def hasNext = ui.hasNext - - def next() = { - val (k, v) = ui.next() - prev = Some(k) - new ju.Map.Entry[K, V] { - def getKey = k - def getValue = v - def setValue(v1 : V) = self.put(k, v1) - - // It's important that this implementation conform to the contract - // specified in the javadocs of java.util.Map.Entry.hashCode - // - // See https://github.com/scala/bug/issues/10663 - override def hashCode = { - (if (k == null) 0 else k.hashCode()) ^ - (if (v == null) 0 else v.hashCode()) - } - - override def equals(other: Any) = other match { - case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue - case _ => false - } - } - } - - override def remove(): Unit = { - prev match { - case Some(k) => - underlying match { - case mm: mutable.Map[a, _] => - mm -= k - prev = None - case _ => - throw new UnsupportedOperationException("remove") - } - case _ => - throw new IllegalStateException("next must be called at least once before remove") - } - } - } - } - - override def containsKey(key: AnyRef): Boolean = try { - // Note: Subclass of collection.Map with specific key type may redirect generic - // contains to specific contains, which will throw a ClassCastException if the - // wrong type is passed. This is why we need a type cast to A inside a try/catch. 
- underlying.contains(key.asInstanceOf[K]) - } catch { - case ex: ClassCastException => false - } - } - - @SerialVersionUID(3L) - class MutableMapWrapper[K, V](val underlying: mutable.Map[K, V]) extends MapWrapper[K, V](underlying) { - override def put(k: K, v: V) = underlying.put(k, v) match { - case Some(v1) => v1 - case None => null.asInstanceOf[V] - } - - override def remove(k: AnyRef): V = try { - underlying remove k.asInstanceOf[K] match { - case None => null.asInstanceOf[V] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[V] - } - - override def clear() = underlying.clear() - } - - @SerialVersionUID(3L) - abstract class AbstractJMapWrapper[K, V] - extends mutable.AbstractMap[K, V] - with JMapWrapperLike[K, V, mutable.Map, mutable.Map[K, V]] with Serializable - - trait JMapWrapperLike[K, V, +CC[X, Y] <: mutable.MapOps[X, Y, CC, _], +C <: mutable.MapOps[K, V, CC, C]] - extends mutable.MapOps[K, V, CC, C] - with StrictOptimizedMapOps[K, V, CC, C] - with StrictOptimizedIterableOps[(K, V), mutable.Iterable, C] { - - def underlying: ju.Map[K, V] - - override def size = underlying.size - - // support Some(null) if currently bound to null - def get(k: K) = { - val v = underlying.get(k) - if (v != null) - Some(v) - else if (underlying.containsKey(k)) - Some(null.asInstanceOf[V]) - else - None - } - - override def getOrElseUpdate(key: K, op: => V): V = - underlying.computeIfAbsent(key, _ => op) match { - case null => update(key, null.asInstanceOf[V]); null.asInstanceOf[V] - case v => v - } - - def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } - def subtractOne(key: K): this.type = { underlying remove key; this } - - // support Some(null) if currently bound to null - override def put(k: K, v: V): Option[V] = - if (v == null) { - val present = underlying.containsKey(k) - val result = underlying.put(k, v) - if (present) Some(result) else None - } else { - var result: Option[V] = None - def recompute(k0: K, v0: V): V = v.tap(_ => - if (v0 != null) result = Some(v0) - else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) - ) - underlying.compute(k, recompute) - result - } - - override def update(k: K, v: V): Unit = underlying.put(k, v) - - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - def remap(k: K, v: V): V = - remappingFunction(Option(v)) match { - case Some(null) => throw PutNull - case Some(x) => x - case None => null.asInstanceOf[V] - } - try Option(underlying.compute(key, remap)) - catch { - case PutNull => update(key, null.asInstanceOf[V]); Some(null.asInstanceOf[V]) - } - } - - // support Some(null) if currently bound to null - override def remove(k: K): Option[V] = { - var result: Option[V] = None - def recompute(k0: K, v0: V): V = { - if (v0 != null) result = Some(v0) - else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) - null.asInstanceOf[V] - } - underlying.compute(k, recompute) - result - } - - def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { - val ui = underlying.entrySet.iterator - def hasNext = ui.hasNext - def next() = { val e = ui.next(); (e.getKey, e.getValue) } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - val i = underlying.entrySet().iterator() - while (i.hasNext) { - val entry = i.next() - f(entry.getKey, entry.getValue) - } - } - - override def clear() = underlying.clear() - - } - - /** Wraps a Java map as a Scala one. 
If the map is to support concurrent access, - * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized - * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility - * to wrap all non-atomic operations with `underlying.synchronized`. - * This includes `get`, as `java.util.Map`'s API does not allow for an - * atomic `get` when `null` values may be present. - */ - @SerialVersionUID(3L) - class JMapWrapper[K, V](val underlying : ju.Map[K, V]) - extends AbstractJMapWrapper[K, V] with Serializable { - - override def isEmpty: Boolean = underlying.isEmpty - override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize - override def empty: JMapWrapper[K, V] = new JMapWrapper(new ju.HashMap[K, V]) - } - - @SerialVersionUID(3L) - class ConcurrentMapWrapper[K, V](underlying: concurrent.Map[K, V]) extends MutableMapWrapper[K, V](underlying) with juc.ConcurrentMap[K, V] { - - def underlyingConcurrentMap: concurrent.Map[K, V] = underlying - - override def putIfAbsent(k: K, v: V) = underlying.putIfAbsent(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[V] - } - - override def remove(k: AnyRef, v: AnyRef) = try { - underlying.remove(k.asInstanceOf[K], v.asInstanceOf[V]) - } catch { - case ex: ClassCastException => - false - } - - override def replace(k: K, v: V): V = underlying.replace(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[V] - } - - override def replace(k: K, oldval: V, newval: V) = underlying.replace(k, oldval, newval) - } - - /** Wraps a concurrent Java map as a Scala one. Single-element concurrent - * access is supported; multi-element operations such as maps and filters - * are not guaranteed to be atomic. - */ - @SerialVersionUID(3L) - class JConcurrentMapWrapper[K, V](val underlying: juc.ConcurrentMap[K, V]) - extends AbstractJMapWrapper[K, V] - with concurrent.Map[K, V] { - - override def get(k: K) = Option(underlying get k) - - override def getOrElseUpdate(key: K, op: => V): V = - underlying.computeIfAbsent(key, _ => op) match { - case null => super/*[concurrent.Map]*/.getOrElseUpdate(key, op) - case v => v - } - - override def isEmpty: Boolean = underlying.isEmpty - override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize - override def empty: JConcurrentMapWrapper[K, V] = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[K, V]) - - def putIfAbsent(k: K, v: V): Option[V] = Option(underlying.putIfAbsent(k, v)) - - def remove(k: K, v: V): Boolean = underlying.remove(k, v) - - def replace(k: K, v: V): Option[V] = Option(underlying.replace(k, v)) - - def replace(k: K, oldvalue: V, newvalue: V): Boolean = underlying.replace(k, oldvalue, newvalue) - - override def lastOption: Option[(K, V)] = - underlying match { - case nav: NavigableMap[K @unchecked, V @unchecked] => Option(nav.lastEntry).map(e => (e.getKey, e.getValue)) - case _ if isEmpty => None - case _ => Try(last).toOption - } - - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - def remap(k: K, v: V): V = - remappingFunction(Option(v)) match { - case Some(null) => throw PutNull // see scala/scala#10129 - case Some(x) => x - case None => null.asInstanceOf[V] - } - try Option(underlying.compute(key, remap)) - catch { - case PutNull => super/*[concurrent.Map]*/.updateWith(key)(remappingFunction) - } - } - } - - @SerialVersionUID(3L) - class DictionaryWrapper[K, V](val underlying: mutable.Map[K, V]) extends ju.Dictionary[K, V] with Serializable { - def size: Int = underlying.size - 
def isEmpty: Boolean = underlying.isEmpty - def keys: ju.Enumeration[K] = underlying.keysIterator.asJavaEnumeration - def elements: ju.Enumeration[V] = underlying.valuesIterator.asJavaEnumeration - def get(key: AnyRef) = try { - underlying get key.asInstanceOf[K] match { - case None => null.asInstanceOf[V] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[V] - } - def put(key: K, value: V): V = underlying.put(key, value) match { - case Some(v) => v - case None => null.asInstanceOf[V] - } - override def remove(key: AnyRef) = try { - underlying remove key.asInstanceOf[K] match { - case None => null.asInstanceOf[V] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[V] - } - } - - @SerialVersionUID(3L) - class JDictionaryWrapper[K, V](val underlying: ju.Dictionary[K, V]) extends mutable.AbstractMap[K, V] with Serializable { - override def size: Int = underlying.size - override def isEmpty: Boolean = underlying.isEmpty - override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize - - def get(k: K) = Option(underlying get k) - - def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } - def subtractOne(key: K): this.type = { underlying remove key; this } - - override def put(k: K, v: V): Option[V] = Option(underlying.put(k, v)) - - override def update(k: K, v: V): Unit = { underlying.put(k, v) } - - override def remove(k: K): Option[V] = Option(underlying remove k) - def iterator = underlying.keys.asScala map (k => (k, underlying get k)) - - override def clear() = iterator.foreach(entry => underlying.remove(entry._1)) - - override def mapFactory: mutable.HashMap.type = mutable.HashMap - } - - @SerialVersionUID(3L) - class JPropertiesWrapper(underlying: ju.Properties) - extends mutable.AbstractMap[String, String] - with mutable.MapOps[String, String, mutable.Map, mutable.Map[String, String]] - with StrictOptimizedMapOps[String, String, mutable.Map, mutable.Map[String, String]] - with StrictOptimizedIterableOps[(String, String), mutable.Iterable, mutable.Map[String, String]] - with Serializable { - - override def size = underlying.size - override def isEmpty: Boolean = underlying.isEmpty - override def knownSize: Int = size - def get(k: String) = { - val v = underlying get k - if (v != null) Some(v.asInstanceOf[String]) else None - } - - def addOne(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } - def subtractOne(key: String): this.type = { underlying remove key; this } - - override def put(k: String, v: String): Option[String] = { - val r = underlying.put(k, v) - if (r != null) Some(r.asInstanceOf[String]) else None - } - - override def update(k: String, v: String): Unit = { underlying.put(k, v) } - - override def remove(k: String): Option[String] = { - val r = underlying remove k - if (r != null) Some(r.asInstanceOf[String]) else None - } - - def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { - val ui = underlying.entrySet.iterator - def hasNext = ui.hasNext - def next() = { - val e = ui.next() - (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) - } - } - - override def clear() = underlying.clear() - - override def empty: JPropertiesWrapper = new JPropertiesWrapper(new ju.Properties) - - def getProperty(key: String) = underlying.getProperty(key) - - def getProperty(key: String, defaultValue: String) = - underlying.getProperty(key, defaultValue) - - def setProperty(key: String, value: String) = - 
underlying.setProperty(key, value) - - override def mapFactory: mutable.HashMap.type = mutable.HashMap - } - - /** Thrown when certain Map operations attempt to put a null value. */ - private val PutNull = new ControlThrowable {} -} diff --git a/stdlib-bootstrapped/src/scala/collection/immutable/BitSet.scala b/stdlib-bootstrapped/src/scala/collection/immutable/BitSet.scala deleted file mode 100644 index 5084ea091d06..000000000000 --- a/stdlib-bootstrapped/src/scala/collection/immutable/BitSet.scala +++ /dev/null @@ -1,357 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import BitSetOps.{LogWL, updateArray} -import mutable.Builder -import scala.annotation.{implicitNotFound, nowarn} - -/** A class for immutable bitsets. - * $bitsetinfo - * @see [[https://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]] - * section on `Immutable BitSets` for more information. - * - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -sealed abstract class BitSet - extends AbstractSet[Int] - with SortedSet[Int] - with SortedSetOps[Int, SortedSet, BitSet] - with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] - with collection.BitSet - with collection.BitSetOps[BitSet] - with Serializable { - - override def unsorted: Set[Int] = this - - override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder - override def empty: BitSet = bitSetFactory.empty - - def bitSetFactory: BitSet.type = BitSet - - protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) - - def incl(elem: Int): BitSet = { - require(elem >= 0, "bitset element must be >= 0") - if (contains(elem)) this - else { - val idx = elem >> LogWL - updateWord(idx, word(idx) | (1L << elem)) - } - } - - def excl(elem: Int): BitSet = { - require(elem >= 0, "bitset element must be >= 0") - if (contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) & ~(1L << elem)) - } else this - } - - /** Update word at index `idx`; enlarge set if `idx` outside range of set. 
- */ - protected def updateWord(idx: Int, w: Long): BitSet - - override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) - override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].map(f) - - override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) - override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].flatMap(f) - - override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) - override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].collect(pf) - - // necessary for disambiguation - override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = - super.zip(that) - - protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) -} - -/** - * $factoryInfo - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly") -@SerialVersionUID(3L) -object BitSet extends SpecificIterableFactory[Int, BitSet] { - - def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = - it match { - case bs: BitSet => bs - case _ => (newBuilder ++= it).result() - } - - final val empty: BitSet = new BitSet1(0L) - - def newBuilder: Builder[Int, BitSet] = - mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems)) - - private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) - - /** A bitset containing all the bits in an array */ - def fromBitMask(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else if (len == 1) new BitSet1(elems(0)) - else if (len == 2) createSmall(elems(0), elems(1)) - else { - val a = java.util.Arrays.copyOf(elems, len) - new BitSetN(a) - } - } - - /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. 
- */ - def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else if (len == 1) new BitSet1(elems(0)) - else if (len == 2) createSmall(elems(0), elems(1)) - else new BitSetN(elems) - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSet1(val elems: Long) extends BitSet { - protected[collection] def nwords = 1 - protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L - protected[collection] def updateWord(idx: Int, w: Long): BitSet = - if (idx == 0) new BitSet1(w) - else if (idx == 1) createSmall(elems, w) - else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) - - - override def diff(other: collection.Set[Int]): BitSet = other match { - case bs: collection.BitSet => bs.nwords match { - case 0 => this - case _ => - val newElems = elems & ~bs.word(0) - if (newElems == 0L) this.empty else new BitSet1(newElems) - } - case _ => super.diff(other) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0) - if (_elems == 0L) this.empty else new BitSet1(_elems) - } - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSet2(val elems0: Long, val elems1: Long) extends BitSet { - protected[collection] def nwords = 2 - protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L - protected[collection] def updateWord(idx: Int, w: Long): BitSet = - if (idx == 0) new BitSet2(w, elems1) - else if (idx == 1) createSmall(elems0, w) - else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) - - - override def diff(other: collection.Set[Int]): BitSet = other match { - case bs: collection.BitSet => bs.nwords match { - case 0 => this - case 1 => - new BitSet2(elems0 & ~bs.word(0), elems1) - case _ => - val _elems0 = elems0 & ~bs.word(0) - val _elems1 = elems1 & ~bs.word(1) - - if (_elems1 == 0L) { - if (_elems0 == 0L) { - this.empty - } else { - new BitSet1(_elems0) - } - } else { - new BitSet2(_elems0, _elems1) - } - } - case _ => super.diff(other) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0) - val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1) - - if (_elems1 == 0L) { - if (_elems0 == 0L) { - this.empty - } - else new BitSet1(_elems0) - } - else new BitSet2(_elems0, _elems1) - } - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSetN(val elems: Array[Long]) extends BitSet { - protected[collection] def nwords = elems.length - - protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L - - protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w)) - - override def diff(that: collection.Set[Int]): BitSet = that match { - case bs: collection.BitSet => - /* - * Algorithm: - * - * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with - * the fewer words. Two extra concerns for optimization are described below. - * - * Array Shrinking: - * If `this` is not longer than `bs`, then since we must iterate through the full array of words, - * we can track the new highest index word which is non-zero, at little additional cost. 
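// Illustrative sketch, not part of the removed sources or this patch: immutable.BitSet
// stores non-negative Ints as bits of an Array[Long]; incl/excl return new sets.
// The demo object name is made up.
import scala.collection.immutable.BitSet

object ImmutableBitSetDemo {
  def main(args: Array[String]): Unit = {
    val bs = BitSet(3, 70)              // element 70 lives in word 70 >> 6 == 1, bit 70 & 63 == 6
    val grown = bs.incl(200)            // a new BitSet; bs itself is unchanged
    val shrunk = grown.excl(3)
    assert(shrunk.contains(70) && !shrunk.contains(3))
    // Negative elements are rejected: BitSet(-1) throws IllegalArgumentException.
    // A bitset can also be built directly from words:
    val fromWords = BitSet.fromBitMask(Array(0x5L))   // bits 0 and 2 are set
    assert(fromWords == BitSet(0, 2))
  }
}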
At the end, the new - * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` - * - * Tracking Changes: - * If the two sets are disjoint, then we can return `this`. Therefor, until at least one change is detected, - * we check each word for if it has changed from its corresponding word in `this`. Once a single change is - * detected, we stop checking because the cost of the new Array must be paid anyways. - */ - - val bsnwords = bs.nwords - val thisnwords = nwords - if (bsnwords >= thisnwords) { - // here, we may have opportunity to shrink the size of the array - // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length - var i = thisnwords - 1 - var currentWord = 0L - // if there are never any changes, we can return `this` at the end - var anyChanges = false - while (i >= 0 && currentWord == 0L) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - if (i < 0) { - // all indices >= 0 have had result 0, so the bitset is empty - this.empty - } else { - val minimumNonZeroIndex: Int = i + 1 - while (!anyChanges && i >= 0) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - if (anyChanges) { - if (minimumNonZeroIndex == -1) { - this.empty - } else if (minimumNonZeroIndex == 0) { - new BitSet1(currentWord) - } else if (minimumNonZeroIndex == 1) { - new BitSet2(word(0) & ~bs.word(0), currentWord) - } else { - val newArray = elems.take(minimumNonZeroIndex + 1) - newArray(i + 1) = currentWord - while (i >= 0) { - newArray(i) = word(i) & ~bs.word(i) - i -= 1 - } - this.fromBitMaskNoCopy(newArray) - } - } else { - this - } - } - } else { - var i = bsnwords - 1 - var anyChanges = false - var currentWord = 0L - while (i >= 0 && !anyChanges) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - if (anyChanges) { - val newElems = elems.clone() - newElems(i + 1) = currentWord - while (i >= 0) { - newElems(i) = word(i) & ~bs.word(i) - i -= 1 - } - this.fromBitMaskNoCopy(newElems) - } else { - this - } - } - case _ => super.diff(that) - } - - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - // here, we may have opportunity to shrink the size of the array - // so, track the highest index which is non-zero. 
That ( + 1 ) will be our new array length - var i = nwords - 1 - var currentWord = 0L - // if there are never any changes, we can return `this` at the end - var anyChanges = false - while (i >= 0 && currentWord == 0L) { - val oldWord = word(i) - currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - if (i < 0) { - // all indices >= 0 have had result 0, so the bitset is empty - if (currentWord == 0) this.empty else this.fromBitMaskNoCopy(Array(currentWord)) - } else { - val minimumNonZeroIndex: Int = i + 1 - while (!anyChanges && i >= 0) { - val oldWord = word(i) - currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - if (anyChanges) { - if (minimumNonZeroIndex == -1) { - this.empty - } else if (minimumNonZeroIndex == 0) { - new BitSet1(currentWord) - } else if (minimumNonZeroIndex == 1) { - new BitSet2(BitSetOps.computeWordForFilter(pred, isFlipped, word(0), 0), currentWord) - } else { - val newArray = elems.take(minimumNonZeroIndex + 1) - newArray(i + 1) = currentWord - while (i >= 0) { - newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) - i -= 1 - } - this.fromBitMaskNoCopy(newArray) - } - } else { - this - } - } - } - - override def toBitMask: Array[Long] = elems.clone() - } - - @SerialVersionUID(3L) - private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { - protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) - } -} diff --git a/stdlib-bootstrapped/src/scala/collection/immutable/TreeSet.scala b/stdlib-bootstrapped/src/scala/collection/immutable/TreeSet.scala deleted file mode 100644 index d6524cc734e5..000000000000 --- a/stdlib-bootstrapped/src/scala/collection/immutable/TreeSet.scala +++ /dev/null @@ -1,296 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.ReusableBuilder -import scala.collection.immutable.{RedBlackTree => RB} -import scala.runtime.AbstractFunction1 - - -/** This class implements immutable sorted sets using a tree. - * - * @tparam A the type of the elements contained in this tree set - * @param ordering the implicit ordering used to compare objects of type `A` - * - * @see [[https://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. 
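// Illustrative sketch, not part of the removed sources or this patch: the immutable
// TreeSet described above keeps elements sorted by the implicit Ordering and supports
// efficient range queries. The demo object name is made up.
import scala.collection.immutable.TreeSet

object TreeSetDemo {
  def main(args: Array[String]): Unit = {
    val ts = TreeSet(5, 1, 9, 3)
    assert(ts.toList == List(1, 3, 5, 9))          // iteration follows the ordering
    assert(ts.min == 1 && ts.max == 9)
    assert(ts.rangeFrom(3) == TreeSet(3, 5, 9))    // range projection, still a TreeSet
    assert(ts.minAfter(6) == Some(9) && ts.maxBefore(6) == Some(5))
  }
}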
- * - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) - extends AbstractSet[A] - with SortedSet[A] - with SortedSetOps[A, TreeSet, TreeSet[A]] - with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] - with SortedSetFactoryDefaults[A, TreeSet, Set] - with DefaultSerializable { - - if (ordering eq null) throw new NullPointerException("ordering must not be null") - - def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - - override def sortedIterableFactory: TreeSet.type = TreeSet - - private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t) - - override def size: Int = RB.count(tree) - - override def isEmpty = size == 0 - - override def head: A = RB.smallest(tree).key - - override def last: A = RB.greatest(tree).key - - override def tail: TreeSet[A] = new TreeSet(RB.tail(tree)) - - override def init: TreeSet[A] = new TreeSet(RB.init(tree)) - - override def min[A1 >: A](implicit ord: Ordering[A1]): A = { - if ((ord eq ordering) && nonEmpty) { - head - } else { - super.min(ord) - } - } - - override def max[A1 >: A](implicit ord: Ordering[A1]): A = { - if ((ord eq ordering) && nonEmpty) { - last - } else { - super.max(ord) - } - } - - override def drop(n: Int): TreeSet[A] = { - if (n <= 0) this - else if (n >= size) empty - else new TreeSet(RB.drop(tree, n)) - } - - override def take(n: Int): TreeSet[A] = { - if (n <= 0) empty - else if (n >= size) this - else new TreeSet(RB.take(tree, n)) - } - - override def slice(from: Int, until: Int): TreeSet[A] = { - if (until <= from) empty - else if (from <= 0) take(until) - else if (until >= size) drop(from) - else new TreeSet(RB.slice(tree, from, until)) - } - - override def dropRight(n: Int): TreeSet[A] = take(size - math.max(n, 0)) - - override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0)) - - private[this] def countWhile(p: A => Boolean): Int = { - var result = 0 - val it = iterator - while (it.hasNext && p(it.next())) result += 1 - result - } - override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p)) - - override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p)) - - override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p)) - - override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) - - override def minAfter(key: A): Option[A] = { - val v = RB.minAfter(tree, key) - if (v eq null) Option.empty else Some(v.key) - } - - override def maxBefore(key: A): Option[A] = { - val v = RB.maxBefore(tree, key) - if (v eq null) Option.empty else Some(v.key) - } - - def iterator: Iterator[A] = RB.keysIterator(tree) - - def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Tree[A, Any] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => 
shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - /** Checks if this set contains element `elem`. - * - * @param elem the element to check for membership. - * @return true, iff `elem` is contained in this set. - */ - def contains(elem: A): Boolean = RB.contains(tree, elem) - - override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until)) - - def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until)) - - /** Creates a new `TreeSet` with the entry added. - * - * @param elem a new element to add. - * @return a new $coll containing `elem` and all the elements of this $coll. - */ - def incl(elem: A): TreeSet[A] = - newSetOrSelf(RB.update(tree, elem, null, overwrite = false)) - - /** Creates a new `TreeSet` with the entry removed. - * - * @param elem a new element to add. - * @return a new $coll containing all the elements of this $coll except `elem`. - */ - def excl(elem: A): TreeSet[A] = - newSetOrSelf(RB.delete(tree, elem)) - - override def concat(that: collection.IterableOnce[A]): TreeSet[A] = { - val t = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - RB.union(tree, ts.tree) - case _ => - val it = that.iterator - var t = tree - while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false) - t - } - newSetOrSelf(t) - } - - override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.difference(tree, ts.tree)) - case _ => - //TODO add an implementation of a mutable subtractor similar to TreeMap - //but at least this doesn't create a TreeSet for each iteration - object sub extends AbstractFunction1[A, Unit] { - var currentTree = tree - override def apply(k: A): Unit = { - currentTree = RB.delete(currentTree, k) - } - } - that.iterator.foreach(sub) - newSetOrSelf(sub.currentTree) - } - - override def intersect(that: collection.Set[A]): TreeSet[A] = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.intersect(tree, ts.tree)) - case _ => - super.intersect(that) - } - - override def diff(that: collection.Set[A]): TreeSet[A] = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.difference(tree, ts.tree)) - case _ => - super.diff(that) - } - - override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)})) - - override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = { - val (l, r) = RB.partitionEntries(tree, {(a:A, _: Any) => p(a)}) - (newSetOrSelf(l), newSetOrSelf(r)) - } - - override def equals(obj: Any): Boolean = obj match { - case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) - case _ => super.equals(obj) - } - - override protected[this] def className = "TreeSet" -} - -/** - * $factoryInfo - * - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - */ -@SerialVersionUID(3L) -object TreeSet extends SortedIterableFactory[TreeSet] { - - def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] - - def from[E](it: scala.collection.IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = - it match { - case ts: TreeSet[E] if ordering == ts.ordering => ts - case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => - new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) - case r: Range if (ordering eq Ordering.Int) || 
(Ordering.Int isReverseOf ordering) => - val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator - val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size) - // The cast is needed to compile with Dotty: - // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound - new TreeSet[E](tree) - case _ => - var t: RB.Tree[E, Null] = null - val i = it.iterator - while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false) - new TreeSet[E](t) - } - - def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A] - private class TreeSetBuilder[A](implicit ordering: Ordering[A]) - extends RB.SetHelper[A] - with ReusableBuilder[A, TreeSet[A]] { - type Tree = RB.Tree[A, Any] - private [this] var tree:RB.Tree[A, Any] = null - - override def addOne(elem: A): this.type = { - tree = mutableUpd(tree, elem) - this - } - - override def addAll(xs: IterableOnce[A]): this.type = { - xs match { - // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= - // for the moment we have to force immutability before the union - // which will waste some time and space - // calling `beforePublish` makes `tree` immutable - case ts: TreeSet[A] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree - else tree = RB.union(beforePublish(tree), ts.tree)(ordering) - case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree0 - else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) - case _ => - super.addAll(xs) - } - this - } - - override def clear(): Unit = { - tree = null - } - - override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) - } -} diff --git a/stdlib-bootstrapped/src/scala/collection/mutable/BitSet.scala b/stdlib-bootstrapped/src/scala/collection/mutable/BitSet.scala deleted file mode 100644 index 643e8c465ef2..000000000000 --- a/stdlib-bootstrapped/src/scala/collection/mutable/BitSet.scala +++ /dev/null @@ -1,392 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.collection.immutable.Range -import BitSetOps.{LogWL, MaxSize} -import scala.annotation.implicitNotFound - -/** - * A class for mutable bitsets. - * - * $bitsetinfo - * - * @see [[https://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] - * section on `Mutable Bitsets` for more information. 
- * - * @define Coll `BitSet` - * @define coll bitset - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class BitSet(protected[collection] final var elems: Array[Long]) - extends AbstractSet[Int] - with SortedSet[Int] - with SortedSetOps[Int, SortedSet, BitSet] - with StrictOptimizedIterableOps[Int, Set, BitSet] - with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] - with collection.BitSet - with collection.BitSetOps[BitSet] - with Serializable { - - def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1))) - - def this() = this(0) - - override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder - override def empty: BitSet = bitSetFactory.empty - - def bitSetFactory: BitSet.type = BitSet - - override def unsorted: Set[Int] = this - - protected[collection] final def nwords: Int = elems.length - - protected[collection] final def word(idx: Int): Long = - if (idx < nwords) elems(idx) else 0L - - protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = - if (elems.length == 0) empty - else new BitSet(elems) - - def addOne(elem: Int): this.type = { - require(elem >= 0) - if (!contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) | (1L << elem)) - } - this - } - - def subtractOne(elem: Int): this.type = { - require(elem >= 0) - if (contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) & ~(1L << elem)) - } - this - } - - def clear(): Unit = { - elems = new Array[Long](elems.length) - } - - protected final def updateWord(idx: Int, w: Long): Unit = { - ensureCapacity(idx) - elems(idx) = w - } - - protected final def ensureCapacity(idx: Int): Unit = { - require(idx < MaxSize) - if (idx >= nwords) { - var newlen = nwords - while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize) - val elems1 = new Array[Long](newlen) - Array.copy(elems, 0, elems1, 0, nwords) - elems = elems1 - } - } - - def unconstrained: collection.Set[Int] = this - - /** Updates this bitset to the union with another bitset by performing a bitwise "or". - * - * @param other the bitset to form the union with. - * @return the bitset itself. - */ - def |= (other: collection.BitSet): this.type = { - ensureCapacity(other.nwords - 1) - var i = 0 - val othernwords = other.nwords - while (i < othernwords) { - elems(i) = elems(i) | other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". - * - * @param other the bitset to form the intersection with. - * @return the bitset itself. - */ - def &= (other: collection.BitSet): this.type = { - // Different from other operations: no need to ensure capacity because - // anything beyond the capacity is 0. Since we use other.word which is 0 - // off the end, we also don't need to make sure we stay in bounds there. - var i = 0 - val thisnwords = nwords - while (i < thisnwords) { - elems(i) = elems(i) & other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". - * - * @param other the bitset to form the symmetric difference with. - * @return the bitset itself. 
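// Illustrative sketch, not part of the removed sources or this patch: the in-place
// operators documented here (|=, &=, ^=, &~=) mutate the receiver word-by-word and
// return it for chaining. The demo object name is made up.
import scala.collection.mutable

object MutableBitSetDemo {
  def main(args: Array[String]): Unit = {
    val a = mutable.BitSet(1, 2, 3)
    val b = mutable.BitSet(2, 3, 4)
    a |= b                                   // union in place: a is now {1, 2, 3, 4}
    assert(a == mutable.BitSet(1, 2, 3, 4))
    a &~= mutable.BitSet(1)                  // difference in place: drops 1
    assert(a == mutable.BitSet(2, 3, 4))
    val frozen = a.toImmutable               // copies the words into an immutable.BitSet
    assert(frozen(4))
  }
}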
- */ - def ^= (other: collection.BitSet): this.type = { - ensureCapacity(other.nwords - 1) - var i = 0 - val othernwords = other.nwords - while (i < othernwords) { - - elems(i) = elems(i) ^ other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". - * - * @param other the bitset to form the difference with. - * @return the bitset itself. - */ - def &~= (other: collection.BitSet): this.type = { - var i = 0 - val max = Math.min(nwords, other.nwords) - while (i < max) { - elems(i) = elems(i) & ~other.word(i) - i += 1 - } - this - } - - override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length)) - - def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems) - - override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) - override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].map(f) - - override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) - override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].flatMap(f) - - override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) - override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].collect(pf) - - // necessary for disambiguation - override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = - super.zip(that) - - override def addAll(xs: IterableOnce[Int]): this.type = xs match { - case bs: collection.BitSet => - this |= bs - case range: Range => - if (range.nonEmpty) { - val start = range.min - if (start >= 0) { - val end = range.max - val endIdx = end >> LogWL - ensureCapacity(endIdx) - - if (range.step == 1 || range.step == -1) { - val startIdx = start >> LogWL - val wordStart = startIdx * BitSetOps.WordLength - val wordMask = -1L << (start - wordStart) - - if (endIdx > startIdx) { - elems(startIdx) |= wordMask - java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L) - elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1) - } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1))) - } else super.addAll(range) - } else super.addAll(range) - } - this - - case sorted: collection.SortedSet[Int] => - // if `sorted` is using the regular Int ordering, ensure capacity for the largest - // element up front to avoid multiple resizing allocations - if (sorted.nonEmpty) { - val ord = sorted.ordering - if (ord eq Ordering.Int) { - ensureCapacity(sorted.lastKey >> LogWL) - } else if (ord eq Ordering.Int.reverse) { - ensureCapacity(sorted.firstKey >> LogWL) - } - val iter = sorted.iterator - while (iter.hasNext) { - addOne(iter.next()) - } - } - - this - - case other => - super.addAll(other) - } - - override def subsetOf(that: collection.Set[Int]): Boolean = that match { - case bs: collection.BitSet => - val thisnwords = this.nwords - val bsnwords = bs.nwords - val minWords = Math.min(thisnwords, bsnwords) - - // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. 
Start there - var i = bsnwords - while (i < thisnwords) { - if (word(i) != 0L) return false - i += 1 - } - - // the higher range of `this` is all `0`s, fall back to lower range - var j = 0 - while (j < minWords) { - if ((word(j) & ~bs.word(j)) != 0L) return false - j += 1 - } - - true - case other => - super.subsetOf(other) - } - - override def subtractAll(xs: IterableOnce[Int]): this.type = xs match { - case bs: collection.BitSet => this &~= bs - case other => super.subtractAll(other) - } - - protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) - - override def diff(that: collection.Set[Int]): BitSet = that match { - case bs: collection.BitSet => - /* - * Algorithm: - * - * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with - * the fewer words. - * - * Array Shrinking: - * If `this` is not longer than `bs`, then since we must iterate through the full array of words, - * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new - * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` - */ - - val bsnwords = bs.nwords - val thisnwords = nwords - if (bsnwords >= thisnwords) { - // here, we may have opportunity to shrink the size of the array - // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length - var i = thisnwords - 1 - var currentWord = 0L - - while (i >= 0 && currentWord == 0L) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - i -= 1 - } - - if (i < 0) { - fromBitMaskNoCopy(Array(currentWord)) - } else { - val minimumNonZeroIndex: Int = i + 1 - val newArray = elems.take(minimumNonZeroIndex + 1) - newArray(i + 1) = currentWord - while (i >= 0) { - newArray(i) = word(i) & ~bs.word(i) - i -= 1 - } - fromBitMaskNoCopy(newArray) - } - } else { - // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index - val newElems = elems.clone() - var i = bsnwords - 1 - while (i >= 0) { - newElems(i) = word(i) & ~bs.word(i) - i -= 1 - } - fromBitMaskNoCopy(newElems) - } - case _ => super.diff(that) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word - // index which lets us avoid: - // * over-allocating -- the resulting array will be exactly the right size - // * multiple resizing allocations -- the array is allocated one time, not log(n) times. 
- var i = nwords - 1 - var newArray: Array[Long] = null - while (i >= 0) { - val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) - if (w != 0L) { - if (newArray eq null) { - newArray = new Array(i + 1) - } - newArray(i) = w - } - i -= 1 - } - if (newArray eq null) { - empty - } else { - fromBitMaskNoCopy(newArray) - } - } - - override def filterInPlace(p: Int => Boolean): this.type = { - val thisnwords = nwords - var i = 0 - while (i < thisnwords) { - elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i) - i += 1 - } - this - } - - override def toBitMask: Array[Long] = elems.clone() -} - -@SerialVersionUID(3L) -object BitSet extends SpecificIterableFactory[Int, BitSet] { - - def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = Growable.from(empty, it) - - def empty: BitSet = new BitSet() - - def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty) - - /** A bitset containing all the bits in an array */ - def fromBitMask(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else { - val a = java.util.Arrays.copyOf(elems, len) - new BitSet(a) - } - } - - /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. - */ - def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else new BitSet(elems) - } - - @SerialVersionUID(3L) - private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { - protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) - } -} diff --git a/stdlib-bootstrapped/src/scala/collection/mutable/LinkedHashMap.scala b/stdlib-bootstrapped/src/scala/collection/mutable/LinkedHashMap.scala deleted file mode 100644 index b06174b6653c..000000000000 --- a/stdlib-bootstrapped/src/scala/collection/mutable/LinkedHashMap.scala +++ /dev/null @@ -1,264 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.nowarn -import scala.collection.generic.DefaultSerializable - -/** $factoryInfo - * @define Coll `LinkedHashMap` - * @define coll linked hash map - */ -@SerialVersionUID(3L) -object LinkedHashMap extends MapFactory[LinkedHashMap] { - - def empty[K, V] = new LinkedHashMap[K, V] - - def from[K, V](it: collection.IterableOnce[(K, V)]) = - it match { - case lhm: LinkedHashMap[K, V] => lhm - case _ => Growable.from(empty[K, V], it) - } - - def newBuilder[K, V]: GrowableBuilder[(K, V), LinkedHashMap[K, V]] = new GrowableBuilder(empty[K, V]) - - /** Class for the linked hash map entry, used internally. - */ - private[mutable] final class LinkedEntry[K, V](val key: K, var value: V) - extends HashEntry[K, LinkedEntry[K, V]] { - var earlier: LinkedEntry[K, V] = null - var later: LinkedEntry[K, V] = null - } - -} - -/** This class implements mutable maps using a hashtable. - * The iterator and all traversal methods of this class visit elements in the order they were inserted. - * - * @tparam K the type of the keys contained in this hash map. - * @tparam V the type of the values assigned to keys in this hash map. 
- * - * @define Coll `LinkedHashMap` - * @define coll linked hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -class LinkedHashMap[K, V] - extends AbstractMap[K, V] - with SeqMap[K, V] - with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] - with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] - with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] - with DefaultSerializable { - - override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap - - // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper - // would not return the elements in insertion order - - private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] - private[collection] def _firstEntry: Entry = firstEntry - - @transient protected var firstEntry: Entry = null - @transient protected var lastEntry: Entry = null - @transient private[this] var table: HashTable[K, V, Entry] = newHashTable - - // Used by scala-java8-compat (private[mutable] erases to public, so Java code can access it) - private[mutable] def getTable: HashTable[K, V, Entry] = table - - private def newHashTable = - new HashTable[K, V, Entry] { - def createNewEntry(key: K, value: V): Entry = { - val e = new Entry(key, value) - if (firstEntry eq null) firstEntry = e - else { lastEntry.later = e; e.earlier = lastEntry } - lastEntry = e - e - } - - override def foreachEntry[U](f: Entry => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur) - cur = cur.later - } - } - - } - - override def last: (K, V) = - if (size > 0) (lastEntry.key, lastEntry.value) - else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") - - override def lastOption: Option[(K, V)] = - if (size > 0) Some((lastEntry.key, lastEntry.value)) - else None - - override def head: (K, V) = - if (size > 0) (firstEntry.key, firstEntry.value) - else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") - - override def headOption: Option[(K, V)] = - if (size > 0) Some((firstEntry.key, firstEntry.value)) - else None - - override def size = table.tableSize - override def knownSize: Int = size - override def isEmpty: Boolean = table.tableSize == 0 - def get(key: K): Option[V] = { - val e = table.findEntry(key) - if (e == null) None - else Some(e.value) - } - - override def contains(key: K): Boolean = { - if (getClass eq classOf[LinkedHashMap[_, _]]) - table.findEntry(key) != null - else - super.contains(key) // A subclass might override `get`, use the default implementation `contains`. 
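// Illustrative sketch, not part of the removed sources or this patch: unlike HashMap,
// iteration over a LinkedHashMap follows insertion order, maintained by the linked
// entries above. The demo object name is made up.
import scala.collection.mutable.LinkedHashMap

object LinkedHashMapDemo {
  def main(args: Array[String]): Unit = {
    val m = LinkedHashMap("b" -> 2, "a" -> 1)
    m("c") = 3
    assert(m.keys.toList == List("b", "a", "c"))   // insertion order, not key order
    m.remove("a")
    m("a") = 4                                     // re-inserting moves "a" to the end
    assert(m.keys.toList == List("b", "c", "a"))
  }
}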
- } - - override def put(key: K, value: V): Option[V] = { - val e = table.findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } - } - - override def update(key: K, value: V): Unit = { - val e = table.findOrAddEntry(key, value) - if (e ne null) e.value = value - } - - override def remove(key: K): Option[V] = { - val e = table.removeEntry(key) - if (e eq null) None - else Some(remove0(e)) - } - - private[this] def remove0(e: Entry): V = { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null // Null references to prevent nepotism - e.later = null - e.value - } - - def addOne(kv: (K, V)): this.type = { put(kv._1, kv._2); this } - - def subtractOne(key: K): this.type = { remove(key); this } - - def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { - private[this] var cur = firstEntry - def hasNext = cur ne null - def next() = - if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res } - else Iterator.empty.next() - } - - protected class LinkedKeySet extends KeySet { - override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet - } - - override def keySet: collection.Set[K] = new LinkedKeySet - - override def keysIterator: Iterator[K] = new AbstractIterator[K] { - private[this] var cur = firstEntry - def hasNext = cur ne null - def next() = - if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next() - } - - // Override updateWith for performance, so we can do the update while hashing - // the input key only once and performing one lookup into the hash table - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - val keyIndex = table.index(table.elemHashCode(key)) - val entry = table.findEntry0(key, keyIndex) - - val previousValue = - if (entry == null) None - else Some(entry.value) - - val nextValue = remappingFunction(previousValue) - - (previousValue, nextValue) match { - case (None, None) => // do nothing - case (Some(_), None) => - remove0(entry) - table.removeEntry0(key, keyIndex) - - case (None, Some(value)) => - table.addEntry0(table.createNewEntry(key, value), keyIndex) - - case (Some(_), Some(value)) => - entry.value = value - } - - nextValue - } - - override def valuesIterator: Iterator[V] = new AbstractIterator[V] { - private[this] var cur = firstEntry - def hasNext = cur ne null - def next() = - if (hasNext) { val res = cur.value; cur = cur.later; res } - else Iterator.empty.next() - } - - override def foreach[U](f: ((K, V)) => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f((cur.key, cur.value)) - cur = cur.later - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur.key, cur.value) - cur = cur.later - } - } - - override def clear(): Unit = { - table.clearTable() - firstEntry = null - lastEntry = null - } - - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.defaultWriteObject() - table.serializeTo(out, { entry => - out.writeObject(entry.key) - out.writeObject(entry.value) - }) - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - in.defaultReadObject() - table = newHashTable - table.init(in, table.createNewEntry(in.readObject().asInstanceOf[K], in.readObject().asInstanceOf[V])) - } - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override 
protected[this] def stringPrefix = "LinkedHashMap" -} - diff --git a/stdlib-bootstrapped/src/scala/collection/mutable/LinkedHashSet.scala b/stdlib-bootstrapped/src/scala/collection/mutable/LinkedHashSet.scala deleted file mode 100644 index 06b0650ed82f..000000000000 --- a/stdlib-bootstrapped/src/scala/collection/mutable/LinkedHashSet.scala +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.nowarn -import scala.collection.generic.DefaultSerializable - -/** This class implements mutable sets using a hashtable. - * The iterator and all traversal methods of this class visit elements in the order they were inserted. - * - * @tparam A the type of the elements contained in this set. - * - * @define Coll `LinkedHashSet` - * @define coll linked hash set - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -class LinkedHashSet[A] - extends AbstractSet[A] - with SetOps[A, LinkedHashSet, LinkedHashSet[A]] - with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] - with IterableFactoryDefaults[A, LinkedHashSet] - with DefaultSerializable { - - override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet - - // stepper is not overridden to use XTableStepper because that stepper would not return the - // elements in insertion order - - type Entry = LinkedHashSet.Entry[A] - - @transient protected var firstEntry: Entry = null - @transient protected var lastEntry: Entry = null - @transient private[this] var table: HashTable[A, AnyRef, Entry] = newHashTable - - // Used by scala-java8-compat (private[mutable] erases to public, so Java code can access it) - private[mutable] def getTable: HashTable[A, AnyRef, Entry] = table - - private def newHashTable = - new HashTable[A, AnyRef, Entry] { - def createNewEntry(key: A, value: AnyRef) = { - val e = new Entry(key) - if (firstEntry eq null) firstEntry = e - else { lastEntry.later = e; e.earlier = lastEntry } - lastEntry = e - e - } - override def foreachEntry[U](f: Entry => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur) - cur = cur.later - } - } - } - - override def last: A = - if (size > 0) lastEntry.key - else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") - - override def lastOption: Option[A] = - if (size > 0) Some(lastEntry.key) - else None - - override def head: A = - if (size > 0) firstEntry.key - else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") - - override def headOption: Option[A] = - if (size > 0) Some(firstEntry.key) - else None - - override def size: Int = table.tableSize - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - def contains(elem: A): Boolean = table.findEntry(elem) ne null - - def addOne(elem: A): this.type = { - table.findOrAddEntry(elem, null) - this - } - - def subtractOne(elem: A): this.type = { - remove(elem) - this - } - - override def remove(elem: A): Boolean = { - val e = table.removeEntry(elem) - if (e eq null) false - else { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else 
e.later.earlier = e.earlier - e.earlier = null // Null references to prevent nepotism - e.later = null - true - } - } - - def iterator: Iterator[A] = new AbstractIterator[A] { - private[this] var cur = firstEntry - def hasNext = cur ne null - def next() = - if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next() - } - - override def foreach[U](f: A => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur.key) - cur = cur.later - } - } - - override def clear(): Unit = { - table.clearTable() - firstEntry = null - lastEntry = null - } - - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.defaultWriteObject() - table.serializeTo(out, { e => out.writeObject(e.key) }) - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - in.defaultReadObject() - table = newHashTable - table.init(in, table.createNewEntry(in.readObject().asInstanceOf[A], null)) - } - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "LinkedHashSet" -} - -/** $factoryInfo - * @define Coll `LinkedHashSet` - * @define coll linked hash set - */ -@SerialVersionUID(3L) -object LinkedHashSet extends IterableFactory[LinkedHashSet] { - - override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] - - def from[E](it: collection.IterableOnce[E]) = - it match { - case lhs: LinkedHashSet[E] => lhs - case _ => Growable.from(empty[E], it) - } - - def newBuilder[A]: GrowableBuilder[A, LinkedHashSet[A]] = new GrowableBuilder(empty[A]) - - /** Class for the linked hash set entry, used internally. - */ - private[mutable] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] { - var earlier: Entry[A] = null - var later: Entry[A] = null - } -} - diff --git a/stdlib-bootstrapped/src/scala/collection/mutable/TreeMap.scala b/stdlib-bootstrapped/src/scala/collection/mutable/TreeMap.scala deleted file mode 100644 index 8fcb7b688a1f..000000000000 --- a/stdlib-bootstrapped/src/scala/collection/mutable/TreeMap.scala +++ /dev/null @@ -1,257 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.{RedBlackTree => RB} - -/** - * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. - * @tparam K the type of the keys contained in this tree map. - * @tparam V the type of the values associated with the keys. 
- * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) - extends AbstractMap[K, V] - with SortedMap[K, V] - with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]] - with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]] - with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] - with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] - with DefaultSerializable { - - override def sortedMapFactory: TreeMap.type = TreeMap - - /** - * Creates an empty `TreeMap`. - * @param ord the implicit ordering used to compare objects of type `K`. - * @return an empty `TreeMap`. - */ - def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord) - - def iterator: Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else RB.iterator(tree) - } - - override def keysIterator: Iterator[K] = { - if (isEmpty) Iterator.empty - else RB.keysIterator(tree, None) - } - - override def valuesIterator: Iterator[V] = { - if (isEmpty) Iterator.empty - else RB.valuesIterator(tree, None) - } - - def keysIteratorFrom(start: K): Iterator[K] = { - if (isEmpty) Iterator.empty - else RB.keysIterator(tree, Some(start)) - } - - def iteratorFrom(start: K): Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else RB.iterator(tree, Some(start)) - } - - override def valuesIteratorFrom(start: K): Iterator[V] = { - if (isEmpty) Iterator.empty - else RB.valuesIterator(tree, Some(start)) - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape.parUnbox( - scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]]( - size, tree.root, _.left, _.right, x => (x.key, x.value) - ) - ) - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Node[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Node[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value)) - } - s.asInstanceOf[S with EfficientSplit] - } - - def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this } - - def subtractOne(elem: K): this.type = { RB.delete(tree, elem); this } - - override def clear(): Unit = RB.clear(tree) - - def get(key: K): Option[V] = RB.get(tree, key) - 
- /** - * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and - * vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until) - - override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) - override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) - - override def size: Int = RB.size(tree) - override def knownSize: Int = size - override def isEmpty: Boolean = RB.isEmpty(tree) - - override def contains(key: K): Boolean = RB.contains(tree, key) - - override def head: (K, V) = RB.min(tree).get - - override def last: (K, V) = RB.max(tree).get - - override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) - - override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) - - override protected[this] def className: String = "TreeMap" - - - /** - * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) { - - /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). - */ - private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match { - case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) - case (None, _) => newFrom - case _ => from - } - - /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). - */ - private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match { - case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) - case (None, _) => newUntil - case _ => until - } - - /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). 
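// Illustrative sketch, not part of the removed sources or this patch: range/rangeImpl
// return a projection that shares the underlying red-black tree, so writes through
// the projection are visible in the original map and vice versa, as described above.
// The demo object name is made up.
import scala.collection.mutable.TreeMap

object TreeMapRangeDemo {
  def main(args: Array[String]): Unit = {
    val m = TreeMap(1 -> "a", 5 -> "b", 9 -> "c")
    assert(m.keys.toList == List(1, 5, 9))     // keys are kept in sorted order
    val window = m.range(2, 9)                 // keys k with 2 <= k < 9
    assert(window.keys.toList == List(5))
    window(7) = "d"                            // mutates the original map as well
    assert(m.contains(7))
    m(3) = "e"                                 // visible through the projection too
    assert(window.keys.toList == List(3, 5, 7))
  }
}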
- */ - private[this] def isInsideViewBounds(key: K): Boolean = { - val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 - val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 - afterFrom && beforeUntil - } - - override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = - new TreeMapProjection(pickLowerBound(from), pickUpperBound(until)) - - override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, key) else None - - override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until) - override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until) - override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until) - override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until) - override def iteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until) - override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until) - override def size = if (RB.size(tree) == 0) 0 else iterator.length - override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 - override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext - override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key) - - override def head = headOption.get - override def headOption = { - val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree) - (entry, until) match { - case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None - case _ => entry - } - } - - override def last = lastOption.get - override def lastOption = { - val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree) - (entry, from) match { - case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None - case _ => entry - } - } - - // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized - // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See - // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. - override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f) - - override def clone() = super.clone().rangeImpl(from, until) - } - -} - -/** - * $factoryInfo - * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -@SerialVersionUID(3L) -object TreeMap extends SortedMapFactory[TreeMap] { - - def from[K : Ordering, V](it: IterableOnce[(K, V)]): TreeMap[K, V] = - Growable.from(empty[K, V], it) - - def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap[K, V]() - - def newBuilder[K: Ordering, V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) - -} diff --git a/stdlib-bootstrapped/src/scala/concurrent/duration/Duration.scala b/stdlib-bootstrapped/src/scala/concurrent/duration/Duration.scala deleted file mode 100644 index 1f84b1e67340..000000000000 --- a/stdlib-bootstrapped/src/scala/concurrent/duration/Duration.scala +++ /dev/null @@ -1,742 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.concurrent.duration - -import java.lang.{ Double => JDouble } -import scala.collection.StringParsers - -object Duration { - - /** - * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if - * - * - the unit is NANOSECONDS - * - and the length has an absolute value greater than `2^53` - * - * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively. - * - * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]] - */ - def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length) - - /** - * Construct a finite duration from the given length and time unit. The unit given is retained - * throughout calculations as long as possible, so that it can be retrieved later. - */ - def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit) - - /** - * Construct a finite duration from the given length and time unit, where the latter is - * looked up in a list of string representation. Valid choices are: - * - * `d, day, h, hr, hour, m, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond` - * and their pluralized forms (for every but the first mentioned form of each unit, i.e. no "ds", but "days"). - */ - def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit)) - - // Double stores 52 bits mantissa, but there is an implied '1' in front, making the limit 2^53 - // private[this] final val maxPreciseDouble = 9007199254740992d // not used after https://github.com/scala/scala/pull/9233 - - /** - * Parse String into Duration. Format is `""`, where - * whitespace is allowed before, between and after the parts. Infinities are - * designated by `"Inf"`, `"PlusInf"`, `"+Inf"`, `"Duration.Inf"` and `"-Inf"`, `"MinusInf"` or `"Duration.MinusInf"`. - * Undefined is designated by `"Duration.Undefined"`. 
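// Editor's sketch, not part of this patch: the string-parsing format described
// above, using only the public scala.concurrent.duration API. The object name
// is illustrative only.
import scala.concurrent.duration._

object DurationParseDemo {
  def main(args: Array[String]): Unit = {
    println(Duration("100 millis"))    // 100 milliseconds
    println(Duration("1.5 minutes"))   // a fractional length falls back to the Double overload
    println(Duration("Inf"))           // Duration.Inf
    Duration("5 s") match {            // the extractor yields (length, unit) for finite durations
      case Duration(length, unit) => println(s"$length $unit")
      case other                  => println(other)
    }
  }
}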
- * - * @throws NumberFormatException if format is not parsable - */ - def apply(s: String): Duration = { - val s1: String = s filterNot (_.isWhitespace) - s1 match { - case "Inf" | "PlusInf" | "+Inf" | "Duration.Inf" => Inf - case "MinusInf" | "-Inf" | "Duration.MinusInf" => MinusInf - case "Duration.Undefined" => Undefined - case _ => - val unitName = s1.reverse.takeWhile(_.isLetter).reverse - timeUnit get unitName match { - case Some(unit) => - val valueStr = s1 dropRight unitName.length - StringParsers.parseLong(valueStr).map(Duration(_, unit)) - .getOrElse(Duration(JDouble.parseDouble(valueStr), unit)) - case _ => throw new NumberFormatException("format error " + s) - } - } - } - - // "ms milli millisecond" -> List("ms", "milli", "millis", "millisecond", "milliseconds") - private[this] def words(s: String) = (s.trim split "\\s+").toList - private[this] def expandLabels(labels: String): List[String] = { - val hd :: rest = words(labels): @unchecked - hd :: rest.flatMap(s => List(s, s + "s")) - } - private[this] val timeUnitLabels = List( - DAYS -> "d day", - HOURS -> "h hr hour", - MINUTES -> "m min minute", - SECONDS -> "s sec second", - MILLISECONDS -> "ms milli millisecond", - MICROSECONDS -> "µs micro microsecond", - NANOSECONDS -> "ns nano nanosecond" - ) - - // TimeUnit => standard label - protected[duration] val timeUnitName: Map[TimeUnit, String] = - timeUnitLabels.toMap.view.mapValues(s => words(s).last).toMap - - // Label => TimeUnit - protected[duration] val timeUnit: Map[String, TimeUnit] = - timeUnitLabels.flatMap{ case (unit, names) => expandLabels(names) map (_ -> unit) }.toMap - - /** - * Extract length and time unit out of a string, where the format must match the description for [[Duration$.apply(s:String)* apply(String)]]. - * The extractor will not match for malformed strings or non-finite durations. - */ - def unapply(s: String): Option[(Long, TimeUnit)] = - ( try Some(apply(s)) catch { case _: RuntimeException => None } ) flatMap unapply - - /** - * Extract length and time unit out of a duration, if it is finite. - */ - def unapply(d: Duration): Option[(Long, TimeUnit)] = - if (d.isFinite) Some((d.length, d.unit)) else None - - /** - * Construct a possibly infinite or undefined Duration from the given number of nanoseconds. - * - * - `Double.PositiveInfinity` is mapped to [[Duration.Inf]] - * - `Double.NegativeInfinity` is mapped to [[Duration.MinusInf]] - * - `Double.NaN` is mapped to [[Duration.Undefined]] - * - `-0d` is mapped to [[Duration.Zero]] (exactly like `0d`) - * - * The semantics of the resulting Duration objects matches the semantics of their Double - * counterparts with respect to arithmetic operations. 
- * - * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]] - */ - def fromNanos(nanos: Double): Duration = { - if (nanos.isInfinite) - if (nanos > 0) Inf else MinusInf - else if (JDouble.isNaN(nanos)) - Undefined - else if (nanos > Long.MaxValue || nanos < Long.MinValue) - throw new IllegalArgumentException("trying to construct too large duration with " + nanos + "ns") - else - fromNanos(nanos.round) - } - - private[this] final val ns_per_µs = 1000L - private[this] final val ns_per_ms = ns_per_µs * 1000 - private[this] final val ns_per_s = ns_per_ms * 1000 - private[this] final val ns_per_min = ns_per_s * 60 - private[this] final val ns_per_h = ns_per_min * 60 - private[this] final val ns_per_d = ns_per_h * 24 - - /** - * Construct a finite duration from the given number of nanoseconds. The - * result will have the coarsest possible time unit which can exactly express - * this duration. - * - * @throws IllegalArgumentException for `Long.MinValue` since that would lead to inconsistent behavior afterwards (cannot be negated) - */ - def fromNanos(nanos: Long): FiniteDuration = { - if (nanos % ns_per_d == 0) Duration(nanos / ns_per_d , DAYS) - else if (nanos % ns_per_h == 0) Duration(nanos / ns_per_h , HOURS) - else if (nanos % ns_per_min == 0) Duration(nanos / ns_per_min, MINUTES) - else if (nanos % ns_per_s == 0) Duration(nanos / ns_per_s , SECONDS) - else if (nanos % ns_per_ms == 0) Duration(nanos / ns_per_ms , MILLISECONDS) - else if (nanos % ns_per_µs == 0) Duration(nanos / ns_per_µs , MICROSECONDS) - else Duration(nanos, NANOSECONDS) - } - - /** - * Preconstructed value of `0.days`. - */ - // unit as coarse as possible to keep (_ + Zero) sane unit-wise - val Zero: FiniteDuration = new FiniteDuration(0, DAYS) - - /** - * The Undefined value corresponds closely to Double.NaN: - * - * - it is the result of otherwise invalid operations - * - it does not equal itself (according to `equals()`) - * - it compares greater than any other Duration apart from itself (for which `compare` returns 0) - * - * The particular comparison semantics mirror those of Double.NaN. 
- * - * '''''Use [[eq]] when checking an input of a method against this value.''''' - */ - val Undefined: Infinite = new Infinite { - override def toString = "Duration.Undefined" - override def equals(other: Any): Boolean = false - override def +(other: Duration): Duration = this - override def -(other: Duration): Duration = this - override def *(factor: Double): Duration = this - override def /(factor: Double): Duration = this - override def /(other: Duration): Double = Double.NaN - def compare(other: Duration): Int = if (other eq this) 0 else 1 - def unary_- : Duration = this - def toUnit(unit: TimeUnit): Double = Double.NaN - private def readResolve(): AnyRef = Undefined // Instructs deserialization to use this same instance - } - - sealed abstract class Infinite extends Duration { - def +(other: Duration): Duration = other match { - case x if x eq Undefined => Undefined - case x: Infinite if x ne this => Undefined - case _ => this - } - def -(other: Duration): Duration = other match { - case x if x eq Undefined => Undefined - case x: Infinite if x eq this => Undefined - case _ => this - } - - def *(factor: Double): Duration = - if (factor == 0d || JDouble.isNaN(factor)) Undefined - else if (factor < 0d) -this - else this - def /(divisor: Double): Duration = - if (JDouble.isNaN(divisor) || divisor.isInfinite) Undefined - else if ((divisor compare 0d) < 0) -this - else this - def /(divisor: Duration): Double = divisor match { - case _: Infinite => Double.NaN - case x => Double.PositiveInfinity * (if ((this > Zero) ^ (divisor >= Zero)) -1 else 1) - } - - final def isFinite = false - - private[this] def fail(what: String) = throw new IllegalArgumentException(s"$what not allowed on infinite Durations") - final def length: Long = fail("length") - final def unit: TimeUnit = fail("unit") - final def toNanos: Long = fail("toNanos") - final def toMicros: Long = fail("toMicros") - final def toMillis: Long = fail("toMillis") - final def toSeconds: Long = fail("toSeconds") - final def toMinutes: Long = fail("toMinutes") - final def toHours: Long = fail("toHours") - final def toDays: Long = fail("toDays") - - final def toCoarsest: Duration = this - } - - /** - * Infinite duration: greater than any other (apart from Undefined) and not equal to any other - * but itself. This value closely corresponds to Double.PositiveInfinity, - * matching its semantics in arithmetic operations. - */ - val Inf: Infinite = new Infinite { - override def toString: String = "Duration.Inf" - def compare(other: Duration): Int = other match { - case x if x eq Undefined => -1 // Undefined != Undefined - case x if x eq this => 0 // `case Inf` will include null checks in the byte code - case _ => 1 - } - def unary_- : Duration = MinusInf - def toUnit(unit: TimeUnit): Double = Double.PositiveInfinity - private def readResolve(): AnyRef = Inf // Instructs deserialization to use this same instance - } - - /** - * Infinite duration: less than any other and not equal to any other - * but itself. This value closely corresponds to Double.NegativeInfinity, - * matching its semantics in arithmetic operations. 
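// Editor's sketch, not part of this patch: the Double-like arithmetic of the
// infinite and undefined values described above. The object name is
// illustrative only.
import scala.concurrent.duration._

object InfiniteDurationDemo {
  def main(args: Array[String]): Unit = {
    println(Duration.Inf + 1.second)                   // Duration.Inf
    println(Duration.Inf - Duration.Inf)               // Duration.Undefined
    println(Duration.Inf * -1d)                        // Duration.MinusInf
    println(Duration.Undefined == Duration.Undefined)  // false, mirroring Double.NaN
    println(Duration.Undefined > Duration.Inf)         // true: Undefined compares greater than everything else
  }
}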
- */ - val MinusInf: Infinite = new Infinite { - override def toString: String = "Duration.MinusInf" - def compare(other: Duration): Int = if (other eq this) 0 else -1 - def unary_- : Duration = Inf - def toUnit(unit: TimeUnit): Double = Double.NegativeInfinity - private def readResolve(): AnyRef = MinusInf // Instructs deserialization to use this same instance - } - - // Java Factories - - /** - * Construct a finite duration from the given length and time unit. The unit given is retained - * throughout calculations as long as possible, so that it can be retrieved later. - */ - def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit) - /** - * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if - * - * - the unit is NANOSECONDS - * - and the length has an absolute value greater than `2^53` - * - * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively. - * - * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]] - */ - def create(length: Double, unit: TimeUnit): Duration = apply(length, unit) - /** - * Construct a finite duration from the given length and time unit, where the latter is - * looked up in a list of string representation. Valid choices are: - * - * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond` - * and their pluralized forms (for every but the first mentioned form of each unit, i.e. no "ds", but "days"). - */ - def create(length: Long, unit: String): FiniteDuration = apply(length, unit) - /** - * Parse String into Duration. Format is `""`, where - * whitespace is allowed before, between and after the parts. Infinities are - * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`. - * - * @throws NumberFormatException if format is not parsable - */ - def create(s: String): Duration = apply(s) - - /** - * The natural ordering of durations matches the natural ordering for Double, including non-finite values. - */ - implicit object DurationIsOrdered extends Ordering[Duration] { - def compare(a: Duration, b: Duration): Int = a compare b - } -} - -/** - *
<h2>Utility for working with java.util.concurrent.TimeUnit durations.</h2>
- * - * '''''This class is not meant as a general purpose representation of time, it is - * optimized for the needs of `scala.concurrent`.''''' - * - *
<h2>Basic Usage</h2>
- * - *
<p/>
- * Examples: - * {{{ - * import scala.concurrent.duration._ - * - * val duration = Duration(100, MILLISECONDS) - * val duration = Duration(100, "millis") - * - * duration.toNanos - * duration < 1.second - * duration <= Duration.Inf - * }}} - * - * '''''Invoking inexpressible conversions (like calling `toSeconds` on an infinite duration) will throw an IllegalArgumentException.''''' - * - *
<p/>
- * Implicits are also provided for Int, Long and Double. Example usage: - * {{{ - * import scala.concurrent.duration._ - * - * val duration = 100.millis - * }}} - * - * '''''The DSL provided by the implicit conversions always allows construction of finite durations, even for infinite Double inputs; use Duration.Inf instead.''''' - * - * Extractors, parsing and arithmetic are also included: - * {{{ - * val d = Duration("1.2 µs") - * val Duration(length, unit) = 5 millis - * val d2 = d * 2.5 - * val d3 = d2 + 1.millisecond - * }}} - * - *
<h2>Handling of Time Units</h2>
- * - * Calculations performed on finite durations always retain the more precise unit of either operand, no matter - * whether a coarser unit would be able to exactly express the same duration. This means that Duration can be - * used as a lossless container for a (length, unit) pair if it is constructed using the corresponding methods - * and no arithmetic is performed on it; adding/subtracting durations should in that case be done with care. - * - *
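// Editor's sketch, not part of this patch: finite arithmetic keeps the more
// precise unit of the two operands, as described above. The object name is
// illustrative only.
import scala.concurrent.duration._

object UnitRetentionDemo {
  def main(args: Array[String]): Unit = {
    val sum = 1.second + 500.millis
    println(sum)                   // 1500 milliseconds: the finer unit is kept
    println(sum.unit)              // MILLISECONDS
    println(sum.toCoarsest)        // still 1500 milliseconds, not exactly expressible in seconds
    println(60.minutes.toCoarsest) // 1 hour
  }
}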
<h2>Correspondence to Double Semantics</h2>
- * - * The semantics of arithmetic operations on Duration are two-fold: - * - * - exact addition/subtraction with nanosecond resolution for finite durations, independent of the summands' magnitude - * - isomorphic to `java.lang.Double` when it comes to infinite or undefined values - * - * The conversion between Duration and Double is done using [[Duration.toUnit]] (with unit NANOSECONDS) - * and [[Duration$.fromNanos(nanos:Double)* Duration.fromNanos(Double)]] - * - *
<h2>Ordering</h2>
- * - * The default ordering is consistent with the ordering of Double numbers, which means that Undefined is - * considered greater than all other durations, including [[Duration.Inf]]. - * - * @define exc @throws IllegalArgumentException when invoked on a non-finite duration - * - * @define ovf @throws IllegalArgumentException in case of a finite overflow: the range of a finite duration is `+-(2^63-1)`ns, and no conversion to infinite durations takes place. - */ -sealed abstract class Duration extends Serializable with Ordered[Duration] { - /** - * Obtain the length of this Duration measured in the unit obtained by the `unit` method. - * - * $exc - */ - def length: Long - /** - * Obtain the time unit in which the length of this duration is measured. - * - * $exc - */ - def unit: TimeUnit - /** - * Return the length of this duration measured in whole nanoseconds, rounding towards zero. - * - * $exc - */ - def toNanos: Long - /** - * Return the length of this duration measured in whole microseconds, rounding towards zero. - * - * $exc - */ - def toMicros: Long - /** - * Return the length of this duration measured in whole milliseconds, rounding towards zero. - * - * $exc - */ - def toMillis: Long - /** - * Return the length of this duration measured in whole seconds, rounding towards zero. - * - * $exc - */ - def toSeconds: Long - /** - * Return the length of this duration measured in whole minutes, rounding towards zero. - * - * $exc - */ - def toMinutes: Long - /** - * Return the length of this duration measured in whole hours, rounding towards zero. - * - * $exc - */ - def toHours: Long - /** - * Return the length of this duration measured in whole days, rounding towards zero. - * - * $exc - */ - def toDays: Long - /** - * Return the number of nanoseconds as floating point number, scaled down to the given unit. - * The result may not precisely represent this duration due to the Double datatype's inherent - * limitations (mantissa size effectively 53 bits). Non-finite durations are represented as - * - [[Duration.Undefined]] is mapped to Double.NaN - * - [[Duration.Inf]] is mapped to Double.PositiveInfinity - * - [[Duration.MinusInf]] is mapped to Double.NegativeInfinity - */ - def toUnit(unit: TimeUnit): Double - - /** - * Return the sum of that duration and this. When involving non-finite summands the semantics match those - * of Double. - * - * $ovf - */ - def +(other: Duration): Duration - /** - * Return the difference of that duration and this. When involving non-finite summands the semantics match those - * of Double. - * - * $ovf - */ - def -(other: Duration): Duration - /** - * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those - * of Double. - * - * $ovf - */ - def *(factor: Double): Duration - /** - * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those - * of Double. - * - * $ovf - */ - def /(divisor: Double): Duration - /** - * Return the quotient of this and that duration as floating-point number. The semantics are - * determined by Double as if calculating the quotient of the nanosecond lengths of both factors. - */ - def /(divisor: Duration): Double - /** - * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]]. 
- */ - def unary_- : Duration - /** - * This method returns whether this duration is finite, which is not the same as - * `!isInfinite` for Double because this method also returns `false` for [[Duration.Undefined]]. - */ - def isFinite: Boolean - /** - * Return the smaller of this and that duration as determined by the natural ordering. - */ - def min(other: Duration): Duration = if (this < other) this else other - /** - * Return the larger of this and that duration as determined by the natural ordering. - */ - def max(other: Duration): Duration = if (this > other) this else other - - // Java API - - /** - * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those - * of Double. - * - * $ovf - */ - def div(divisor: Double): Duration = this / divisor - /** - * Return the quotient of this and that duration as floating-point number. The semantics are - * determined by Double as if calculating the quotient of the nanosecond lengths of both factors. - */ - def div(other: Duration): Double = this / other - def gt(other: Duration): Boolean = this > other - def gteq(other: Duration): Boolean = this >= other - def lt(other: Duration): Boolean = this < other - def lteq(other: Duration): Boolean = this <= other - /** - * Return the difference of that duration and this. When involving non-finite summands the semantics match those - * of Double. - * - * $ovf - */ - def minus(other: Duration): Duration = this - other - /** - * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those - * of Double. - * - * $ovf - */ - def mul(factor: Double): Duration = this * factor - /** - * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]]. - */ - def neg(): Duration = -this - /** - * Return the sum of that duration and this. When involving non-finite summands the semantics match those - * of Double. - * - * $ovf - */ - def plus(other: Duration): Duration = this + other - /** - * Return duration which is equal to this duration but with a coarsest Unit, or self in case it is already the coarsest Unit - *
<p/>
- * Examples: - * {{{ - * Duration(60, MINUTES).toCoarsest // Duration(1, HOURS) - * Duration(1000, MILLISECONDS).toCoarsest // Duration(1, SECONDS) - * Duration(48, HOURS).toCoarsest // Duration(2, DAYS) - * Duration(5, SECONDS).toCoarsest // Duration(5, SECONDS) - * }}} - */ - def toCoarsest: Duration -} - -object FiniteDuration { - - implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] { - def compare(a: FiniteDuration, b: FiniteDuration): Int = a compare b - } - - def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit) - def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit)) - - // limit on abs. value of durations in their units - private final val max_ns = Long.MaxValue - private final val max_µs = max_ns / 1000 - private final val max_ms = max_µs / 1000 - private final val max_s = max_ms / 1000 - private final val max_min= max_s / 60 - private final val max_h = max_min / 60 - private final val max_d = max_h / 24 -} - -/** - * This class represents a finite duration. Its addition and subtraction operators are overloaded to retain - * this guarantee statically. The range of this class is limited to `+-(2^63-1)`ns, which is roughly 292 years. - */ -final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration { - import FiniteDuration._ - import Duration._ - - private[this] def bounded(max: Long) = -max <= length && length <= max - - require(unit match { - /* - * enforce the 2^63-1 ns limit, must be pos/neg symmetrical because of unary_- - */ - case NANOSECONDS => bounded(max_ns) - case MICROSECONDS => bounded(max_µs) - case MILLISECONDS => bounded(max_ms) - case SECONDS => bounded(max_s) - case MINUTES => bounded(max_min) - case HOURS => bounded(max_h) - case DAYS => bounded(max_d) - case _ => - val v = DAYS.convert(length, unit) - -max_d <= v && v <= max_d - }, "Duration is limited to +-(2^63-1)ns (ca. 292 years)") - - def toNanos: Long = unit.toNanos(length) - def toMicros: Long = unit.toMicros(length) - def toMillis: Long = unit.toMillis(length) - def toSeconds: Long = unit.toSeconds(length) - def toMinutes: Long = unit.toMinutes(length) - def toHours: Long = unit.toHours(length) - def toDays: Long = unit.toDays(length) - def toUnit(u: TimeUnit): Double = toNanos.toDouble / NANOSECONDS.convert(1, u) - - /** - * Construct a [[Deadline]] from this duration by adding it to the current instant `Deadline.now`. 
- */ - def fromNow: Deadline = Deadline.now + this - - private[this] def unitString = timeUnitName(unit) + ( if (length == 1) "" else "s" ) - override def toString: String = "" + length + " " + unitString - - def compare(other: Duration): Int = other match { - case x: FiniteDuration => toNanos compare x.toNanos - case _ => -(other compare this) - } - - // see https://www.securecoding.cert.org/confluence/display/java/NUM00-J.+Detect+or+prevent+integer+overflow - private[this] def safeAdd(a: Long, b: Long): Long = { - if ((b > 0) && (a > Long.MaxValue - b) || - (b < 0) && (a < Long.MinValue - b)) throw new IllegalArgumentException("integer overflow") - a + b - } - private[this] def add(otherLength: Long, otherUnit: TimeUnit): FiniteDuration = { - val commonUnit = if (otherUnit.convert(1, unit) == 0) unit else otherUnit - val totalLength = safeAdd(commonUnit.convert(length, unit), commonUnit.convert(otherLength, otherUnit)) - new FiniteDuration(totalLength, commonUnit) - } - - def +(other: Duration): Duration = other match { - case x: FiniteDuration => add(x.length, x.unit) - case _ => other - } - def -(other: Duration): Duration = other match { - case x: FiniteDuration => add(-x.length, x.unit) - case _ => -other - } - - def *(factor: Double): Duration = - if (!factor.isInfinite) fromNanos(toNanos * factor) - else if (JDouble.isNaN(factor)) Undefined - else if ((factor > 0) ^ (this < Zero)) Inf - else MinusInf - - def /(divisor: Double): Duration = - if (!divisor.isInfinite) fromNanos(toNanos / divisor) - else if (JDouble.isNaN(divisor)) Undefined - else Zero - - // if this is made a constant, then scalac will elide the conditional and always return +0.0, scala/bug#6331 - private[this] def minusZero = -0d - def /(divisor: Duration): Double = - if (divisor.isFinite) toNanos.toDouble / divisor.toNanos - else if (divisor eq Undefined) Double.NaN - else if ((length < 0) ^ (divisor > Zero)) 0d - else minusZero - - // overloaded methods taking FiniteDurations, so that you can calculate while statically staying finite - def +(other: FiniteDuration): FiniteDuration = add(other.length, other.unit) - def -(other: FiniteDuration): FiniteDuration = add(-other.length, other.unit) - def plus(other: FiniteDuration): FiniteDuration = this + other - def minus(other: FiniteDuration): FiniteDuration = this - other - def min(other: FiniteDuration): FiniteDuration = if (this < other) this else other - def max(other: FiniteDuration): FiniteDuration = if (this > other) this else other - - // overloaded methods taking Long so that you can calculate while statically staying finite - - /** - * Return the quotient of this duration and the given integer factor. - * - * @throws java.lang.ArithmeticException if the factor is 0 - */ - def /(divisor: Long): FiniteDuration = fromNanos(toNanos / divisor) - - /** - * Return the product of this duration and the given integer factor. - * - * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration - */ - def *(factor: Long): FiniteDuration = new FiniteDuration(safeMul(length, factor), unit) - - /* - * This method avoids the use of Long division, which saves 95% of the time spent, - * by checking that there are enough leading zeros so that the result has a chance - * to fit into a Long again; the remaining edge cases are caught by using the sign - * of the product for overflow detection. 
- * - * This method is not general purpose because it disallows the (otherwise legal) - * case of Long.MinValue * 1, but that is okay for use in FiniteDuration, since - * Long.MinValue is not a legal `length` anyway. - */ - private def safeMul(_a: Long, _b: Long): Long = { - val a = scala.math.abs(_a) - val b = scala.math.abs(_b) - import java.lang.Long.{ numberOfLeadingZeros => leading } - if (leading(a) + leading(b) < 64) throw new IllegalArgumentException("multiplication overflow") - val product = a * b - if (product < 0) throw new IllegalArgumentException("multiplication overflow") - if (a == _a ^ b == _b) -product else product - } - - /** - * Return the quotient of this duration and the given integer factor. - * - * @throws java.lang.ArithmeticException if the factor is 0 - */ - def div(divisor: Long): FiniteDuration = this / divisor - - /** - * Return the product of this duration and the given integer factor. - * - * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration - */ - def mul(factor: Long): FiniteDuration = this * factor - - def unary_- : FiniteDuration = Duration(-length, unit) - - final def isFinite = true - - final override def toCoarsest: FiniteDuration = { - def loop(length: Long, unit: TimeUnit): FiniteDuration = { - def coarserOrThis(coarser: TimeUnit, divider: Int): FiniteDuration = - if (length % divider == 0) loop(length / divider, coarser) - else if (unit == this.unit) this - else FiniteDuration(length, unit) - - unit match { - case DAYS => FiniteDuration(length, unit) - case HOURS => coarserOrThis(DAYS, 24) - case MINUTES => coarserOrThis(HOURS, 60) - case SECONDS => coarserOrThis(MINUTES, 60) - case MILLISECONDS => coarserOrThis(SECONDS, 1000) - case MICROSECONDS => coarserOrThis(MILLISECONDS, 1000) - case NANOSECONDS => coarserOrThis(MICROSECONDS, 1000) - } - } - - if (unit == DAYS || length == 0) this - else loop(length, unit) - } - - override def equals(other: Any): Boolean = other match { - case x: FiniteDuration => toNanos == x.toNanos - case _ => super.equals(other) - } - override def hashCode: Int = toNanos.toInt -} diff --git a/stdlib-bootstrapped/src/scala/math/BigDecimal.scala b/stdlib-bootstrapped/src/scala/math/BigDecimal.scala deleted file mode 100644 index ff54ea15abf8..000000000000 --- a/stdlib-bootstrapped/src/scala/math/BigDecimal.scala +++ /dev/null @@ -1,721 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package math - -import scala.language.implicitConversions - -import java.math.{ - BigDecimal => BigDec, - MathContext, - RoundingMode => JRM, -} -import scala.collection.immutable.NumericRange - -object BigDecimal { - private final val maximumHashScale = 4934 // Quit maintaining hash identity with BigInt beyond this scale - private final val hashCodeNotComputed = 0x5D50690F // Magic value (happens to be "BigDecimal" old MurmurHash3 value) - private final val deci2binary = 3.3219280948873626 // Ratio of log(10) to log(2) - private[this] val minCached = -512 - private[this] val maxCached = 512 - val defaultMathContext = MathContext.DECIMAL128 - - /** Cache only for defaultMathContext using BigDecimals in a small range. 
*/ - private[this] lazy val cache = new Array[BigDecimal](maxCached - minCached + 1) - - object RoundingMode extends Enumeration { - // Annoying boilerplate to ensure consistency with java.math.RoundingMode - type RoundingMode = Value - val UP = Value(JRM.UP.ordinal) - val DOWN = Value(JRM.DOWN.ordinal) - val CEILING = Value(JRM.CEILING.ordinal) - val FLOOR = Value(JRM.FLOOR.ordinal) - val HALF_UP = Value(JRM.HALF_UP.ordinal) - val HALF_DOWN = Value(JRM.HALF_DOWN.ordinal) - val HALF_EVEN = Value(JRM.HALF_EVEN.ordinal) - val UNNECESSARY = Value(JRM.UNNECESSARY.ordinal) - } - - /** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */ - def decimal(d: Double, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc) - - /** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */ - def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext) - - /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`, rounding if necessary. - * Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and - * `0.1 != 0.1f`. - */ - def decimal(f: Float, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(java.lang.Float.toString(f), mc), mc) - - /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`. - * Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and - * `0.1 != 0.1f`. - */ - def decimal(f: Float): BigDecimal = decimal(f, defaultMathContext) - - // This exists solely to avoid conversion from Int/Long to Float, screwing everything up. - /** Constructs a `BigDecimal` from a `Long`, rounding if necessary. This is identical to `BigDecimal(l, mc)`. */ - def decimal(l: Long, mc: MathContext): BigDecimal = apply(l, mc) - - // This exists solely to avoid conversion from Int/Long to Float, screwing everything up. - /** Constructs a `BigDecimal` from a `Long`. This is identical to `BigDecimal(l)`. */ - def decimal(l: Long): BigDecimal = apply(l) - - /** Constructs a `BigDecimal` using a `java.math.BigDecimal`, rounding if necessary. */ - def decimal(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd.round(mc), mc) - - /** Constructs a `BigDecimal` by expanding the binary fraction - * contained by `Double` value `d` into a decimal representation, - * rounding if necessary. When a `Float` is converted to a - * `Double`, the binary fraction is preserved, so this method - * also works for converted `Float`s. - */ - def binary(d: Double, mc: MathContext): BigDecimal = new BigDecimal(new BigDec(d, mc), mc) - - /** Constructs a `BigDecimal` by expanding the binary fraction - * contained by `Double` value `d` into a decimal representation. - * Note: this also works correctly on converted `Float`s. - */ - def binary(d: Double): BigDecimal = binary(d, defaultMathContext) - - /** Constructs a `BigDecimal` from a `java.math.BigDecimal`. The - * precision is the default for `BigDecimal` or enough to represent - * the `java.math.BigDecimal` exactly, whichever is greater. 
- */ - def exact(repr: BigDec): BigDecimal = { - val mc = - if (repr.precision <= defaultMathContext.getPrecision) defaultMathContext - else new MathContext(repr.precision, java.math.RoundingMode.HALF_EVEN) - new BigDecimal(repr, mc) - } - - /** Constructs a `BigDecimal` by fully expanding the binary fraction - * contained by `Double` value `d`, adjusting the precision as - * necessary. Note: this works correctly on converted `Float`s also. - */ - def exact(d: Double): BigDecimal = exact(new BigDec(d)) - - /** Constructs a `BigDecimal` that exactly represents a `BigInt`. - */ - def exact(bi: BigInt): BigDecimal = exact(new BigDec(bi.bigInteger)) - - /** Constructs a `BigDecimal` that exactly represents a `Long`. Note that - * all creation methods for `BigDecimal` that do not take a `MathContext` - * represent a `Long`; this is equivalent to `apply`, `valueOf`, etc.. - */ - def exact(l: Long): BigDecimal = apply(l) - - /** Constructs a `BigDecimal` that exactly represents the number - * specified in a `String`. - */ - def exact(s: String): BigDecimal = exact(new BigDec(s)) - - /** Constructs a `BigDecimal` that exactly represents the number - * specified in base 10 in a character array. - */ - def exact(cs: Array[Char]): BigDecimal = exact(new BigDec(cs)) - - - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor. Equivalent to `BigDecimal.decimal`. - * - * @param d the specified double value - * @return the constructed `BigDecimal` - */ - def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d) - - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor. - * - * @param x the specified `Long` value - * @return the constructed `BigDecimal` - */ - def valueOf(x: Long): BigDecimal = apply(x) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified `Integer` value. - * - * @param i the specified integer value - * @return the constructed `BigDecimal` - */ - def apply(i: Int): BigDecimal = apply(i, defaultMathContext) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified `Integer` value, rounding if necessary. - * - * @param i the specified integer value - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(i: Int, mc: MathContext): BigDecimal = - if (mc == defaultMathContext && minCached <= i && i <= maxCached) { - val offset = i - minCached - var n = cache(offset) - if (n eq null) { n = new BigDecimal(BigDec.valueOf(i.toLong), mc); cache(offset) = n } - n - } - else apply(i.toLong, mc) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified long value. - * - * @param l the specified long value - * @return the constructed `BigDecimal` - */ - def apply(l: Long): BigDecimal = - if (minCached <= l && l <= maxCached) apply(l.toInt) - else new BigDecimal(BigDec.valueOf(l), defaultMathContext) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified long value, but rounded if necessary. - * - * @param l the specified long value - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(l: Long, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(l, mc), mc) - - /** Constructs a `BigDecimal` whose unscaled value is equal to that - * of the specified long value. 
- * - * @param unscaledVal the value - * @param scale the scale - * @return the constructed `BigDecimal` - */ - def apply(unscaledVal: Long, scale: Int): BigDecimal = - apply(BigInt(unscaledVal), scale) - - /** Constructs a `BigDecimal` whose unscaled value is equal to that - * of the specified long value, but rounded if necessary. - * - * @param unscaledVal the value - * @param scale the scale - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(unscaledVal: Long, scale: Int, mc: MathContext): BigDecimal = - apply(BigInt(unscaledVal), scale, mc) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified double value. Equivalent to `BigDecimal.decimal`. - * - * @param d the specified `Double` value - * @return the constructed `BigDecimal` - */ - def apply(d: Double): BigDecimal = decimal(d, defaultMathContext) - - // note we don't use the static valueOf because it doesn't let us supply - // a MathContext, but we should be duplicating its logic, modulo caching. - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified double value, but rounded if necessary. Equivalent to - * `BigDecimal.decimal`. - * - * @param d the specified `Double` value - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(d: Double, mc: MathContext): BigDecimal = decimal(d, mc) - - /** Translates a character array representation of a `BigDecimal` - * into a `BigDecimal`. - */ - def apply(x: Array[Char]): BigDecimal = exact(x) - - /** Translates a character array representation of a `BigDecimal` - * into a `BigDecimal`, rounding if necessary. - */ - def apply(x: Array[Char], mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(x, mc), mc) - - /** Translates the decimal String representation of a `BigDecimal` - * into a `BigDecimal`. - */ - def apply(x: String): BigDecimal = exact(x) - - /** Translates the decimal String representation of a `BigDecimal` - * into a `BigDecimal`, rounding if necessary. - */ - def apply(x: String, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(x, mc), mc) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified `BigInt` value. - * - * @param x the specified `BigInt` value - * @return the constructed `BigDecimal` - */ - def apply(x: BigInt): BigDecimal = exact(x) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified `BigInt` value, rounding if necessary. - * - * @param x the specified `BigInt` value - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(x: BigInt, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(x.bigInteger, mc), mc) - - /** Constructs a `BigDecimal` whose unscaled value is equal to that - * of the specified `BigInt` value. - * - * @param unscaledVal the specified `BigInt` value - * @param scale the scale - * @return the constructed `BigDecimal` - */ - def apply(unscaledVal: BigInt, scale: Int): BigDecimal = - exact(new BigDec(unscaledVal.bigInteger, scale)) - - /** Constructs a `BigDecimal` whose unscaled value is equal to that - * of the specified `BigInt` value. 
- * - * @param unscaledVal the specified `BigInt` value - * @param scale the scale - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(unscaledVal: BigInt, scale: Int, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(unscaledVal.bigInteger, scale, mc), mc) - - /** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */ - def apply(bd: BigDec): BigDecimal = new BigDecimal(bd, defaultMathContext) - - /** Implicit conversion from `Int` to `BigDecimal`. */ - implicit def int2bigDecimal(i: Int): BigDecimal = apply(i) - - /** Implicit conversion from `Long` to `BigDecimal`. */ - implicit def long2bigDecimal(l: Long): BigDecimal = apply(l) - - /** Implicit conversion from `Double` to `BigDecimal`. */ - implicit def double2bigDecimal(d: Double): BigDecimal = decimal(d) - - /** Implicit conversion from `java.math.BigDecimal` to `scala.BigDecimal`. */ - implicit def javaBigDecimal2bigDecimal(x: BigDec): BigDecimal = if (x == null) null else apply(x) -} - -/** - * `BigDecimal` represents decimal floating-point numbers of arbitrary precision. - * By default, the precision approximately matches that of IEEE 128-bit floating - * point numbers (34 decimal digits, `HALF_EVEN` rounding mode). Within the range - * of IEEE binary128 numbers, `BigDecimal` will agree with `BigInt` for both - * equality and hash codes (and will agree with primitive types as well). Beyond - * that range--numbers with more than 4934 digits when written out in full--the - * `hashCode` of `BigInt` and `BigDecimal` is allowed to diverge due to difficulty - * in efficiently computing both the decimal representation in `BigDecimal` and the - * binary representation in `BigInt`. - * - * When creating a `BigDecimal` from a `Double` or `Float`, care must be taken as - * the binary fraction representation of `Double` and `Float` does not easily - * convert into a decimal representation. Three explicit schemes are available - * for conversion. `BigDecimal.decimal` will convert the floating-point number - * to a decimal text representation, and build a `BigDecimal` based on that. - * `BigDecimal.binary` will expand the binary fraction to the requested or default - * precision. `BigDecimal.exact` will expand the binary fraction to the - * full number of digits, thus producing the exact decimal value corresponding to - * the binary fraction of that floating-point number. `BigDecimal` equality - * matches the decimal expansion of `Double`: `BigDecimal.decimal(0.1) == 0.1`. - * Note that since `0.1f != 0.1`, the same is not true for `Float`. Instead, - * `0.1f == BigDecimal.decimal((0.1f).toDouble)`. - * - * To test whether a `BigDecimal` number can be converted to a `Double` or - * `Float` and then back without loss of information by using one of these - * methods, test with `isDecimalDouble`, `isBinaryDouble`, or `isExactDouble` - * or the corresponding `Float` versions. Note that `BigInt`'s `isValidDouble` - * will agree with `isExactDouble`, not the `isDecimalDouble` used by default. - * - * `BigDecimal` uses the decimal representation of binary floating-point numbers - * to determine equality and hash codes. This yields different answers than - * conversion between `Long` and `Double` values, where the exact form is used. - * As always, since floating-point is a lossy representation, it is advisable to - * take care when assuming identity will be maintained across multiple conversions. 
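// Editor's sketch, not part of this patch: the three Double-to-BigDecimal
// conversion schemes described above. The object name is illustrative only.
object BigDecimalConversionDemo {
  def main(args: Array[String]): Unit = {
    println(BigDecimal.decimal(0.1))        // 0.1 -- built from the decimal text form of the Double
    println(BigDecimal.binary(0.1))         // the binary fraction, rounded to the default 34-digit precision
    println(BigDecimal.exact(0.1))          // the binary fraction expanded in full
    println(BigDecimal.decimal(0.1) == 0.1) // true: equality follows the decimal form of the Double
    println(BigDecimal.exact(0.1) == 0.1)   // false: the full expansion differs from the decimal form
  }
}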
- * - * `BigDecimal` maintains a `MathContext` that determines the rounding that - * is applied to certain calculations. In most cases, the value of the - * `BigDecimal` is also rounded to the precision specified by the `MathContext`. - * To create a `BigDecimal` with a different precision than its `MathContext`, - * use `new BigDecimal(new java.math.BigDecimal(...), mc)`. Rounding will - * be applied on those mathematical operations that can dramatically change the - * number of digits in a full representation, namely multiplication, division, - * and powers. The left-hand argument's `MathContext` always determines the - * degree of rounding, if any, and is the one propagated through arithmetic - * operations that do not apply rounding themselves. - */ -final class BigDecimal(val bigDecimal: BigDec, val mc: MathContext) -extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigDecimal] { - def this(bigDecimal: BigDec) = this(bigDecimal, BigDecimal.defaultMathContext) - import BigDecimal.RoundingMode._ - import BigDecimal.{decimal, binary, exact} - - if (bigDecimal eq null) throw new IllegalArgumentException("null value for BigDecimal") - if (mc eq null) throw new IllegalArgumentException("null MathContext for BigDecimal") - - // There was an implicit to cut down on the wrapper noise for BigDec -> BigDecimal. - // However, this may mask introduction of surprising behavior (e.g. lack of rounding - // where one might expect it). Wrappers should be applied explicitly with an - // eye to correctness. - - // Sane hash code computation (which is surprisingly hard). - // Note--not lazy val because we can't afford the extra space. - private final var computedHashCode: Int = BigDecimal.hashCodeNotComputed - private final def computeHashCode(): Unit = { - computedHashCode = - if (isWhole && (precision - scale) < BigDecimal.maximumHashScale) toBigInt.hashCode - else if (isDecimalDouble) doubleValue.## - else { - val temp = bigDecimal.stripTrailingZeros - scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale ) - } - } - - /** Returns the hash code for this BigDecimal. - * Note that this does not merely use the underlying java object's - * `hashCode` because we compare `BigDecimal`s with `compareTo` - * which deems 2 == 2.00, whereas in java these are unequal - * with unequal `hashCode`s. These hash codes agree with `BigInt` - * for whole numbers up ~4934 digits (the range of IEEE 128 bit floating - * point). Beyond this, hash codes will disagree; this prevents the - * explicit representation of the `BigInt` form for `BigDecimal` values - * with large exponents. - */ - override def hashCode(): Int = { - if (computedHashCode == BigDecimal.hashCodeNotComputed) computeHashCode() - computedHashCode - } - - /** Compares this BigDecimal with the specified value for equality. 
Where `Float` and `Double` - * disagree, `BigDecimal` will agree with the `Double` value - */ - override def equals (that: Any): Boolean = that match { - case that: BigDecimal => this equals that - case that: BigInt => - that.bitLength > (precision-scale-2)*BigDecimal.deci2binary && - this.toBigIntExact.exists(that equals _) - case that: Double => - !that.isInfinity && { - val d = toDouble - !d.isInfinity && d == that && equals(decimal(d)) - } - case that: Float => - !that.isInfinity && { - val f = toFloat - !f.isInfinity && f == that && equals(decimal(f.toDouble)) - } - case _ => isValidLong && unifiedPrimitiveEquals(that) - } - override def isValidByte = noArithmeticException(toByteExact) - override def isValidShort = noArithmeticException(toShortExact) - override def isValidChar = isValidInt && toIntExact >= Char.MinValue && toIntExact <= Char.MaxValue - override def isValidInt = noArithmeticException(toIntExact) - def isValidLong = noArithmeticException(toLongExact) - - /** Tests whether this `BigDecimal` holds the decimal representation of a `Double`. */ - def isDecimalDouble = { - val d = toDouble - !d.isInfinity && equals(decimal(d)) - } - - /** Tests whether this `BigDecimal` holds the decimal representation of a `Float`. */ - def isDecimalFloat = { - val f = toFloat - !f.isInfinity && equals(decimal(f)) - } - - /** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Double`. */ - def isBinaryDouble = { - val d = toDouble - !d.isInfinity && equals(binary(d,mc)) - } - - /** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Float`. */ - def isBinaryFloat = { - val f = toFloat - !f.isInfinity && equals(binary(f,mc)) - } - - /** Tests whether this `BigDecimal` holds the exact expansion of a `Double`'s binary fractional form into base 10. */ - def isExactDouble = { - val d = toDouble - !d.isInfinity && equals(exact(d)) - } - - /** Tests whether this `BigDecimal` holds the exact expansion of a `Float`'s binary fractional form into base 10. */ - def isExactFloat = { - val f = toFloat - !f.isInfinity && equals(exact(f.toDouble)) - } - - - private def noArithmeticException(body: => Unit): Boolean = { - try { body ; true } - catch { case _: ArithmeticException => false } - } - - def isWhole = scale <= 0 || bigDecimal.stripTrailingZeros.scale <= 0 - - def underlying: java.math.BigDecimal = bigDecimal - - - /** Compares this BigDecimal with the specified BigDecimal for equality. - */ - def equals (that: BigDecimal): Boolean = compare(that) == 0 - - /** Compares this BigDecimal with the specified BigDecimal - */ - def compare (that: BigDecimal): Int = this.bigDecimal compareTo that.bigDecimal - - /** Addition of BigDecimals - */ - def + (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.add(that.bigDecimal, mc), mc) - - /** Subtraction of BigDecimals - */ - def - (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.subtract(that.bigDecimal, mc), mc) - - /** Multiplication of BigDecimals - */ - def * (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.multiply(that.bigDecimal, mc), mc) - - /** Division of BigDecimals - */ - def / (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.divide(that.bigDecimal, mc), mc) - - /** Division and Remainder - returns tuple containing the result of - * divideToIntegralValue and the remainder. The computation is exact: no rounding is applied. 
- */ - def /% (that: BigDecimal): (BigDecimal, BigDecimal) = { - val qr = this.bigDecimal.divideAndRemainder(that.bigDecimal, mc) - (new BigDecimal(qr(0), mc), new BigDecimal(qr(1), mc)) - } - - /** Divide to Integral value. - */ - def quot (that: BigDecimal): BigDecimal = - new BigDecimal(this.bigDecimal.divideToIntegralValue(that.bigDecimal, mc), mc) - - /** Returns the minimum of this and that, or this if the two are equal - */ - def min (that: BigDecimal): BigDecimal = (this compare that) match { - case x if x <= 0 => this - case _ => that - } - - /** Returns the maximum of this and that, or this if the two are equal - */ - def max (that: BigDecimal): BigDecimal = (this compare that) match { - case x if x >= 0 => this - case _ => that - } - - /** Remainder after dividing this by that. - */ - def remainder (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.remainder(that.bigDecimal, mc), mc) - - /** Remainder after dividing this by that. - */ - def % (that: BigDecimal): BigDecimal = this.remainder(that) - - /** Returns a BigDecimal whose value is this ** n. - */ - def pow (n: Int): BigDecimal = new BigDecimal(this.bigDecimal.pow(n, mc), mc) - - /** Returns a BigDecimal whose value is the negation of this BigDecimal - */ - def unary_- : BigDecimal = new BigDecimal(this.bigDecimal.negate(mc), mc) - - /** Returns the absolute value of this BigDecimal - */ - def abs: BigDecimal = if (signum < 0) unary_- else this - - /** Returns the sign of this BigDecimal; - * -1 if it is less than 0, - * +1 if it is greater than 0, - * 0 if it is equal to 0. - */ - def signum: Int = this.bigDecimal.signum() - - /** Returns the sign of this BigDecimal; - * -1 if it is less than 0, - * +1 if it is greater than 0, - * 0 if it is equal to 0. - */ - def sign: BigDecimal = signum - - /** Returns the precision of this `BigDecimal`. - */ - def precision: Int = this.bigDecimal.precision - - /** Returns a BigDecimal rounded according to the supplied MathContext settings, but - * preserving its own MathContext for future operations. - */ - def round(mc: MathContext): BigDecimal = { - val r = this.bigDecimal round mc - if (r eq bigDecimal) this else new BigDecimal(r, this.mc) - } - - /** Returns a `BigDecimal` rounded according to its own `MathContext` */ - def rounded: BigDecimal = { - val r = bigDecimal round mc - if (r eq bigDecimal) this else new BigDecimal(r, mc) - } - - /** Returns the scale of this `BigDecimal`. - */ - def scale: Int = this.bigDecimal.scale - - /** Returns the size of an ulp, a unit in the last place, of this BigDecimal. - */ - def ulp: BigDecimal = new BigDecimal(this.bigDecimal.ulp, mc) - - /** Returns a new BigDecimal based on the supplied MathContext, rounded as needed. - */ - def apply(mc: MathContext): BigDecimal = new BigDecimal(this.bigDecimal round mc, mc) - - /** Returns a `BigDecimal` whose scale is the specified value, and whose value is - * numerically equal to this BigDecimal's. - */ - def setScale(scale: Int): BigDecimal = - if (this.scale == scale) this - else new BigDecimal(this.bigDecimal.setScale(scale), mc) - - def setScale(scale: Int, mode: RoundingMode): BigDecimal = - if (this.scale == scale) this - else new BigDecimal(this.bigDecimal.setScale(scale, JRM.valueOf(mode.id)), mc) - - /** Converts this BigDecimal to a Byte. - * If the BigDecimal is too big to fit in a Byte, only the low-order 8 bits are returned. 
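// Editor's sketch, not part of this patch: rounding behaviour controlled by the
// MathContext and by setScale, as described above. The object name is
// illustrative only.
import java.math.MathContext
import scala.math.BigDecimal.RoundingMode

object BigDecimalRoundingDemo {
  def main(args: Array[String]): Unit = {
    val third = BigDecimal(10) / BigDecimal(3)     // division rounds to the left operand's MathContext (34 digits by default)
    println(third.round(new MathContext(5)))       // 3.3333
    println(BigDecimal("2.345").setScale(2, RoundingMode.HALF_UP)) // 2.35
    println(BigDecimal("2.00") == BigDecimal("2")) // true: equality is comparison-based, so 2 == 2.00
  }
}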
- * Note that this conversion can lose information about the overall magnitude of the - * BigDecimal value as well as return a result with the opposite sign. - */ - override def byteValue = intValue.toByte - - /** Converts this BigDecimal to a Short. - * If the BigDecimal is too big to fit in a Short, only the low-order 16 bits are returned. - * Note that this conversion can lose information about the overall magnitude of the - * BigDecimal value as well as return a result with the opposite sign. - */ - override def shortValue = intValue.toShort - - /** Converts this BigDecimal to a Char. - * If the BigDecimal is too big to fit in a Char, only the low-order 16 bits are returned. - * Note that this conversion can lose information about the overall magnitude of the - * BigDecimal value and that it always returns a positive result. - */ - def charValue = intValue.toChar - - /** Converts this BigDecimal to an Int. - * If the BigDecimal is too big to fit in an Int, only the low-order 32 bits - * are returned. Note that this conversion can lose information about the - * overall magnitude of the BigDecimal value as well as return a result with - * the opposite sign. - */ - def intValue = this.bigDecimal.intValue - - /** Converts this BigDecimal to a Long. - * If the BigDecimal is too big to fit in a Long, only the low-order 64 bits - * are returned. Note that this conversion can lose information about the - * overall magnitude of the BigDecimal value as well as return a result with - * the opposite sign. - */ - def longValue = this.bigDecimal.longValue - - /** Converts this BigDecimal to a Float. - * if this BigDecimal has too great a magnitude to represent as a float, - * it will be converted to `Float.NEGATIVE_INFINITY` or - * `Float.POSITIVE_INFINITY` as appropriate. - */ - def floatValue = this.bigDecimal.floatValue - - /** Converts this BigDecimal to a Double. - * if this BigDecimal has too great a magnitude to represent as a double, - * it will be converted to `Double.NEGATIVE_INFINITY` or - * `Double.POSITIVE_INFINITY` as appropriate. - */ - def doubleValue = this.bigDecimal.doubleValue - - /** Converts this `BigDecimal` to a [[scala.Byte]], checking for lost information. - * If this `BigDecimal` has a nonzero fractional part, or is out of the possible - * range for a [[scala.Byte]] result, then a `java.lang.ArithmeticException` is - * thrown. - */ - def toByteExact = bigDecimal.byteValueExact - - /** Converts this `BigDecimal` to a [[scala.Short]], checking for lost information. - * If this `BigDecimal` has a nonzero fractional part, or is out of the possible - * range for a [[scala.Short]] result, then a `java.lang.ArithmeticException` is - * thrown. - */ - def toShortExact = bigDecimal.shortValueExact - - /** Converts this `BigDecimal` to a [[scala.Int]], checking for lost information. - * If this `BigDecimal` has a nonzero fractional part, or is out of the possible - * range for an [[scala.Int]] result, then a `java.lang.ArithmeticException` is - * thrown. - */ - def toIntExact = bigDecimal.intValueExact - - /** Converts this `BigDecimal` to a [[scala.Long]], checking for lost information. - * If this `BigDecimal` has a nonzero fractional part, or is out of the possible - * range for a [[scala.Long]] result, then a `java.lang.ArithmeticException` is - * thrown. - */ - def toLongExact = bigDecimal.longValueExact - - /** Creates a partially constructed NumericRange[BigDecimal] in range - * `[start;end)`, where start is the target BigDecimal. 
The step - * must be supplied via the "by" method of the returned object in order - * to receive the fully constructed range. For example: - * {{{ - * val partial = BigDecimal(1.0) to 2.0 // not usable yet - * val range = partial by 0.01 // now a NumericRange - * val range2 = BigDecimal(0) to 1.0 by 0.01 // all at once of course is fine too - * }}} - * - * @param end the end value of the range (exclusive) - * @return the partially constructed NumericRange - */ - def until(end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Exclusive[BigDecimal]] = - new Range.Partial(until(end, _)) - - /** Same as the one-argument `until`, but creates the range immediately. */ - def until(end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = Range.BigDecimal(this, end, step) - - /** Like `until`, but inclusive of the end value. */ - def to(end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Inclusive[BigDecimal]] = - new Range.Partial(to(end, _)) - - /** Like `until`, but inclusive of the end value. */ - def to(end: BigDecimal, step: BigDecimal) = Range.BigDecimal.inclusive(this, end, step) - - /** Converts this `BigDecimal` to a scala.BigInt. - */ - def toBigInt: BigInt = new BigInt(this.bigDecimal.toBigInteger) - - /** Converts this `BigDecimal` to a scala.BigInt if it - * can be done losslessly, returning Some(BigInt) or None. - */ - def toBigIntExact: Option[BigInt] = - if (isWhole) { - try Some(new BigInt(this.bigDecimal.toBigIntegerExact)) - catch { case _: ArithmeticException => None } - } - else None - - /** Returns the decimal String representation of this BigDecimal. - */ - override def toString: String = this.bigDecimal.toString - -} diff --git a/stdlib-bootstrapped/src/scala/math/Ordering.scala b/stdlib-bootstrapped/src/scala/math/Ordering.scala deleted file mode 100644 index 8ff5a72d6c27..000000000000 --- a/stdlib-bootstrapped/src/scala/math/Ordering.scala +++ /dev/null @@ -1,927 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package math - -import java.util.Comparator - -import scala.language.implicitConversions -import scala.annotation.migration - -/** Ordering is a trait whose instances each represent a strategy for sorting - * instances of a type. - * - * Ordering's companion object defines many implicit objects to deal with - * subtypes of [[AnyVal]] (e.g. `Int`, `Double`), `String`, and others. - * - * To sort instances by one or more member variables, you can take advantage - * of these built-in orderings using [[Ordering.by]] and [[Ordering.on]]: - * - * {{{ - * import scala.util.Sorting - * val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3)) - * - * // sort by 2nd element - * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2)) - * - * // sort by the 3rd element, then 1st - * Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1))) - * }}} - * - * An `Ordering[T]` is implemented by specifying the [[compare]] method, - * `compare(a: T, b: T): Int`, which decides how to order two instances - * `a` and `b`. Instances of `Ordering[T]` can be used by things like - * `scala.util.Sorting` to sort collections like `Array[T]`. 
- * - * For example: - * - * {{{ - * import scala.util.Sorting - * - * case class Person(name:String, age:Int) - * val people = Array(Person("bob", 30), Person("ann", 32), Person("carl", 19)) - * - * // sort by age - * object AgeOrdering extends Ordering[Person] { - * def compare(a:Person, b:Person) = a.age.compare(b.age) - * } - * Sorting.quickSort(people)(AgeOrdering) - * }}} - * - * This trait and [[scala.math.Ordered]] both provide this same functionality, but - * in different ways. A type `T` can be given a single way to order itself by - * extending `Ordered`. Using `Ordering`, this same type may be sorted in many - * other ways. `Ordered` and `Ordering` both provide implicits allowing them to be - * used interchangeably. - * - * You can `import scala.math.Ordering.Implicits._` to gain access to other - * implicit orderings. - * - * @see [[scala.math.Ordered]], [[scala.util.Sorting]], [[scala.math.Ordering.Implicits]] - */ -@annotation.implicitNotFound(msg = "No implicit Ordering defined for ${T}.") -trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializable { - outer => - - /** Returns whether a comparison between `x` and `y` is defined, and if so - * the result of `compare(x, y)`. - */ - def tryCompare(x: T, y: T): Some[Int] = Some(compare(x, y)) - - /** Returns an integer whose sign communicates how x compares to y. - * - * The result sign has the following meaning: - * - * - negative if x < y - * - positive if x > y - * - zero otherwise (if x == y) - */ - def compare(x: T, y: T): Int - - /** Return true if `x` <= `y` in the ordering. */ - override def lteq(x: T, y: T): Boolean = compare(x, y) <= 0 - - /** Return true if `x` >= `y` in the ordering. */ - override def gteq(x: T, y: T): Boolean = compare(x, y) >= 0 - - /** Return true if `x` < `y` in the ordering. */ - override def lt(x: T, y: T): Boolean = compare(x, y) < 0 - - /** Return true if `x` > `y` in the ordering. */ - override def gt(x: T, y: T): Boolean = compare(x, y) > 0 - - /** Return true if `x` == `y` in the ordering. */ - override def equiv(x: T, y: T): Boolean = compare(x, y) == 0 - - /** Return `x` if `x` >= `y`, otherwise `y`. */ - def max[U <: T](x: U, y: U): U = if (gteq(x, y)) x else y - - /** Return `x` if `x` <= `y`, otherwise `y`. */ - def min[U <: T](x: U, y: U): U = if (lteq(x, y)) x else y - - /** Return the opposite ordering of this one. - * - * Implementations overriding this method MUST override [[isReverseOf]] - * as well if they change the behavior at all (for example, caching does - * not require overriding it). - */ - override def reverse: Ordering[T] = new Ordering.Reverse[T](this) - - /** Returns whether or not the other ordering is the opposite - * ordering of this one. - * - * Equivalent to `other == this.reverse`. - * - * Implementations should only override this method if they are overriding - * [[reverse]] as well. - */ - def isReverseOf(other: Ordering[_]): Boolean = other match { - case that: Ordering.Reverse[_] => that.outer == this - case _ => false - } - - /** Given f, a function from U into T, creates an Ordering[U] whose compare - * function is equivalent to: - * - * {{{ - * def compare(x:U, y:U) = Ordering[T].compare(f(x), f(y)) - * }}} - */ - def on[U](f: U => T): Ordering[U] = new Ordering[U] { - def compare(x: U, y: U) = outer.compare(f(x), f(y)) - } - - /** Creates an Ordering[T] whose compare function returns the - * result of this Ordering's compare function, if it is non-zero, - * or else the result of `other`s compare function. 
- * - * @example - * {{{ - * case class Pair(a: Int, b: Int) - * - * val pairOrdering = Ordering.by[Pair, Int](_.a) - * .orElse(Ordering.by[Pair, Int](_.b)) - * }}} - * - * @param other an Ordering to use if this Ordering returns zero - */ - def orElse(other: Ordering[T]): Ordering[T] = (x, y) => { - val res1 = outer.compare(x, y) - if (res1 != 0) res1 else other.compare(x, y) - } - - /** Given f, a function from T into S, creates an Ordering[T] whose compare - * function returns the result of this Ordering's compare function, - * if it is non-zero, or else a result equivalent to: - * - * {{{ - * Ordering[S].compare(f(x), f(y)) - * }}} - * - * This function is equivalent to passing the result of `Ordering.by(f)` - * to `orElse`. - * - * @example - * {{{ - * case class Pair(a: Int, b: Int) - * - * val pairOrdering = Ordering.by[Pair, Int](_.a) - * .orElseBy[Int](_.b) - * }}} - */ - def orElseBy[S](f: T => S)(implicit ord: Ordering[S]): Ordering[T] = (x, y) => { - val res1 = outer.compare(x, y) - if (res1 != 0) res1 else ord.compare(f(x), f(y)) - } - - /** This inner class defines comparison operators available for `T`. - * - * It can't extend `AnyVal` because it is not a top-level class - * or a member of a statically accessible object. - */ - class OrderingOps(lhs: T) { - def <(rhs: T): Boolean = lt(lhs, rhs) - def <=(rhs: T): Boolean = lteq(lhs, rhs) - def >(rhs: T): Boolean = gt(lhs, rhs) - def >=(rhs: T): Boolean = gteq(lhs, rhs) - def equiv(rhs: T): Boolean = Ordering.this.equiv(lhs, rhs) - def max(rhs: T): T = Ordering.this.max(lhs, rhs) - def min(rhs: T): T = Ordering.this.min(lhs, rhs) - } - - /** This implicit method augments `T` with the comparison operators defined - * in `scala.math.Ordering.Ops`. - */ - implicit def mkOrderingOps(lhs: T): OrderingOps = new OrderingOps(lhs) -} - -trait LowPriorityOrderingImplicits { - - type AsComparable[A] = A => Comparable[_ >: A] - - /** This would conflict with all the nice implicit Orderings - * available, but thanks to the magic of prioritized implicits - * via subclassing we can make `Ordered[A] => Ordering[A]` only - * turn up if nothing else works. Since `Ordered[A]` extends - * `Comparable[A]` anyway, we can throw in some Java interop too. - */ - implicit def ordered[A](implicit asComparable: AsComparable[A]): Ordering[A] = new Ordering[A] { - def compare(x: A, y: A): Int = asComparable(x).compareTo(y) - } - - implicit def comparatorToOrdering[A](implicit cmp: Comparator[A]): Ordering[A] = new Ordering[A] { - def compare(x: A, y: A) = cmp.compare(x, y) - } -} - -/** This is the companion object for the [[scala.math.Ordering]] trait. - * - * It contains many implicit orderings as well as well as methods to construct - * new orderings. - */ -object Ordering extends LowPriorityOrderingImplicits { - private final val reverseSeed = 41 - private final val optionSeed = 43 - private final val iterableSeed = 47 - - @inline def apply[T](implicit ord: Ordering[T]) = ord - - /** An ordering which caches the value of its reverse. 
*/ - sealed trait CachedReverse[T] extends Ordering[T] { - private[this] val _reverse = super.reverse - override final def reverse: Ordering[T] = _reverse - override final def isReverseOf(other: Ordering[_]): Boolean = other eq _reverse - } - - /** A reverse ordering */ - private final class Reverse[T](private[Ordering] val outer: Ordering[T]) extends Ordering[T] { - override def reverse: Ordering[T] = outer - override def isReverseOf(other: Ordering[_]): Boolean = other == outer - - def compare(x: T, y: T): Int = outer.compare(y, x) - override def lteq(x: T, y: T): Boolean = outer.lteq(y, x) - override def gteq(x: T, y: T): Boolean = outer.gteq(y, x) - override def lt(x: T, y: T): Boolean = outer.lt(y, x) - override def gt(x: T, y: T): Boolean = outer.gt(y, x) - override def equiv(x: T, y: T): Boolean = outer.equiv(y, x) - override def max[U <: T](x: U, y: U): U = outer.min(x, y) - override def min[U <: T](x: U, y: U): U = outer.max(x, y) - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: Reverse[_] => this.outer == that.outer - case _ => false - } - override def hashCode(): Int = outer.hashCode() * reverseSeed - } - - @SerialVersionUID(-2996748994664583574L) - private final class IterableOrdering[CC[X] <: Iterable[X], T](private val ord: Ordering[T]) extends Ordering[CC[T]] { - def compare(x: CC[T], y: CC[T]): Int = { - val xe = x.iterator - val ye = y.iterator - - while (xe.hasNext && ye.hasNext) { - val res = ord.compare(xe.next(), ye.next()) - if (res != 0) return res - } - - Boolean.compare(xe.hasNext, ye.hasNext) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: IterableOrdering[_, _] => this.ord == that.ord - case _ => false - } - override def hashCode(): Int = ord.hashCode() * iterableSeed - } - - trait ExtraImplicits { - /** Not in the standard scope due to the potential for divergence: - * For instance `implicitly[Ordering[Any]]` diverges in its presence. - */ - implicit def seqOrdering[CC[X] <: scala.collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] = - new IterableOrdering[CC, T](ord) - - implicit def sortedSetOrdering[CC[X] <: scala.collection.SortedSet[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] = - new IterableOrdering[CC, T](ord) - - /** This implicit creates a conversion from any value for which an - * implicit `Ordering` exists to the class which creates infix operations. - * With it imported, you can write methods as follows: - * - * {{{ - * def lessThan[T: Ordering](x: T, y: T) = x < y - * }}} - */ - implicit def infixOrderingOps[T](x: T)(implicit ord: Ordering[T]): Ordering[T]#OrderingOps = new ord.OrderingOps(x) - } - - /** An object containing implicits which are not in the default scope. */ - object Implicits extends ExtraImplicits { } - - /** Construct an Ordering[T] given a function `lt`. 
*/ - def fromLessThan[T](cmp: (T, T) => Boolean): Ordering[T] = new Ordering[T] { - def compare(x: T, y: T) = if (cmp(x, y)) -1 else if (cmp(y, x)) 1 else 0 - // overrides to avoid multiple comparisons - override def lt(x: T, y: T): Boolean = cmp(x, y) - override def gt(x: T, y: T): Boolean = cmp(y, x) - override def gteq(x: T, y: T): Boolean = !cmp(x, y) - override def lteq(x: T, y: T): Boolean = !cmp(y, x) - } - - /** Given f, a function from T into S, creates an Ordering[T] whose compare - * function is equivalent to: - * - * {{{ - * def compare(x:T, y:T) = Ordering[S].compare(f(x), f(y)) - * }}} - * - * This function is an analogue to Ordering.on where the Ordering[S] - * parameter is passed implicitly. - */ - def by[T, S](f: T => S)(implicit ord: Ordering[S]): Ordering[T] = new Ordering[T] { - def compare(x: T, y: T) = ord.compare(f(x), f(y)) - override def lt(x: T, y: T): Boolean = ord.lt(f(x), f(y)) - override def gt(x: T, y: T): Boolean = ord.gt(f(x), f(y)) - override def gteq(x: T, y: T): Boolean = ord.gteq(f(x), f(y)) - override def lteq(x: T, y: T): Boolean = ord.lteq(f(x), f(y)) - } - - trait UnitOrdering extends Ordering[Unit] { - def compare(x: Unit, y: Unit) = 0 - } - @SerialVersionUID(4089257611611206746L) - implicit object Unit extends UnitOrdering - - trait BooleanOrdering extends Ordering[Boolean] { - def compare(x: Boolean, y: Boolean): Int = java.lang.Boolean.compare(x, y) - } - @SerialVersionUID(-94703182178890445L) - implicit object Boolean extends BooleanOrdering - - trait ByteOrdering extends Ordering[Byte] { - def compare(x: Byte, y: Byte): Int = java.lang.Byte.compare(x, y) - } - @SerialVersionUID(-2268545360148786406L) - implicit object Byte extends ByteOrdering - - trait CharOrdering extends Ordering[Char] { - def compare(x: Char, y: Char): Int = java.lang.Character.compare(x, y) - } - @SerialVersionUID(2588141633104296698L) - implicit object Char extends CharOrdering - - trait ShortOrdering extends Ordering[Short] { - def compare(x: Short, y: Short): Int = java.lang.Short.compare(x, y) - } - @SerialVersionUID(4919657051864630912L) - implicit object Short extends ShortOrdering - - trait IntOrdering extends Ordering[Int] { - def compare(x: Int, y: Int): Int = java.lang.Integer.compare(x, y) - } - @SerialVersionUID(-8412871093094815037L) - implicit object Int extends IntOrdering with CachedReverse[Int] - - trait LongOrdering extends Ordering[Long] { - def compare(x: Long, y: Long): Int = java.lang.Long.compare(x, y) - } - @SerialVersionUID(-5231423581640563981L) - implicit object Long extends LongOrdering - - /** `Ordering`s for `Float`s. - * - * The behavior of the comparison operations provided by the default (implicit) - * ordering on `Float` changed in 2.10.0 and 2.13.0. - * Prior to Scala 2.10.0, the `Ordering` instance used semantics - * consistent with `java.lang.Float.compare`. - * - * Scala 2.10.0 changed the implementation of `lt`, `equiv`, `min`, etc., to be - * IEEE 754 compliant, while keeping the `compare` method NOT compliant, - * creating an internally inconsistent instance. IEEE 754 specifies that - * `0.0F == -0.0F`. In addition, it requires all comparisons with `Float.NaN` return - * `false` thus `0.0F < Float.NaN`, `0.0F > Float.NaN`, and - * `Float.NaN == Float.NaN` all yield `false`, analogous `None` in `flatMap`. 
- * - * Recognizing the limitation of the IEEE 754 semantics in terms of ordering, - * Scala 2.13.0 created two instances: `Ordering.Float.IeeeOrdering`, which retains - * the IEEE 754 semantics from Scala 2.12.x, and `Ordering.Float.TotalOrdering`, - * which brings back the `java.lang.Float.compare` semantics for all operations. - * The default extends `TotalOrdering`. - * - * {{{ - * List(0.0F, 1.0F, 0.0F / 0.0F, -1.0F / 0.0F).sorted // List(-Infinity, 0.0, 1.0, NaN) - * List(0.0F, 1.0F, 0.0F / 0.0F, -1.0F / 0.0F).min // -Infinity - * implicitly[Ordering[Float]].lt(0.0F, 0.0F / 0.0F) // true - * { - * import Ordering.Float.IeeeOrdering - * List(0.0F, 1.0F, 0.0F / 0.0F, -1.0F / 0.0F).sorted // List(-Infinity, 0.0, 1.0, NaN) - * List(0.0F, 1.0F, 0.0F / 0.0F, -1.0F / 0.0F).min // NaN - * implicitly[Ordering[Float]].lt(0.0F, 0.0F / 0.0F) // false - * } - * }}} - * - * @define floatOrdering Because the behavior of `Float`s specified by IEEE is - * not consistent with a total ordering when dealing with - * `NaN`, there are two orderings defined for `Float`: - * `TotalOrdering`, which is consistent with a total - * ordering, and `IeeeOrdering`, which is consistent - * as much as possible with IEEE spec and floating point - * operations defined in [[scala.math]]. - */ - object Float { - /** An ordering for `Float`s which is a fully consistent total ordering, - * and treats `NaN` as larger than all other `Float` values; it behaves - * the same as [[java.lang.Float.compare]]. - * - * $floatOrdering - * - * This ordering may be preferable for sorting collections. - * - * @see [[IeeeOrdering]] - */ - trait TotalOrdering extends Ordering[Float] { - def compare(x: Float, y: Float) = java.lang.Float.compare(x, y) - } - @SerialVersionUID(2951539161283192433L) - implicit object TotalOrdering extends TotalOrdering - - /** An ordering for `Float`s which is consistent with IEEE specifications - * whenever possible. - * - * - `lt`, `lteq`, `equiv`, `gteq` and `gt` are consistent with primitive - * comparison operations for `Float`s, and return `false` when called with - * `NaN`. - * - `min` and `max` are consistent with `math.min` and `math.max`, and - * return `NaN` when called with `NaN` as either argument. - * - `compare` behaves the same as [[java.lang.Float.compare]]. - * - * $floatOrdering - * - * This ordering may be preferable for numeric contexts. 
- * - * @see [[TotalOrdering]] - */ - trait IeeeOrdering extends Ordering[Float] { - def compare(x: Float, y: Float) = java.lang.Float.compare(x, y) - - override def lteq(x: Float, y: Float): Boolean = x <= y - override def gteq(x: Float, y: Float): Boolean = x >= y - override def lt(x: Float, y: Float): Boolean = x < y - override def gt(x: Float, y: Float): Boolean = x > y - override def equiv(x: Float, y: Float): Boolean = x == y - override def max[U <: Float](x: U, y: U): U = math.max(x, y).asInstanceOf[U] - override def min[U <: Float](x: U, y: U): U = math.min(x, y).asInstanceOf[U] - } - @SerialVersionUID(2142189527751553605L) - implicit object IeeeOrdering extends IeeeOrdering - } - @migration( - " The default implicit ordering for floats now maintains consistency\n" + - " between its `compare` method and its `lt`, `min`, `equiv`, etc., methods,\n" + - " which means nonconforming to IEEE 754's behavior for -0.0F and NaN.\n" + - " The sort order of floats remains the same, however, with NaN at the end.\n" + - " Import Ordering.Float.IeeeOrdering to recover the previous behavior.\n" + - " See also https://www.scala-lang.org/api/current/scala/math/Ordering$$Float$.html.", "2.13.0") - @SerialVersionUID(-8500693657289762132L) - implicit object DeprecatedFloatOrdering extends Float.TotalOrdering - - /** `Ordering`s for `Double`s. - * - * The behavior of the comparison operations provided by the default (implicit) - * ordering on `Double` changed in 2.10.0 and 2.13.0. - * Prior to Scala 2.10.0, the `Ordering` instance used semantics - * consistent with `java.lang.Double.compare`. - * - * Scala 2.10.0 changed the implementation of `lt`, `equiv`, `min`, etc., to be - * IEEE 754 compliant, while keeping the `compare` method NOT compliant, - * creating an internally inconsistent instance. IEEE 754 specifies that - * `0.0 == -0.0`. In addition, it requires all comparisons with `Double.NaN` return - * `false` thus `0.0 < Double.NaN`, `0.0 > Double.NaN`, and - * `Double.NaN == Double.NaN` all yield `false`, analogous `None` in `flatMap`. - * - * Recognizing the limitation of the IEEE 754 semantics in terms of ordering, - * Scala 2.13.0 created two instances: `Ordering.Double.IeeeOrdering`, which retains - * the IEEE 754 semantics from Scala 2.12.x, and `Ordering.Double.TotalOrdering`, - * which brings back the `java.lang.Double.compare` semantics for all operations. - * The default extends `TotalOrdering`. - * - * {{{ - * List(0.0, 1.0, 0.0 / 0.0, -1.0 / 0.0).sorted // List(-Infinity, 0.0, 1.0, NaN) - * List(0.0, 1.0, 0.0 / 0.0, -1.0 / 0.0).min // -Infinity - * implicitly[Ordering[Double]].lt(0.0, 0.0 / 0.0) // true - * { - * import Ordering.Double.IeeeOrdering - * List(0.0, 1.0, 0.0 / 0.0, -1.0 / 0.0).sorted // List(-Infinity, 0.0, 1.0, NaN) - * List(0.0, 1.0, 0.0 / 0.0, -1.0 / 0.0).min // NaN - * implicitly[Ordering[Double]].lt(0.0, 0.0 / 0.0) // false - * } - * }}} - * - * @define doubleOrdering Because the behavior of `Double`s specified by IEEE is - * not consistent with a total ordering when dealing with - * `NaN`, there are two orderings defined for `Double`: - * `TotalOrdering`, which is consistent with a total - * ordering, and `IeeeOrdering`, which is consistent - * as much as possible with IEEE spec and floating point - * operations defined in [[scala.math]]. - */ - object Double { - /** An ordering for `Double`s which is a fully consistent total ordering, - * and treats `NaN` as larger than all other `Double` values; it behaves - * the same as [[java.lang.Double.compare]]. 
- * - * $doubleOrdering - * - * This ordering may be preferable for sorting collections. - * - * @see [[IeeeOrdering]] - */ - trait TotalOrdering extends Ordering[Double] { - def compare(x: Double, y: Double) = java.lang.Double.compare(x, y) - } - @SerialVersionUID(-831119229746134011L) - implicit object TotalOrdering extends TotalOrdering - - /** An ordering for `Double`s which is consistent with IEEE specifications - * whenever possible. - * - * - `lt`, `lteq`, `equiv`, `gteq` and `gt` are consistent with primitive - * comparison operations for `Double`s, and return `false` when called with - * `NaN`. - * - `min` and `max` are consistent with `math.min` and `math.max`, and - * return `NaN` when called with `NaN` as either argument. - * - `compare` behaves the same as [[java.lang.Double.compare]]. - * - * $doubleOrdering - * - * This ordering may be preferable for numeric contexts. - * - * @see [[TotalOrdering]] - */ - trait IeeeOrdering extends Ordering[Double] { - def compare(x: Double, y: Double) = java.lang.Double.compare(x, y) - - override def lteq(x: Double, y: Double): Boolean = x <= y - override def gteq(x: Double, y: Double): Boolean = x >= y - override def lt(x: Double, y: Double): Boolean = x < y - override def gt(x: Double, y: Double): Boolean = x > y - override def equiv(x: Double, y: Double): Boolean = x == y - override def max[U <: Double](x: U, y: U): U = math.max(x, y).asInstanceOf[U] - override def min[U <: Double](x: U, y: U): U = math.min(x, y).asInstanceOf[U] - } - @SerialVersionUID(5722631152457877238L) - implicit object IeeeOrdering extends IeeeOrdering - } - @migration( - " The default implicit ordering for doubles now maintains consistency\n" + - " between its `compare` method and its `lt`, `min`, `equiv`, etc., methods,\n" + - " which means nonconforming to IEEE 754's behavior for -0.0 and NaN.\n" + - " The sort order of doubles remains the same, however, with NaN at the end.\n" + - " Import Ordering.Double.IeeeOrdering to recover the previous behavior.\n" + - " See also https://www.scala-lang.org/api/current/scala/math/Ordering$$Double$.html.", "2.13.0") - @SerialVersionUID(-7340686892557971538L) - implicit object DeprecatedDoubleOrdering extends Double.TotalOrdering - - trait BigIntOrdering extends Ordering[BigInt] { - def compare(x: BigInt, y: BigInt) = x.compare(y) - } - @SerialVersionUID(-3075297647817530785L) - implicit object BigInt extends BigIntOrdering - - trait BigDecimalOrdering extends Ordering[BigDecimal] { - def compare(x: BigDecimal, y: BigDecimal) = x.compare(y) - } - @SerialVersionUID(-833457937756812905L) - implicit object BigDecimal extends BigDecimalOrdering - - trait StringOrdering extends Ordering[String] { - def compare(x: String, y: String) = x.compareTo(y) - } - @SerialVersionUID(1302240016074071079L) - implicit object String extends StringOrdering - - trait SymbolOrdering extends Ordering[Symbol] { - def compare(x: Symbol, y: Symbol): Int = x.name.compareTo(y.name) - } - @SerialVersionUID(1996702162912307637L) - implicit object Symbol extends SymbolOrdering - - trait OptionOrdering[T] extends Ordering[Option[T]] { - def optionOrdering: Ordering[T] - def compare(x: Option[T], y: Option[T]) = (x, y) match { - case (None, None) => 0 - case (None, _) => -1 - case (_, None) => 1 - case (Some(x), Some(y)) => optionOrdering.compare(x, y) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: OptionOrdering[_] => this.optionOrdering == that.optionOrdering - case _ => false - } 
- override def hashCode(): Int = optionOrdering.hashCode() * optionSeed - } - implicit def Option[T](implicit ord: Ordering[T]): Ordering[Option[T]] = { - @SerialVersionUID(6958068162830323876L) - class O extends OptionOrdering[T] { val optionOrdering = ord } - new O() - } - - /** @deprecated Iterables are not guaranteed to have a consistent order, so the `Ordering` - * returned by this method may not be stable or meaningful. If you are using a type - * with a consistent order (such as `Seq`), use its `Ordering` (found in the - * [[Implicits]] object) instead. - */ - @deprecated("Iterables are not guaranteed to have a consistent order; if using a type with a " + - "consistent order (e.g. Seq), use its Ordering (found in the Ordering.Implicits object)", since = "2.13.0") - implicit def Iterable[T](implicit ord: Ordering[T]): Ordering[Iterable[T]] = - new IterableOrdering[Iterable, T](ord) - - implicit def Tuple2[T1, T2](implicit ord1: Ordering[T1], ord2: Ordering[T2]): Ordering[(T1, T2)] = - new Tuple2Ordering(ord1, ord2) - - @SerialVersionUID(4945084135299531202L) - private[this] final class Tuple2Ordering[T1, T2](private val ord1: Ordering[T1], - private val ord2: Ordering[T2]) extends Ordering[(T1, T2)] { - def compare(x: (T1, T2), y: (T1, T2)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - ord2.compare(x._2, y._2) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: Tuple2Ordering[_, _] => - this.ord1 == that.ord1 && - this.ord2 == that.ord2 - case _ => false - } - override def hashCode(): Int = (ord1, ord2).hashCode() - } - - implicit def Tuple3[T1, T2, T3](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3]) : Ordering[(T1, T2, T3)] = - new Tuple3Ordering(ord1, ord2, ord3) - - @SerialVersionUID(-5367223704121832335L) - private[this] final class Tuple3Ordering[T1, T2, T3](private val ord1: Ordering[T1], - private val ord2: Ordering[T2], - private val ord3: Ordering[T3]) extends Ordering[(T1, T2, T3)] { - def compare(x: (T1, T2, T3), y: (T1, T2, T3)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - ord3.compare(x._3, y._3) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: Tuple3Ordering[_, _, _] => - this.ord1 == that.ord1 && - this.ord2 == that.ord2 && - this.ord3 == that.ord3 - case _ => false - } - override def hashCode(): Int = (ord1, ord2, ord3).hashCode() - } - - implicit def Tuple4[T1, T2, T3, T4](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4]) : Ordering[(T1, T2, T3, T4)] = - new Tuple4Ordering(ord1, ord2, ord3, ord4) - - @SerialVersionUID(-6055313861145218178L) - private[this] final class Tuple4Ordering[T1, T2, T3, T4](private val ord1: Ordering[T1], - private val ord2: Ordering[T2], - private val ord3: Ordering[T3], - private val ord4: Ordering[T4]) - extends Ordering[(T1, T2, T3, T4)] { - def compare(x: (T1, T2, T3, T4), y: (T1, T2, T3, T4)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - ord4.compare(x._4, y._4) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true 
- case that: Tuple4Ordering[_, _, _, _] => - this.ord1 == that.ord1 && - this.ord2 == that.ord2 && - this.ord3 == that.ord3 && - this.ord4 == that.ord4 - case _ => false - } - override def hashCode(): Int = (ord1, ord2, ord3, ord4).hashCode() - } - - implicit def Tuple5[T1, T2, T3, T4, T5](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5]): Ordering[(T1, T2, T3, T4, T5)] = - new Tuple5Ordering(ord1, ord2, ord3, ord4, ord5) - - @SerialVersionUID(-5517329921227646061L) - private[this] final class Tuple5Ordering[T1, T2, T3, T4, T5](private val ord1: Ordering[T1], - private val ord2: Ordering[T2], - private val ord3: Ordering[T3], - private val ord4: Ordering[T4], - private val ord5: Ordering[T5]) - extends Ordering[(T1, T2, T3, T4, T5)] { - def compare(x: (T1, T2, T3, T4, T5), y: (T1, T2, T3, T4, T5)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - ord5.compare(x._5, y._5) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: Tuple5Ordering[_, _, _, _, _] => - this.ord1 == that.ord1 && - this.ord2 == that.ord2 && - this.ord3 == that.ord3 && - this.ord4 == that.ord4 && - this.ord5 == that.ord5 - case _ => false - } - override def hashCode(): Int = (ord1, ord2, ord3, ord4, ord5).hashCode() - } - - @SerialVersionUID(3045467524192969060L) - implicit def Tuple6[T1, T2, T3, T4, T5, T6](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6]): Ordering[(T1, T2, T3, T4, T5, T6)] = - new Tuple6Ordering(ord1, ord2, ord3, ord4, ord5, ord6) - - private[this] final class Tuple6Ordering[T1, T2, T3, T4, T5, T6](private val ord1: Ordering[T1], - private val ord2: Ordering[T2], - private val ord3: Ordering[T3], - private val ord4: Ordering[T4], - private val ord5: Ordering[T5], - private val ord6: Ordering[T6]) - extends Ordering[(T1, T2, T3, T4, T5, T6)] { - def compare(x: (T1, T2, T3, T4, T5, T6), y: (T1, T2, T3, T4, T5, T6)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - ord6.compare(x._6, y._6) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: Tuple6Ordering[_, _, _, _, _, _] => - this.ord1 == that.ord1 && - this.ord2 == that.ord2 && - this.ord3 == that.ord3 && - this.ord4 == that.ord4 && - this.ord5 == that.ord5 && - this.ord6 == that.ord6 - case _ => false - } - override def hashCode(): Int = (ord1, ord2, ord3, ord4, ord5, ord6).hashCode() - } - - implicit def Tuple7[T1, T2, T3, T4, T5, T6, T7](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7]): Ordering[(T1, T2, T3, T4, T5, T6, T7)] = - new Tuple7Ordering(ord1, ord2, ord3, ord4, ord5, ord6, ord7) - - @SerialVersionUID(1253188205893682451L) - private[this] final 
class Tuple7Ordering[T1, T2, T3, T4, T5, T6, T7](private val ord1: Ordering[T1], - private val ord2: Ordering[T2], - private val ord3: Ordering[T3], - private val ord4: Ordering[T4], - private val ord5: Ordering[T5], - private val ord6: Ordering[T6], - private val ord7: Ordering[T7]) - extends Ordering[(T1, T2, T3, T4, T5, T6, T7)] { - def compare(x: (T1, T2, T3, T4, T5, T6, T7), y: (T1, T2, T3, T4, T5, T6, T7)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - val compare6 = ord6.compare(x._6, y._6) - if (compare6 != 0) return compare6 - ord7.compare(x._7, y._7) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: Tuple7Ordering[_, _, _, _, _, _, _] => - this.ord1 == that.ord1 && - this.ord2 == that.ord2 && - this.ord3 == that.ord3 && - this.ord4 == that.ord4 && - this.ord5 == that.ord5 && - this.ord6 == that.ord6 && - this.ord7 == that.ord7 - case _ => false - } - override def hashCode(): Int = (ord1, ord2, ord3, ord4, ord5, ord6, ord7).hashCode() - } - - @SerialVersionUID(4003095353309354068L) - implicit def Tuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7], ord8: Ordering[T8]): Ordering[(T1, T2, T3, T4, T5, T6, T7, T8)] = - new Tuple8Ordering(ord1, ord2, ord3, ord4, ord5, ord6, ord7, ord8) - - private[this] final class Tuple8Ordering[T1, T2, T3, T4, T5, T6, T7, T8](private val ord1: Ordering[T1], - private val ord2: Ordering[T2], - private val ord3: Ordering[T3], - private val ord4: Ordering[T4], - private val ord5: Ordering[T5], - private val ord6: Ordering[T6], - private val ord7: Ordering[T7], - private val ord8: Ordering[T8]) - extends Ordering[(T1, T2, T3, T4, T5, T6, T7, T8)] { - def compare(x: (T1, T2, T3, T4, T5, T6, T7, T8), y: (T1, T2, T3, T4, T5, T6, T7, T8)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - val compare6 = ord6.compare(x._6, y._6) - if (compare6 != 0) return compare6 - val compare7 = ord7.compare(x._7, y._7) - if (compare7 != 0) return compare7 - ord8.compare(x._8, y._8) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: Tuple8Ordering[_, _, _, _, _, _, _, _] => - this.ord1 == that.ord1 && - this.ord2 == that.ord2 && - this.ord3 == that.ord3 && - this.ord4 == that.ord4 && - this.ord5 == that.ord5 && - this.ord6 == that.ord6 && - this.ord7 == that.ord7 && - this.ord8 == that.ord8 - case _ => false - } - override def hashCode(): Int = (ord1, ord2, ord3, ord4, ord5, ord6, ord7, ord8).hashCode() - } - - @SerialVersionUID(8185342054829975001L) - implicit def Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], 
ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7], ord8 : Ordering[T8], ord9: Ordering[T9]): Ordering[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] = - new Tuple9Ordering(ord1, ord2, ord3, ord4, ord5, ord6, ord7, ord8, ord9) - - private[this] final class Tuple9Ordering[T1, T2, T3, T4, T5, T6, T7, T8, T9](private val ord1: Ordering[T1], - private val ord2: Ordering[T2], - private val ord3: Ordering[T3], - private val ord4: Ordering[T4], - private val ord5: Ordering[T5], - private val ord6: Ordering[T6], - private val ord7: Ordering[T7], - private val ord8: Ordering[T8], - private val ord9: Ordering[T9]) - extends Ordering[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] { - def compare(x: (T1, T2, T3, T4, T5, T6, T7, T8, T9), y: (T1, T2, T3, T4, T5, T6, T7, T8, T9)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - val compare6 = ord6.compare(x._6, y._6) - if (compare6 != 0) return compare6 - val compare7 = ord7.compare(x._7, y._7) - if (compare7 != 0) return compare7 - val compare8 = ord8.compare(x._8, y._8) - if (compare8 != 0) return compare8 - ord9.compare(x._9, y._9) - } - - override def equals(obj: scala.Any): Boolean = obj match { - case that: AnyRef if this eq that => true - case that: Tuple9Ordering[_, _, _, _, _, _, _, _, _] => - this.ord1 == that.ord1 && - this.ord2 == that.ord2 && - this.ord3 == that.ord3 && - this.ord4 == that.ord4 && - this.ord5 == that.ord5 && - this.ord6 == that.ord6 && - this.ord7 == that.ord7 && - this.ord8 == that.ord8 && - this.ord9 == that.ord9 - case _ => false - } - override def hashCode(): Int = (ord1, ord2, ord3, ord4, ord5, ord6, ord7, ord8, ord9).hashCode() - } -} diff --git a/stdlib-bootstrapped/src/scala/sys/process/ProcessBuilderImpl.scala b/stdlib-bootstrapped/src/scala/sys/process/ProcessBuilderImpl.scala deleted file mode 100644 index 159681f13896..000000000000 --- a/stdlib-bootstrapped/src/scala/sys/process/ProcessBuilderImpl.scala +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package sys -package process - -import processInternal._ -import Process._ -import BasicIO.{LazilyListed, Streamed, Uncloseable} -import Uncloseable.protect - -import java.io.{FileInputStream, FileOutputStream} -import java.util.concurrent.LinkedBlockingQueue - -import scala.util.control.NonFatal - -private[process] trait ProcessBuilderImpl { - self: ProcessBuilder.type => - - private[process] class DaemonBuilder(underlying: ProcessBuilder) extends AbstractBuilder { - final def run(io: ProcessIO): Process = underlying.run(io.daemonized()) - } - - private[process] class Dummy(override val toString: String, exitValue: => Int) extends AbstractBuilder { - override def run(io: ProcessIO): Process = new DummyProcess(exitValue) - override def canPipeTo = true - } - - private[process] class URLInput(url: URL) extends IStreamBuilder(url.openStream(), url.toString) - private[process] class FileInput(file: File) extends IStreamBuilder(new FileInputStream(file), file.getAbsolutePath) - private[process] class FileOutput(file: File, append: Boolean) extends OStreamBuilder(new FileOutputStream(file, append), file.getAbsolutePath) - - private[process] class OStreamBuilder( - stream: => OutputStream, - label: String - ) extends ThreadBuilder(label, _ writeInput protect(stream)) { - override def hasExitValue = false - } - - private[process] class IStreamBuilder( - stream: => InputStream, - label: String - ) extends ThreadBuilder(label, _ processOutput protect(stream)) { - override def hasExitValue = false - } - - private[process] abstract class ThreadBuilder( - override val toString: String, - runImpl: ProcessIO => Unit - ) extends AbstractBuilder { - - override def run(io: ProcessIO): Process = { - val success = new LinkedBlockingQueue[Boolean](1) - def go(): Unit = { - var ok = false - try { - runImpl(io) - ok = true - } finally success.put(ok) - } - val t = Spawn("ThreadProcess", io.daemonizeThreads)(go()) - new ThreadProcess(t, success) - } - } - - /** Represents a simple command without any redirection or combination. 
*/ - private[process] class Simple(p: JProcessBuilder) extends AbstractBuilder { - override def run(io: ProcessIO): Process = { - import java.lang.ProcessBuilder.Redirect.{INHERIT => Inherit} - import io.{daemonizeThreads, processError, processOutput, writeInput} - - val inherit = writeInput eq BasicIO.connectToStdIn - if (inherit) p.redirectInput(Inherit) - - val process = p.start() // start the external process - - // spawn threads that process the input, output, and error streams using the functions defined in `io` - val inThread = - if (inherit || (writeInput eq BasicIO.connectNoOp)) null - else Spawn("Simple-input", daemon = true)(writeInput(process.getOutputStream)) - val outThread = Spawn("Simple-output", daemonizeThreads)(processOutput(process.getInputStream())) - val errorThread = - if (p.redirectErrorStream) Nil - else List(Spawn("Simple-error", daemonizeThreads)(processError(process.getErrorStream()))) - - new SimpleProcess(process, inThread, outThread :: errorThread) - } - override def toString = p.command.toString - override def canPipeTo = true - } - - private[scala] abstract class AbstractBuilder extends ProcessBuilder with Sink with Source { - protected def toSource: AbstractBuilder = this - protected def toSink: AbstractBuilder = this - - private[this] val defaultStreamCapacity = 4096 - - def #|(other: ProcessBuilder): ProcessBuilder = { - require(other.canPipeTo, "Piping to multiple processes is not supported.") - new PipedBuilder(this, other, false) - } - def #||(other: ProcessBuilder): ProcessBuilder = new OrBuilder(this, other) - def #&&(other: ProcessBuilder): ProcessBuilder = new AndBuilder(this, other) - def ###(other: ProcessBuilder): ProcessBuilder = new SequenceBuilder(this, other) - - def run(): Process = run(connectInput = false) - def run(connectInput: Boolean): Process = run(BasicIO.standard(connectInput)) - def run(log: ProcessLogger): Process = run(log, connectInput = false) - def run(log: ProcessLogger, connectInput: Boolean): Process = run(BasicIO(connectInput, log)) - - def !! = slurp(None, withIn = false) - def !!(log: ProcessLogger) = slurp(Some(log), withIn = false) - def !!< = slurp(None, withIn = true) - def !!<(log: ProcessLogger) = slurp(Some(log), withIn = true) - - def lazyLines: LazyList[String] = lazyLines(withInput = false, nonZeroException = true, None, defaultStreamCapacity) - def lazyLines(log: ProcessLogger): LazyList[String] = lazyLines(withInput = false, nonZeroException = true, Some(log), defaultStreamCapacity) - def lazyLines_! 
: LazyList[String] = lazyLines(withInput = false, nonZeroException = false, None, defaultStreamCapacity) - def lazyLines_!(log: ProcessLogger): LazyList[String] = lazyLines(withInput = false, nonZeroException = false, Some(log), defaultStreamCapacity) - def lazyLines(capacity: Integer): LazyList[String] = lazyLines(withInput = false, nonZeroException = true, None, capacity) - def lazyLines(log: ProcessLogger, capacity: Integer): LazyList[String] = lazyLines(withInput = false, nonZeroException = true, Some(log), capacity) - def lazyLines_!(capacity: Integer) : LazyList[String] = lazyLines(withInput = false, nonZeroException = false, None, capacity) - def lazyLines_!(log: ProcessLogger, capacity: Integer): LazyList[String] = lazyLines(withInput = false, nonZeroException = false, Some(log), capacity) - - @deprecated("internal", since = "2.13.4") def lineStream: Stream[String] = lineStream(withInput = false, nonZeroException = true, None, defaultStreamCapacity) - @deprecated("internal", since = "2.13.4") def lineStream(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = true, Some(log), defaultStreamCapacity) - @deprecated("internal", since = "2.13.4") def lineStream_! : Stream[String] = lineStream(withInput = false, nonZeroException = false, None, defaultStreamCapacity) - @deprecated("internal", since = "2.13.4") def lineStream_!(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = false, Some(log), defaultStreamCapacity) - @deprecated("internal", since = "2.13.4") def lineStream(capacity: Integer): Stream[String] = lineStream(withInput = false, nonZeroException = true, None, capacity) - @deprecated("internal", since = "2.13.4") def lineStream(log: ProcessLogger, capacity: Integer): Stream[String] = lineStream(withInput = false, nonZeroException = true, Some(log), capacity) - @deprecated("internal", since = "2.13.4") def lineStream_!(capacity: Integer) : Stream[String] = lineStream(withInput = false, nonZeroException = false, None, capacity) - @deprecated("internal", since = "2.13.4") def lineStream_!(log: ProcessLogger, capacity: Integer): Stream[String] = lineStream(withInput = false, nonZeroException = false, Some(log), capacity) - - def ! = run(connectInput = false).exitValue() - def !(io: ProcessIO) = run(io).exitValue() - def !(log: ProcessLogger) = runBuffered(log, connectInput = false) - def !< = run(connectInput = true).exitValue() - def !<(log: ProcessLogger) = runBuffered(log, connectInput = true) - - /** Constructs a new builder which runs this command with all input/output threads marked - * as daemon threads. This allows the creation of a long running process while still - * allowing the JVM to exit normally. - * - * Note: not in the public API because it's not fully baked, but I need the capability - * for fsc. - */ - def daemonized(): ProcessBuilder = new DaemonBuilder(this) - - private[this] def slurp(log: Option[ProcessLogger], withIn: Boolean): String = { - val buffer = new StringBuffer - val code = this ! 
BasicIO(withIn, buffer, log) - - if (code == 0) buffer.toString - else scala.sys.error("Nonzero exit value: " + code) - } - - private[this] def lazyLines( - withInput: Boolean, - nonZeroException: Boolean, - log: Option[ProcessLogger], - capacity: Integer - ): LazyList[String] = { - val lazilyListed = LazilyListed[String](nonZeroException, capacity) - val process = run(BasicIO(withInput, lazilyListed.process, log)) - - // extract done from lazilyListed so that the anonymous function below closes over just the done and not the whole lazilyListed (see https://github.com/scala/bug/issues/12185) - val done = lazilyListed.done - - Spawn("LazyLines") { - done { - try process.exitValue() - catch { - case NonFatal(_) => -2 - } - } - } - lazilyListed.lazyList - } - - @deprecated("internal", since = "2.13.4") - private[this] def lineStream( - withInput: Boolean, - nonZeroException: Boolean, - log: Option[ProcessLogger], - capacity: Integer - ): Stream[String] = { - val streamed = Streamed[String](nonZeroException, capacity) - val process = run(BasicIO(withInput, streamed.process, log)) - - Spawn("LineStream")(streamed done process.exitValue()) - streamed.stream() - } - - private[this] def runBuffered(log: ProcessLogger, connectInput: Boolean) = - log buffer run(log, connectInput).exitValue() - - def canPipeTo = false - def hasExitValue = true - } - - private[process] class URLImpl(url: URL) extends URLBuilder with Source { - protected def toSource: URLInput = new URLInput(url) - } - private[process] class FileImpl(base: File) extends FileBuilder with Sink with Source { - protected def toSource: FileInput = new FileInput(base) - protected def toSink: FileOutput = new FileOutput(base, false) - - def #<<(f: File): ProcessBuilder = #<<(new FileInput(f)) - def #<<(u: URL): ProcessBuilder = #<<(new URLInput(u)) - def #<<(s: => InputStream): ProcessBuilder = #<<(new IStreamBuilder(s, "")) - def #<<(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(b, new FileOutput(base, true), false) - } - - private[process] abstract class BasicBuilder extends AbstractBuilder { - protected[this] def checkNotThis(a: ProcessBuilder) = require(a != this, "Compound process '" + a + "' cannot contain itself.") - final def run(io: ProcessIO): Process = { - val p = createProcess(io) - p.start() - p - } - protected[this] def createProcess(io: ProcessIO): BasicProcess - } - - private[process] abstract class SequentialBuilder( - a: ProcessBuilder, - b: ProcessBuilder, - operatorString: String - ) extends BasicBuilder { - - checkNotThis(a) - checkNotThis(b) - override def toString = " ( " + a + " " + operatorString + " " + b + " ) " - } - - private[process] class PipedBuilder( - first: ProcessBuilder, - second: ProcessBuilder, - toError: Boolean - ) extends SequentialBuilder(first, second, if (toError) "#|!" 
else "#|") { - - override def createProcess(io: ProcessIO): PipedProcesses = new PipedProcesses(first, second, io, toError) - } - - private[process] class AndBuilder( - first: ProcessBuilder, - second: ProcessBuilder - ) extends SequentialBuilder(first, second, "#&&") { - override def createProcess(io: ProcessIO): AndProcess = new AndProcess(first, second, io) - } - - private[process] class OrBuilder( - first: ProcessBuilder, - second: ProcessBuilder - ) extends SequentialBuilder(first, second, "#||") { - override def createProcess(io: ProcessIO): OrProcess = new OrProcess(first, second, io) - } - - private[process] class SequenceBuilder( - first: ProcessBuilder, - second: ProcessBuilder - ) extends SequentialBuilder(first, second, "###") { - override def createProcess(io: ProcessIO): ProcessSequence = new ProcessSequence(first, second, io) - } -}