diff --git a/library/src/scala/AnyVal.scala b/library/src/scala/AnyVal.scala index 79be48d642be..b18e14eadd79 100644 --- a/library/src/scala/AnyVal.scala +++ b/library/src/scala/AnyVal.scala @@ -54,5 +54,5 @@ package scala * explained in greater detail in the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]]. */ abstract class AnyVal extends Any { - def getClass(): Class[_ <: AnyVal] = null + def getClass(): Class[_ <: AnyVal] = null.asInstanceOf } diff --git a/library/src/scala/Array.scala b/library/src/scala/Array.scala index 02af1837e1b7..901cc6117264 100644 --- a/library/src/scala/Array.scala +++ b/library/src/scala/Array.scala @@ -583,7 +583,7 @@ object Array { def get: UnapplySeqWrapper[T] = this def lengthCompare(len: Int): Int = a.lengthCompare(len) def apply(i: Int): T = a(i) - def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)).nn // clones the array, also if n == 0 def toSeq: scala.Seq[T] = a.toSeq // clones the array } } diff --git a/library/src/scala/Enumeration.scala b/library/src/scala/Enumeration.scala index bf61198f7d3b..7e7c697020be 100644 --- a/library/src/scala/Enumeration.scala +++ b/library/src/scala/Enumeration.scala @@ -105,7 +105,7 @@ abstract class Enumeration (initial: Int) extends Serializable { private val vmap: mutable.Map[Int, Value] = new mutable.HashMap /** The cache listing all values of this enumeration. */ - @transient private var vset: ValueSet = null + @transient private var vset: ValueSet | Null = null @transient @volatile private var vsetDefined = false /** The mapping from the integer used to identify values to their @@ -119,17 +119,17 @@ abstract class Enumeration (initial: Int) extends Serializable { vset = (ValueSet.newBuilder ++= vmap.values).result() vsetDefined = true } - vset + vset.nn } /** The integer to use to identify the next created value. */ protected var nextId: Int = initial /** The string to use to name the next created value. */ - protected var nextName: Iterator[String] = _ + protected var nextName: Iterator[String] | Null = _ - private def nextNameOrNull = - if (nextName != null && nextName.hasNext) nextName.next() else null + private def nextNameOrNull: String | Null = + if (nextName != null && nextName.nn.hasNext) nextName.nn.next() else null /** The highest integer amongst those used to identify values in this * enumeration. */ @@ -175,7 +175,7 @@ abstract class Enumeration (initial: Int) extends Serializable { * @param name A human-readable name for that value. * @return Fresh value called `name`. */ - protected final def Value(name: String): Value = Value(nextId, name) + protected final def Value(name: String | Null): Value = Value(nextId, name) /** Creates a fresh value, part of this enumeration, called `name` * and identified by the integer `i`. @@ -185,10 +185,10 @@ abstract class Enumeration (initial: Int) extends Serializable { * @param name A human-readable name for that value. * @return Fresh value with the provided identifier `i` and name `name`. */ - protected final def Value(i: Int, name: String): Value = new Val(i, name) + protected final def Value(i: Int, name: String | Null): Value = new Val(i, name) private def populateNameMap(): Unit = { - @tailrec def getFields(clazz: Class[_], acc: Array[JField]): Array[JField] = { + @tailrec def getFields(clazz: Class[?] 
| Null, acc: Array[JField]): Array[JField] = { if (clazz == null) acc else @@ -246,7 +246,7 @@ abstract class Enumeration (initial: Int) extends Serializable { * identification behaviour. */ @SerialVersionUID(0 - 3501153230598116017L) - protected class Val(i: Int, name: String) extends Value with Serializable { + protected class Val(i: Int, name: String | Null) extends Value with Serializable { def this(i: Int) = this(i, nextNameOrNull) def this(name: String) = this(nextId, name) def this() = this(nextId) @@ -259,13 +259,13 @@ abstract class Enumeration (initial: Int) extends Serializable { if (i < bottomId) bottomId = i def id: Int = i override def toString(): String = - if (name != null) name + if (name != null) name.nn else try thisenum.nameOf(i) catch { case _: NoSuchElementException => "" } protected def readResolve(): AnyRef = { val enumeration = thisenum.readResolve().asInstanceOf[Enumeration] - if (enumeration.vmap == null) this + if (enumeration.vmap eq null) this else enumeration.vmap(i) } } diff --git a/library/src/scala/Option.scala b/library/src/scala/Option.scala index 514bf50607ff..130eb3d01893 100644 --- a/library/src/scala/Option.scala +++ b/library/src/scala/Option.scala @@ -26,7 +26,7 @@ object Option { * @param x the value * @return Some(value) if value != null, None if value == null */ - def apply[A](x: A): Option[A] = if (x == null) None else Some(x) + def apply[A](x: A | Null): Option[A] = if (x == null) None else Some(x) /** An Option factory which returns `None` in a manner consistent with * the collections hierarchy. diff --git a/library/src/scala/Predef.scala b/library/src/scala/Predef.scala index 26dbc568a9ab..331a255015e8 100644 --- a/library/src/scala/Predef.scala +++ b/library/src/scala/Predef.scala @@ -119,7 +119,7 @@ object Predef extends LowPriorityImplicits { * @return The runtime [[Class]] representation of type `T`. * @group utilities */ - def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler. + def classOf[T]: Class[T] = null.asInstanceOf // This is a stub method. The actual implementation is filled in by the compiler. /** * Retrieve the single value of a type with a unique inhabitant. @@ -412,7 +412,7 @@ object Predef extends LowPriorityImplicits { * @param x the object to print; may be null. * @group console-output */ - def print(x: Any): Unit = Console.print(x) + def print(x: Any | Null): Unit = Console.print(x) /** Prints a newline character on the default output. * @group console-output @@ -424,7 +424,7 @@ object Predef extends LowPriorityImplicits { * @param x the object to print. * @group console-output */ - def println(x: Any): Unit = Console.println(x) + def println(x: Any | Null): Unit = Console.println(x) /** Prints its arguments as a formatted string to the default output, * based on a string pattern (in a fashion similar to printf in C). @@ -541,7 +541,7 @@ private[scala] abstract class LowPriorityImplicits extends LowPriorityImplicits2 @inline implicit def booleanWrapper(x: Boolean): runtime.RichBoolean = new runtime.RichBoolean(x) /** @group conversions-array-to-wrapped-array */ - implicit def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = + implicit def genericWrapArray[T](xs: Array[T] | Null): ArraySeq[T] | Null = if (xs eq null) null else ArraySeq.make(xs) @@ -549,38 +549,38 @@ private[scala] abstract class LowPriorityImplicits extends LowPriorityImplicits2 // is as good as another for all T <: AnyRef. 
Instead of creating 100,000,000 // unique ones by way of this implicit, let's share one. /** @group conversions-array-to-wrapped-array */ - implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): ArraySeq.ofRef[T] = { + implicit def wrapRefArray[T <: AnyRef](xs: Array[T] | Null): ArraySeq.ofRef[T] | Null = { if (xs eq null) null - else if (xs.length == 0) ArraySeq.empty[AnyRef].asInstanceOf[ArraySeq.ofRef[T]] - else new ArraySeq.ofRef[T](xs) + else if (xs.nn.length == 0) ArraySeq.empty[AnyRef].asInstanceOf[ArraySeq.ofRef[T]] + else new ArraySeq.ofRef[T](xs.nn) } /** @group conversions-array-to-wrapped-array */ - implicit def wrapIntArray(xs: Array[Int]): ArraySeq.ofInt = if (xs ne null) new ArraySeq.ofInt(xs) else null + implicit def wrapIntArray(xs: Array[Int] | Null): ArraySeq.ofInt | Null = if (xs ne null) new ArraySeq.ofInt(xs.nn) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapDoubleArray(xs: Array[Double]): ArraySeq.ofDouble = if (xs ne null) new ArraySeq.ofDouble(xs) else null + implicit def wrapDoubleArray(xs: Array[Double] | Null): ArraySeq.ofDouble | Null = if (xs ne null) new ArraySeq.ofDouble(xs.nn) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapLongArray(xs: Array[Long]): ArraySeq.ofLong = if (xs ne null) new ArraySeq.ofLong(xs) else null + implicit def wrapLongArray(xs: Array[Long] | Null): ArraySeq.ofLong | Null = if (xs ne null) new ArraySeq.ofLong(xs.nn) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapFloatArray(xs: Array[Float]): ArraySeq.ofFloat = if (xs ne null) new ArraySeq.ofFloat(xs) else null + implicit def wrapFloatArray(xs: Array[Float] | Null): ArraySeq.ofFloat | Null = if (xs ne null) new ArraySeq.ofFloat(xs.nn) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapCharArray(xs: Array[Char]): ArraySeq.ofChar = if (xs ne null) new ArraySeq.ofChar(xs) else null + implicit def wrapCharArray(xs: Array[Char] | Null): ArraySeq.ofChar | Null = if (xs ne null) new ArraySeq.ofChar(xs.nn) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapByteArray(xs: Array[Byte]): ArraySeq.ofByte = if (xs ne null) new ArraySeq.ofByte(xs) else null + implicit def wrapByteArray(xs: Array[Byte] | Null): ArraySeq.ofByte | Null = if (xs ne null) new ArraySeq.ofByte(xs.nn) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapShortArray(xs: Array[Short]): ArraySeq.ofShort = if (xs ne null) new ArraySeq.ofShort(xs) else null + implicit def wrapShortArray(xs: Array[Short] | Null): ArraySeq.ofShort | Null = if (xs ne null) new ArraySeq.ofShort(xs.nn) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapBooleanArray(xs: Array[Boolean]): ArraySeq.ofBoolean = if (xs ne null) new ArraySeq.ofBoolean(xs) else null + implicit def wrapBooleanArray(xs: Array[Boolean] | Null): ArraySeq.ofBoolean | Null = if (xs ne null) new ArraySeq.ofBoolean(xs.nn) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapUnitArray(xs: Array[Unit]): ArraySeq.ofUnit = if (xs ne null) new ArraySeq.ofUnit(xs) else null + implicit def wrapUnitArray(xs: Array[Unit] | Null): ArraySeq.ofUnit | Null = if (xs ne null) new ArraySeq.ofUnit(xs.nn) else null /** @group conversions-string */ - implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null + implicit def wrapString(s: String | Null): WrappedString | Null = if (s ne null) new WrappedString(s.nn) else null } private[scala] abstract 
class LowPriorityImplicits2 { @deprecated("implicit conversions from Array to immutable.IndexedSeq are implemented by copying; use `toIndexedSeq` explicitly if you want to copy, or use the more efficient non-copying ArraySeq.unsafeWrapArray", since="2.13.0") - implicit def copyArrayToImmutableIndexedSeq[T](xs: Array[T]): IndexedSeq[T] = + implicit def copyArrayToImmutableIndexedSeq[T](xs: Array[T] | Null): IndexedSeq[T] | Null = if (xs eq null) null - else new ArrayOps(xs).toIndexedSeq + else new ArrayOps(xs.nn).toIndexedSeq } diff --git a/library/src/scala/Specializable.scala b/library/src/scala/Specializable.scala index 54fb59dba83e..ce7b7f875f1f 100644 --- a/library/src/scala/Specializable.scala +++ b/library/src/scala/Specializable.scala @@ -24,15 +24,15 @@ object Specializable { // Smuggle a list of types by way of a tuple upon which Group is parameterized. class Group[T >: Null](value: T) extends SpecializedGroup - final val Primitives: Group[(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)] = null - final val Everything: Group[(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)] = null - final val Bits32AndUp: Group[(Int, Long, Float, Double)] = null - final val Integral: Group[(Byte, Short, Int, Long, Char)] = null - final val AllNumeric: Group[(Byte, Short, Int, Long, Char, Float, Double)] = null - final val BestOfBreed: Group[(Int, Double, Boolean, Unit, AnyRef)] = null - final val Unit: Group[Tuple1[Unit]] = null + final val Primitives: Group[(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)] = null.asInstanceOf + final val Everything: Group[(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)] = null.asInstanceOf + final val Bits32AndUp: Group[(Int, Long, Float, Double)] = null.asInstanceOf + final val Integral: Group[(Byte, Short, Int, Long, Char)] = null.asInstanceOf + final val AllNumeric: Group[(Byte, Short, Int, Long, Char, Float, Double)] = null.asInstanceOf + final val BestOfBreed: Group[(Int, Double, Boolean, Unit, AnyRef)] = null.asInstanceOf + final val Unit: Group[Tuple1[Unit]] = null.asInstanceOf - final val Arg: Group[(Int, Long, Float, Double)] = null - final val Args: Group[(Int, Long, Double)] = null - final val Return: Group[(Int, Long, Float, Double, Boolean, Unit)] = null + final val Arg: Group[(Int, Long, Float, Double)] = null.asInstanceOf + final val Args: Group[(Int, Long, Double)] = null.asInstanceOf + final val Return: Group[(Int, Long, Float, Double, Boolean, Unit)] = null.asInstanceOf } diff --git a/library/src/scala/StringContext.scala b/library/src/scala/StringContext.scala index ec5c49a2349e..a00389266d5f 100644 --- a/library/src/scala/StringContext.scala +++ b/library/src/scala/StringContext.scala @@ -290,7 +290,7 @@ object StringContext { } // Matched all of pattern to all of name. Success. 
- Some(collection.immutable.ArraySeq.unsafeWrapArray( + Option.fromNullable(collection.immutable.ArraySeq.unsafeWrapArray( Array.tabulate(patternChunks.length - 1)(n => input.slice(matchStarts(n), matchEnds(n))) )) } diff --git a/library/src/scala/collection/ArrayOps.scala b/library/src/scala/collection/ArrayOps.scala index 08758e2ab46a..8a67e0490470 100644 --- a/library/src/scala/collection/ArrayOps.scala +++ b/library/src/scala/collection/ArrayOps.scala @@ -57,7 +57,8 @@ object ArrayOps { private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { def length = xs.length def apply(n: Int) = xs(n) - override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + override def toString: String = + immutable.ArraySeq.unsafeWrapArray(xs).nn.mkString("ArrayView(", ", ", ")") } /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ @@ -1081,7 +1082,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs).nn, that) /** Returns an array formed from this array and another iterable collection * by combining corresponding elements in pairs. @@ -1435,7 +1436,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq def toIndexedSeq: immutable.IndexedSeq[A] = - immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) + immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)).nn /** Copy elements of this array to another array. * Fills the given array `xs` starting at index 0. 
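
The hunks above apply the two idioms used throughout this patch: widening types that may hold `null` to `T | Null`, and asserting non-nullness with `.nn` where a value is known (or required) to be non-null, e.g. after `ArraySeq.unsafeWrapArray`. The standalone sketch below is not part of the patch; it assumes a plain Scala 3 compiler (flow typing as under `-Yexplicit-nulls`) and only illustrates how the two idioms behave.

```scala
object NullIdiomsSketch:
  // Flow typing: after the null test, `s` narrows from `String | Null` to `String`.
  def orElse(s: String | Null, fallback: String): String =
    if s == null then fallback else s

  // `.nn` is the unsafe assertion used in the hunks above: it strips `| Null`
  // from the static type and throws a NullPointerException if the value is null.
  def forcedLength(s: String | Null): Int =
    s.nn.length

@main def nullIdiomsDemo(): Unit =
  println(NullIdiomsSketch.orElse(null, "fallback")) // prints "fallback"
  println(NullIdiomsSketch.forcedLength("abc"))      // prints 3
```
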
diff --git a/library/src/scala/collection/Iterator.scala b/library/src/scala/collection/Iterator.scala index 7c288bf58e9f..638299e26422 100644 --- a/library/src/scala/collection/Iterator.scala +++ b/library/src/scala/collection/Iterator.scala @@ -157,12 +157,12 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - private[this] var buffer: Array[B] = null // current result - private[this] var prev: Array[B] = null // if sliding, overlap from previous result + private[this] var buffer: Array[B] | Null = null // current result + private[this] var prev: Array[B] | Null = null // if sliding, overlap from previous result private[this] var first = true // if !first, advancing may skip ahead private[this] var filled = false // whether the buffer is "hot" private[this] var partial = true // whether to emit partial sequence - private[this] var padding: () => B = null // what to pad short sequences with + private[this] var padding: (() => B) | Null = null // what to pad short sequences with private[this] def pad = padding != null // irrespective of partial flag private[this] def newBuilder = { val b = ArrayBuilder.make[Any] @@ -226,7 +226,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite val builder = newBuilder var done = false // keep prefix of previous buffer if stepping - if (prev != null) builder.addAll(prev) + if (prev != null) builder.addAll(prev.nn) // skip ahead if (!first && step > size) { var dropping = step - size @@ -247,7 +247,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite if (index < size && pad) { builder.sizeHint(size) while (index < size) { - builder.addOne(padding()) + builder.addOne(padding.nn()) index += 1 } } @@ -271,11 +271,11 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite filled = false // if stepping, retain overlap in prev if (step < size) { - if (first) prev = buffer.drop(step) - else if (buffer.length == size) Array.copy(src = buffer, srcPos = step, dest = prev, destPos = 0, length = size - step) + if (first) prev = buffer.nn.drop(step) + else if (buffer.nn.length == size) Array.copy(src = buffer.nn, srcPos = step, dest = prev.nn, destPos = 0, length = size - step) else prev = null } - val res = immutable.ArraySeq.unsafeWrapArray(buffer).asInstanceOf[immutable.ArraySeq[B]] + val res = immutable.ArraySeq.unsafeWrapArray(buffer.nn).asInstanceOf[immutable.ArraySeq[B]] buffer = null first = false res @@ -698,7 +698,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * handling of structural calls. It's not what's intended here. 
*/ final class Leading extends AbstractIterator[A] { - private[this] var lookahead: mutable.Queue[A] = null + private[this] var lookahead: mutable.Queue[A] | Null = null private[this] var hd: A = _ /* Status is kept with magic numbers * 1 means next element is in hd and we're still reading into this iterator @@ -709,10 +709,10 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite private[this] var status = 0 private def store(a: A): Unit = { if (lookahead == null) lookahead = new mutable.Queue[A] - lookahead += a + lookahead.nn += a } def hasNext = { - if (status < 0) (lookahead ne null) && lookahead.nonEmpty + if (status < 0) (lookahead ne null) && lookahead.nn.nonEmpty else if (status > 0) true else { if (self.hasNext) { @@ -726,7 +726,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def next() = { if (hasNext) { if (status == 1) { status = 0; hd } - else lookahead.dequeue() + else lookahead.nn.dequeue() } else Iterator.empty.next() } @@ -753,7 +753,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite val leading = new Leading val trailing = new AbstractIterator[A] { - private[this] var myLeading = leading + private[this] var myLeading: Leading | Null = leading /* Status flag meanings: * -1 not yet accessed * 0 single element waiting in leading @@ -768,13 +768,13 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite case 1 => if (self.hasNext) { status = 2 ; true } else { status = 3 ; false } case 0 => true case _ => - if (myLeading.finish()) { status = 0 ; true } else { status = 1 ; myLeading = null ; hasNext } + if (myLeading.nn.finish()) { status = 0 ; true } else { status = 1 ; myLeading = null ; hasNext } } def next() = { if (hasNext) { if (status == 0) { status = 1 - val res = myLeading.trailer + val res = myLeading.nn.trailer myLeading = null res } else { @@ -869,7 +869,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ def duplicate: (Iterator[A], Iterator[A]) = { val gap = new scala.collection.mutable.Queue[A] - var ahead: Iterator[A] = null + var ahead: Iterator[A] | Null = null class Partner extends AbstractIterator[A] { override def knownSize: Int = self.synchronized { val thisSize = self.knownSize @@ -1145,15 +1145,15 @@ object Iterator extends IterableFactory[Iterator] { /** Creates an iterator to which other iterators can be appended efficiently. * Nested ConcatIterators are merged to avoid blowing the stack. 
*/ - private final class ConcatIterator[+A](private var current: Iterator[A @uncheckedVariance]) extends AbstractIterator[A] { - private var tail: ConcatIteratorCell[A @uncheckedVariance] = null - private var last: ConcatIteratorCell[A @uncheckedVariance] = null + private final class ConcatIterator[+A](private var current: Iterator[A @uncheckedVariance] | Null) extends AbstractIterator[A] { + private var tail: ConcatIteratorCell[A @uncheckedVariance] | Null = null + private var last: ConcatIteratorCell[A @uncheckedVariance] | Null = null private var currentHasNextChecked = false def hasNext = if (currentHasNextChecked) true else if (current == null) false - else if (current.hasNext) { + else if (current.nn.hasNext) { currentHasNextChecked = true true } @@ -1166,7 +1166,7 @@ object Iterator extends IterableFactory[Iterator] { currentHasNextChecked = c.currentHasNextChecked if (c.tail != null) { if (last == null) last = c.last - c.last.tail = tail + c.last.nn.tail = tail tail = c.tail } merge() @@ -1181,12 +1181,12 @@ object Iterator extends IterableFactory[Iterator] { false } else { - current = tail.headIterator - if (last eq tail) last = last.tail - tail = tail.tail + current = tail.nn.headIterator + if (last eq tail) last = last.nn.tail + tail = tail.nn.tail merge() if (currentHasNextChecked) true - else if (current != null && current.hasNext) { + else if (current != null && current.nn.hasNext) { currentHasNextChecked = true true } else advance() @@ -1198,7 +1198,7 @@ object Iterator extends IterableFactory[Iterator] { def next() = if (hasNext) { currentHasNextChecked = false - current.next() + current.nn.next() } else Iterator.empty.next() override def concat[B >: A](that: => IterableOnce[B]): Iterator[B] = { @@ -1208,7 +1208,7 @@ object Iterator extends IterableFactory[Iterator] { last = c } else { - last.tail = c + last.nn.tail = c last = c } if (current == null) current = Iterator.empty @@ -1216,7 +1216,7 @@ object Iterator extends IterableFactory[Iterator] { } } - private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A], var tail: ConcatIteratorCell[A]) { + private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A], var tail: ConcatIteratorCell[A] | Null) { def headIterator: Iterator[A] = head.iterator } @@ -1284,10 +1284,10 @@ object Iterator extends IterableFactory[Iterator] { */ private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)]) extends AbstractIterator[A] { private[this] var state: S = init - private[this] var nextResult: Option[(A, S)] = null + private[this] var nextResult: Option[(A, S)] | Null = null override def hasNext: Boolean = { - if (nextResult eq null) { + if (nextResult == null) { nextResult = { val res = f(state) if (res eq null) throw new NullPointerException("null during unfold") @@ -1295,12 +1295,12 @@ object Iterator extends IterableFactory[Iterator] { } state = null.asInstanceOf[S] // allow GC } - nextResult.isDefined + nextResult.nn.isDefined } override def next(): A = { if (hasNext) { - val (value, newState) = nextResult.get + val (value, newState) = nextResult.nn.get state = newState nextResult = null value diff --git a/library/src/scala/collection/JavaConverters.scala b/library/src/scala/collection/JavaConverters.scala index 7a803a685d3e..0a7570e5e96b 100644 --- a/library/src/scala/collection/JavaConverters.scala +++ b/library/src/scala/collection/JavaConverters.scala @@ -78,65 +78,65 @@ import scala.language.implicitConversions @deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") object 
JavaConverters extends AsJavaConverters with AsScalaConverters { @deprecated("Use `asJava` instead", "2.13.0") - def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i) + def asJavaIterator[A](i: Iterator[A] | Null): ju.Iterator[A] | Null = asJava(i) @deprecated("Use `asJava` instead", "2.13.0") - def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i) + def asJavaIterable[A](i: Iterable[A] | Null): jl.Iterable[A] | Null = asJava(i) @deprecated("Use `asJava` instead", "2.13.0") - def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b) + def bufferAsJavaList[A](b: mutable.Buffer[A] | Null): ju.List[A] | Null = asJava(b) @deprecated("Use `asJava` instead", "2.13.0") - def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s) + def mutableSeqAsJavaList[A](s: mutable.Seq[A] | Null): ju.List[A] | Null = asJava(s) @deprecated("Use `asJava` instead", "2.13.0") - def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s) + def seqAsJavaList[A](s: Seq[A] | Null): ju.List[A] | Null = asJava(s) @deprecated("Use `asJava` instead", "2.13.0") - def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s) + def mutableSetAsJavaSet[A](s: mutable.Set[A] | Null): ju.Set[A] | Null = asJava(s) @deprecated("Use `asJava` instead", "2.13.0") - def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s) + def setAsJavaSet[A](s: Set[A] | Null): ju.Set[A] | Null = asJava(s) @deprecated("Use `asJava` instead", "2.13.0") - def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m) + def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V] | Null): ju.Map[K, V] | Null = asJava(m) @deprecated("Use `asJava` instead", "2.13.0") - def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m) + def mapAsJavaMap[K, V](m: Map[K, V] | Null): ju.Map[K, V] | Null = asJava(m) @deprecated("Use `asJava` instead", "2.13.0") - def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m) + def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V] | Null): juc.ConcurrentMap[K, V] | Null = asJava(m) @deprecated("Use `asScala` instead", "2.13.0") - def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = asScala(i) + def asScalaIterator[A](i: ju.Iterator[A] | Null): Iterator[A] | Null = asScala(i) @deprecated("Use `asScala` instead", "2.13.0") - def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i) + def enumerationAsScalaIterator[A](i: ju.Enumeration[A] | Null): Iterator[A] | Null = asScala(i) @deprecated("Use `asScala` instead", "2.13.0") - def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i) + def iterableAsScalaIterable[A](i: jl.Iterable[A] | Null): Iterable[A] | Null = asScala(i) @deprecated("Use `asScala` instead", "2.13.0") - def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i) + def collectionAsScalaIterable[A](i: ju.Collection[A] | Null): Iterable[A] | Null = asScala(i) @deprecated("Use `asScala` instead", "2.13.0") - def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l) + def asScalaBuffer[A](l: ju.List[A] | Null): mutable.Buffer[A] | Null = asScala(l) @deprecated("Use `asScala` instead", "2.13.0") - def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s) + def asScalaSet[A](s: ju.Set[A] | Null): mutable.Set[A] | Null = asScala(s) @deprecated("Use `asScala` instead", "2.13.0") - def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m) + def mapAsScalaMap[A, B](m: ju.Map[A, B] | Null): mutable.Map[A, B] | Null = asScala(m) 
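
These deprecated forwarders all keep the null-passthrough contract of the converters they delegate to: a null collection converts to null, anything else is wrapped. A minimal sketch of that shape follows; the helper name is hypothetical and is not part of this patch or of `CollectionConverters`.

```scala
import java.util as ju
import scala.collection.mutable
import scala.jdk.CollectionConverters.*

// Hypothetical helper showing the null-in/null-out shape used in this file:
// return null for a null input, otherwise delegate to the real conversion.
def asScalaOrNull[A](l: ju.List[A] | Null): mutable.Buffer[A] | Null =
  if l == null then null else l.asScala

@main def nullPassthroughDemo(): Unit =
  println(asScalaOrNull(ju.List.of(1, 2, 3))) // prints the converted buffer
  println(asScalaOrNull[Int](null))           // prints null
```
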
@deprecated("Use `asScala` instead", "2.13.0") - def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m) + def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B] | Null): concurrent.Map[A, B] | Null = asScala(m) @deprecated("Use `asScala` instead", "2.13.0") - def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p) + def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B] | Null): mutable.Map[A, B] | Null = asScala(p) @deprecated("Use `asScala` instead", "2.13.0") - def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p) + def propertiesAsScalaMap(p: ju.Properties | Null): mutable.Map[String, String] | Null = asScala(p) // Deprecated implicit conversions for code that directly imports them @@ -304,32 +304,32 @@ object JavaConverters extends AsJavaConverters with AsScalaConverters { /** Generic class containing the `asJava` converter method */ - class AsJava[A](op: => A) { + class AsJava[A](op: => A | Null) { /** Converts a Scala collection to the corresponding Java collection */ - def asJava: A = op + def asJava: A | Null = op } /** Generic class containing the `asScala` converter method */ - class AsScala[A](op: => A) { + class AsScala[A](op: => A | Null) { /** Converts a Java collection to the corresponding Scala collection */ - def asScala: A = op + def asScala: A | Null = op } /** Generic class containing the `asJavaCollection` converter method */ - class AsJavaCollection[A](i: Iterable[A]) { + class AsJavaCollection[A](i: Iterable[A] | Null) { /** Converts a Scala `Iterable` to a Java `Collection` */ - def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) + def asJavaCollection: ju.Collection[A] | Null = JavaConverters.asJavaCollection(i) } /** Generic class containing the `asJavaEnumeration` converter method */ - class AsJavaEnumeration[A](i: Iterator[A]) { + class AsJavaEnumeration[A](i: Iterator[A] | Null) { /** Converts a Scala `Iterator` to a Java `Enumeration` */ - def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) + def asJavaEnumeration: ju.Enumeration[A] | Null = JavaConverters.asJavaEnumeration(i) } /** Generic class containing the `asJavaDictionary` converter method */ - class AsJavaDictionary[K, V](m : mutable.Map[K, V]) { + class AsJavaDictionary[K, V](m : mutable.Map[K, V] | Null) { /** Converts a Scala `Map` to a Java `Dictionary` */ - def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m) + def asJavaDictionary: ju.Dictionary[K, V] | Null = JavaConverters.asJavaDictionary(m) } } diff --git a/library/src/scala/collection/LazyZipOps.scala b/library/src/scala/collection/LazyZipOps.scala index a7a72ce882a8..f1a80d693fc2 100644 --- a/library/src/scala/collection/LazyZipOps.scala +++ b/library/src/scala/collection/LazyZipOps.scala @@ -70,19 +70,19 @@ final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterabl def iterator: AbstractIterator[(El1, El2)] = new AbstractIterator[(El1, El2)] { private[this] val elems1 = coll1.iterator private[this] val elems2 = coll2.iterator - private[this] var _current: (El1, El2) = _ + private[this] var _current: (El1, El2) | Null = null private def current = { - while ((_current eq null) && elems1.hasNext && elems2.hasNext) { + while ((_current == null) && elems1.hasNext && elems2.hasNext) { val e1 = elems1.next() val e2 = elems2.next() if (p(e1, e2)) _current = (e1, e2) } _current } - def hasNext = current ne null + def hasNext = current != null 
def next() = { val c = current - if (c ne null) { + if (c != null) { _current = null c } else Iterator.empty.next() @@ -336,7 +336,7 @@ final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, private[this] val elems2 = coll2.iterator private[this] val elems3 = coll3.iterator private[this] val elems4 = coll4.iterator - private[this] var _current: (El1, El2, El3, El4) = _ + private[this] var _current: (El1, El2, El3, El4) | Null = null private def current = { while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) { val e1 = elems1.next() @@ -352,7 +352,7 @@ final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, val c = current if (c ne null) { _current = null - c + c.nn } else Iterator.empty.next() } } diff --git a/library/src/scala/collection/SeqView.scala b/library/src/scala/collection/SeqView.scala index a45797892220..05b8fc47eebe 100644 --- a/library/src/scala/collection/SeqView.scala +++ b/library/src/scala/collection/SeqView.scala @@ -45,7 +45,7 @@ object SeqView { /** A `SeqOps` whose collection type and collection type constructor are unknown */ private type SomeSeqOps[+A] = SeqOps[A, AnyConstr, _] - /** A view that doesn’t apply any transformation to an underlying sequence */ + /** A view that doesn't apply any transformation to an underlying sequence */ @SerialVersionUID(3L) class Id[+A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { def apply(idx: Int): A = underlying.apply(idx) @@ -129,7 +129,7 @@ object SeqView { } @SerialVersionUID(3L) - class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A], + class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A] | Null, private[this] val len: Int, ord: Ordering[B]) extends SeqView[A] { @@ -164,10 +164,10 @@ object SeqView { val res = { val len = this.len if (len == 0) Nil - else if (len == 1) List(underlying.head) + else if (len == 1) List(underlying.nn.head) else { val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] - @annotation.unused val copied = underlying.copyToArray(arr) + @annotation.unused val copied = underlying.nn.copyToArray(arr) //assert(copied == len) java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it @@ -178,7 +178,7 @@ object SeqView { // contains items of another type, we'd get a CCE anyway) // - the cast doesn't actually do anything in the runtime because the // type of A is not known and Array[_] is Array[AnyRef] - immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]) + immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]).nn } } evaluated = true @@ -188,7 +188,7 @@ object SeqView { private[this] def elems: SomeSeqOps[A] = { val orig = underlying - if (evaluated) _sorted else orig + if (evaluated) _sorted else orig.nn } def apply(i: Int): A = _sorted.apply(i) diff --git a/library/src/scala/collection/Stepper.scala b/library/src/scala/collection/Stepper.scala index f1355e8182c3..6ba9c4ac9838 100644 --- a/library/src/scala/collection/Stepper.scala +++ b/library/src/scala/collection/Stepper.scala @@ -51,7 +51,7 @@ trait Stepper[@specialized(Double, Int, Long) +A] { * * See method `trySplit` in [[java.util.Spliterator]]. */ - def trySplit(): Stepper[A] + def trySplit(): Stepper[A] | Null /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See * method `estimateSize` in [[java.util.Spliterator]]. 
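
`trySplit` is typed `... | Null` in the hunks below because it mirrors `java.util.Spliterator.trySplit`, which is specified to return `null` when the source cannot or will not be split. The following small sketch (not library code, assuming a standard Scala 3 setup) shows how a caller handles that contract.

```scala
import java.util.Spliterator

// Consume a possibly-null trySplit result: null means "no split available".
def splitSizes[A](sp: Spliterator[A]): (Long, Long) =
  val other: Spliterator[A] | Null = sp.trySplit()
  if other == null then (sp.estimateSize(), 0L)
  else (other.estimateSize(), sp.estimateSize())

@main def trySplitDemo(): Unit =
  // .nn is only needed when Java APIs are seen as nullable (explicit nulls).
  val sp = java.util.List.of(1, 2, 3, 4).spliterator().nn
  println(splitSizes(sp)) // typically (2,2); (n,0) when no split was possible
```
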
@@ -108,7 +108,7 @@ object Stepper { def nextStep(): Double = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): DoubleStepper = { + def trySplit(): DoubleStepper | Null = { val s = st.trySplit() if (s == null) null else new UnboxingDoubleStepper(s) } @@ -119,7 +119,7 @@ object Stepper { def nextStep(): Int = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { + def trySplit(): IntStepper | Null = { val s = st.trySplit() if (s == null) null else new UnboxingIntStepper(s) } @@ -130,7 +130,7 @@ object Stepper { def nextStep(): Long = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): LongStepper = { + def trySplit(): LongStepper | Null = { val s = st.trySplit() if (s == null) null else new UnboxingLongStepper(s) } @@ -141,7 +141,7 @@ object Stepper { def nextStep(): Int = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { + def trySplit(): IntStepper | Null = { val s = st.trySplit() if (s == null) null else new UnboxingByteStepper(s) } @@ -152,7 +152,7 @@ object Stepper { def nextStep(): Int = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { + def trySplit(): IntStepper | Null = { val s = st.trySplit() if (s == null) null else new UnboxingCharStepper(s) } @@ -163,7 +163,7 @@ object Stepper { def nextStep(): Int = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { + def trySplit(): IntStepper | Null = { val s = st.trySplit() if (s == null) null else new UnboxingShortStepper(s) } @@ -174,7 +174,7 @@ object Stepper { def nextStep(): Double = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): DoubleStepper = { + def trySplit(): DoubleStepper | Null = { val s = st.trySplit() if (s == null) null else new UnboxingFloatStepper(s) } @@ -183,7 +183,7 @@ object Stepper { /** A Stepper for arbitrary element types. See [[Stepper]]. 
*/ trait AnyStepper[+A] extends Stepper[A] { - def trySplit(): AnyStepper[A] + def trySplit(): AnyStepper[A] | Null def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator(this) @@ -197,9 +197,9 @@ object AnyStepper { class AnyStepperSpliterator[A](s: AnyStepper[A]) extends Spliterator[A] { def tryAdvance(c: Consumer[_ >: A]): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false - def trySplit(): Spliterator[A] = { + def trySplit(): Spliterator[A] | Null = { val sp = s.trySplit() - if (sp == null) null else sp.spliterator + if (sp == null) null else sp.nn.spliterator } def estimateSize(): Long = s.estimateSize def characteristics(): Int = s.characteristics @@ -222,7 +222,7 @@ object AnyStepper { def nextStep(): Double = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Double] = { + def trySplit(): AnyStepper[Double] | Null = { val s = st.trySplit() if (s == null) null else new BoxedDoubleStepper(s) } @@ -233,7 +233,7 @@ object AnyStepper { def nextStep(): Int = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Int] = { + def trySplit(): AnyStepper[Int] | Null = { val s = st.trySplit() if (s == null) null else new BoxedIntStepper(s) } @@ -244,7 +244,7 @@ object AnyStepper { def nextStep(): Long = st.nextStep() def estimateSize: Long = st.estimateSize def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Long] = { + def trySplit(): AnyStepper[Long] | Null = { val s = st.trySplit() if (s == null) null else new BoxedLongStepper(s) } @@ -253,7 +253,7 @@ object AnyStepper { /** A Stepper for Ints. See [[Stepper]]. */ trait IntStepper extends Stepper[Int] { - def trySplit(): IntStepper + def trySplit(): IntStepper | Null def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) @@ -272,7 +272,7 @@ object IntStepper { case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfInt = { + override def trySplit(): Spliterator.OfInt | Null = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -291,7 +291,7 @@ object IntStepper { /** A Stepper for Doubles. See [[Stepper]]. */ trait DoubleStepper extends Stepper[Double] { - def trySplit(): DoubleStepper + def trySplit(): DoubleStepper | Null def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) @@ -311,7 +311,7 @@ object DoubleStepper { case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfDouble = { + override def trySplit(): Spliterator.OfDouble | Null = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -330,7 +330,7 @@ object DoubleStepper { /** A Stepper for Longs. See [[Stepper]]. 
*/ trait LongStepper extends Stepper[Long] { - def trySplit(): LongStepper + def trySplit(): LongStepper | Null def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) @@ -350,7 +350,7 @@ object LongStepper { case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfLong = { + override def trySplit(): Spliterator.OfLong | Null = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } diff --git a/library/src/scala/collection/StringOps.scala b/library/src/scala/collection/StringOps.scala index f641c792156a..5bce277d7110 100644 --- a/library/src/scala/collection/StringOps.scala +++ b/library/src/scala/collection/StringOps.scala @@ -722,7 +722,7 @@ final class StringOps(private val s: String) extends AnyVal { * This method does not convert characters outside the Basic Multilingual Plane (BMP). */ def capitalize: String = - if (s == null || s.length == 0 || !s.charAt(0).isLower) s + if (s == null || s.length == 0 || !s.charAt(0).isLower) s.nn else updated(0, s.charAt(0).toUpper) /** Returns this string with the given `prefix` stripped. If this string does not diff --git a/library/src/scala/collection/View.scala b/library/src/scala/collection/View.scala index f304b8931f14..7f50cd4a1e02 100644 --- a/library/src/scala/collection/View.scala +++ b/library/src/scala/collection/View.scala @@ -441,17 +441,17 @@ object View extends IterableFactory[View] { else new TakeRightIterator[A](it, n) } - private final class TakeRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { + private final class TakeRightIterator[A](private[this] var underlying: Iterator[A] | Null, maxlen: Int) extends AbstractIterator[A] { private[this] var len: Int = -1 private[this] var pos: Int = 0 - private[this] var buf: ArrayBuffer[AnyRef] = _ + private[this] var buf: ArrayBuffer[AnyRef] | Null = null def init(): Unit = if(buf eq null) { buf = new ArrayBuffer[AnyRef](maxlen min 256) len = 0 - while(underlying.hasNext) { - val n = underlying.next().asInstanceOf[AnyRef] - if(pos >= buf.length) buf.addOne(n) - else buf(pos) = n + while(underlying.nn.hasNext) { + val n = underlying.nn.next().asInstanceOf[AnyRef] + if(pos >= buf.nn.length) buf.nn.addOne(n) + else buf.nn(pos) = n pos += 1 if(pos == maxlen) pos = 0 len += 1 @@ -470,7 +470,7 @@ object View extends IterableFactory[View] { init() if(len == 0) Iterator.empty.next() else { - val x = buf(pos).asInstanceOf[A] + val x = buf.nn(pos).asInstanceOf[A] pos += 1 if(pos == maxlen) pos = 0 len -= 1 @@ -499,11 +499,11 @@ object View extends IterableFactory[View] { private final class DropRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { private[this] var len: Int = -1 // known size or -1 if the end of `underlying` has not been seen yet private[this] var pos: Int = 0 - private[this] var buf: ArrayBuffer[AnyRef] = _ + private[this] var buf: ArrayBuffer[AnyRef] | Null = null def init(): Unit = if(buf eq null) { buf = new ArrayBuffer[AnyRef](maxlen min 256) while(pos < maxlen && underlying.hasNext) { - buf.addOne(underlying.next().asInstanceOf[AnyRef]) + buf.nn.addOne(underlying.next().asInstanceOf[AnyRef]) pos += 1 } if(!underlying.hasNext) len = 0 @@ -517,9 +517,9 @@ object View extends IterableFactory[View] { def next(): A = { if(!hasNext) Iterator.empty.next() else { - val x = buf(pos).asInstanceOf[A] + val x = 
buf.nn(pos).asInstanceOf[A] if(len == -1) { - buf(pos) = underlying.next().asInstanceOf[AnyRef] + buf.nn(pos) = underlying.next().asInstanceOf[AnyRef] if(!underlying.hasNext) len = 0 } else len -= 1 pos += 1 diff --git a/library/src/scala/collection/convert/AsJavaConverters.scala b/library/src/scala/collection/convert/AsJavaConverters.scala index 2fc73da64fe7..4f81e7a3aede 100644 --- a/library/src/scala/collection/convert/AsJavaConverters.scala +++ b/library/src/scala/collection/convert/AsJavaConverters.scala @@ -37,10 +37,10 @@ trait AsJavaConverters { * @param i The Scala `Iterator` to be converted. * @return A Java `Iterator` view of the argument. */ - def asJava[A](i: Iterator[A]): ju.Iterator[A] = i match { + def asJava[A](i: Iterator[A] | Null): ju.Iterator[A] | Null = i match { case null => null case wrapper: JIteratorWrapper[A @uc] => wrapper.underlying - case _ => new IteratorWrapper(i) + case _ => new IteratorWrapper(i.nn) } /** @@ -55,10 +55,10 @@ trait AsJavaConverters { * @param i The Scala `Iterator` to be converted. * @return A Java `Enumeration` view of the argument. */ - def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match { + def asJavaEnumeration[A](i: Iterator[A] | Null): ju.Enumeration[A] | Null = i match { case null => null case wrapper: JEnumerationWrapper[A @uc] => wrapper.underlying - case _ => new IteratorWrapper(i) + case _ => new IteratorWrapper(i.nn) } /** @@ -73,10 +73,10 @@ trait AsJavaConverters { * @param i The Scala `Iterable` to be converted. * @return A Java `Iterable` view of the argument. */ - def asJava[A](i: Iterable[A]): jl.Iterable[A] = i match { + def asJava[A](i: Iterable[A] | Null): jl.Iterable[A] | Null = i match { case null => null case wrapper: JIterableWrapper[A @uc] => wrapper.underlying - case _ => new IterableWrapper(i) + case _ => new IterableWrapper(i.nn) } /** @@ -88,10 +88,10 @@ trait AsJavaConverters { * @param i The Scala `Iterable` to be converted. * @return A Java `Collection` view of the argument. */ - def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match { + def asJavaCollection[A](i: Iterable[A] | Null): ju.Collection[A] | Null = i match { case null => null case wrapper: JCollectionWrapper[A @uc] => wrapper.underlying - case _ => new IterableWrapper(i) + case _ => new IterableWrapper(i.nn) } /** @@ -106,10 +106,10 @@ trait AsJavaConverters { * @param b The Scala `Buffer` to be converted. * @return A Java `List` view of the argument. */ - def asJava[A](b: mutable.Buffer[A]): ju.List[A] = b match { + def asJava[A](b: mutable.Buffer[A] | Null): ju.List[A] | Null = b match { case null => null case wrapper: JListWrapper[A @uc] => wrapper.underlying - case _ => new MutableBufferWrapper(b) + case _ => new MutableBufferWrapper(b.nn) } /** @@ -124,10 +124,10 @@ trait AsJavaConverters { * @param s The Scala `Seq` to be converted. * @return A Java `List` view of the argument. */ - def asJava[A](s: mutable.Seq[A]): ju.List[A] = s match { + def asJava[A](s: mutable.Seq[A] | Null): ju.List[A] | Null = s match { case null => null case wrapper: JListWrapper[A @uc] => wrapper.underlying - case _ => new MutableSeqWrapper(s) + case _ => new MutableSeqWrapper(s.nn) } /** @@ -142,10 +142,10 @@ trait AsJavaConverters { * @param s The Scala `Seq` to be converted. * @return A Java `List` view of the argument. 
*/ - def asJava[A](s: Seq[A]): ju.List[A] = s match { + def asJava[A](s: Seq[A] | Null): ju.List[A] | Null = s match { case null => null case wrapper: JListWrapper[A @uc] => wrapper.underlying - case _ => new SeqWrapper(s) + case _ => new SeqWrapper(s.nn) } /** @@ -160,10 +160,10 @@ trait AsJavaConverters { * @param s The Scala mutable `Set` to be converted. * @return A Java `Set` view of the argument. */ - def asJava[A](s: mutable.Set[A]): ju.Set[A] = s match { + def asJava[A](s: mutable.Set[A] | Null): ju.Set[A] | Null = s match { case null => null case wrapper: JSetWrapper[A @uc] => wrapper.underlying - case _ => new MutableSetWrapper(s) + case _ => new MutableSetWrapper(s.nn) } /** @@ -178,10 +178,10 @@ trait AsJavaConverters { * @param s The Scala `Set` to be converted. * @return A Java `Set` view of the argument. */ - def asJava[A](s: Set[A]): ju.Set[A] = s match { + def asJava[A](s: Set[A] | Null): ju.Set[A] | Null = s match { case null => null case wrapper: JSetWrapper[A @uc] => wrapper.underlying - case _ => new SetWrapper(s) + case _ => new SetWrapper(s.nn) } /** @@ -196,10 +196,10 @@ trait AsJavaConverters { * @param m The Scala mutable `Map` to be converted. * @return A Java `Map` view of the argument. */ - def asJava[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = m match { + def asJava[K, V](m: mutable.Map[K, V] | Null): ju.Map[K, V] | Null = m match { case null => null case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying - case _ => new MutableMapWrapper(m) + case _ => new MutableMapWrapper(m.nn) } /** @@ -215,10 +215,10 @@ trait AsJavaConverters { * @param m The Scala `Map` to be converted. * @return A Java `Dictionary` view of the argument. */ - def asJavaDictionary[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = m match { + def asJavaDictionary[K, V](m: mutable.Map[K, V] | Null): ju.Dictionary[K, V] | Null = m match { case null => null case wrapper: JDictionaryWrapper[K @uc, V @uc] => wrapper.underlying - case _ => new DictionaryWrapper(m) + case _ => new DictionaryWrapper(m.nn) } /** @@ -233,10 +233,10 @@ trait AsJavaConverters { * @param m The Scala `Map` to be converted. * @return A Java `Map` view of the argument. */ - def asJava[K, V](m: Map[K, V]): ju.Map[K, V] = m match { + def asJava[K, V](m: Map[K, V] | Null): ju.Map[K, V] | Null = m match { case null => null case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying - case _ => new MapWrapper(m) + case _ => new MapWrapper(m.nn) } /** @@ -252,9 +252,9 @@ trait AsJavaConverters { * @param m The Scala `concurrent.Map` to be converted. * @return A Java `ConcurrentMap` view of the argument. 
*/ - def asJava[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = m match { + def asJava[K, V](m: concurrent.Map[K, V] | Null): juc.ConcurrentMap[K, V] | Null = m match { case null => null case wrapper: JConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlying - case _ => new ConcurrentMapWrapper(m) + case _ => new ConcurrentMapWrapper(m.nn) } } diff --git a/library/src/scala/collection/convert/AsJavaExtensions.scala b/library/src/scala/collection/convert/AsJavaExtensions.scala index d356a419325d..6021ec905f70 100644 --- a/library/src/scala/collection/convert/AsJavaExtensions.scala +++ b/library/src/scala/collection/convert/AsJavaExtensions.scala @@ -21,88 +21,88 @@ import java.{lang => jl, util => ju} trait AsJavaExtensions { import scala.jdk.javaapi.{CollectionConverters => conv} - implicit class IteratorHasAsJava[A](i: Iterator[A]) { + implicit class IteratorHasAsJava[A](i: Iterator[A] | Null) { /** Converts a Scala `Iterator` to a Java `Iterator`, see * [[AsJavaConverters.asJava[A](i:Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: ju.Iterator[A] = conv.asJava(i) + def asJava: ju.Iterator[A] | Null = conv.asJava(i) /** Converts a Scala `Iterator` to a Java `Enumeration`, see * [[AsJavaConverters.asJavaEnumeration `scala.jdk.javaapi.CollectionConverters.asJavaEnumeration`]]. */ - def asJavaEnumeration: ju.Enumeration[A] = conv.asJavaEnumeration(i) + def asJavaEnumeration: ju.Enumeration[A] | Null = conv.asJavaEnumeration(i) } - implicit class IterableHasAsJava[A](i: Iterable[A]) { + implicit class IterableHasAsJava[A](i: Iterable[A] | Null) { /** Converts a Scala `Iterable` to a Java `Iterable`, see * [[AsJavaConverters.asJava[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: jl.Iterable[A] = conv.asJava(i) + def asJava: jl.Iterable[A] | Null = conv.asJava(i) /** Converts a Scala `Iterator` to a Java `Collection`, see * [[AsJavaConverters.asJavaCollection `scala.jdk.javaapi.CollectionConverters.asJavaCollection`]]. */ - def asJavaCollection: ju.Collection[A] = conv.asJavaCollection(i) + def asJavaCollection: ju.Collection[A] | Null = conv.asJavaCollection(i) } - implicit class BufferHasAsJava[A](b: mutable.Buffer[A]) { + implicit class BufferHasAsJava[A](b: mutable.Buffer[A] | Null) { /** Converts a Scala `Buffer` to a Java `List`, see * [[AsJavaConverters.asJava[A](b:scala\.collection\.mutable\.Buffer[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: ju.List[A] = conv.asJava(b) + def asJava: ju.List[A] | Null = conv.asJava(b) } - implicit class MutableSeqHasAsJava[A](s: mutable.Seq[A]) { + implicit class MutableSeqHasAsJava[A](s: mutable.Seq[A] | Null) { /** Converts a Scala `Seq` to a Java `List`, see * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: ju.List[A] = conv.asJava(s) + def asJava: ju.List[A] | Null = conv.asJava(s) } - implicit class SeqHasAsJava[A](s: Seq[A]) { + implicit class SeqHasAsJava[A](s: Seq[A] | Null) { /** Converts a Scala `Seq` to a Java `List`, see * [[AsJavaConverters.asJava[A](s:scala\.collection\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. 
*/ - def asJava: ju.List[A] = conv.asJava(s) + def asJava: ju.List[A] | Null = conv.asJava(s) } - implicit class MutableSetHasAsJava[A](s: mutable.Set[A]) { + implicit class MutableSetHasAsJava[A](s: mutable.Set[A] | Null) { /** Converts a Scala `mutable.Set` to a Java `Set`, see * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: ju.Set[A] = conv.asJava(s) + def asJava: ju.Set[A] | Null = conv.asJava(s) } - implicit class SetHasAsJava[A](s: Set[A]) { + implicit class SetHasAsJava[A](s: Set[A] | Null) { /** Converts a Scala `Set` to a Java `Set`, see * [[AsJavaConverters.asJava[A](s:scala\.collection\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: ju.Set[A] = conv.asJava(s) + def asJava: ju.Set[A] | Null = conv.asJava(s) } - implicit class MutableMapHasAsJava[K, V](m: mutable.Map[K, V]) { + implicit class MutableMapHasAsJava[K, V](m: mutable.Map[K, V] | Null) { /** Converts a Scala `mutable.Map` to a Java `Map`, see * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.mutable\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: ju.Map[K, V] = conv.asJava(m) + def asJava: ju.Map[K, V] | Null = conv.asJava(m) /** Converts a Scala `mutable.Map` to a Java `Map`, see * [[AsJavaConverters.asJavaDictionary `scala.jdk.javaapi.CollectionConverters.asJavaDictionary`]]. */ - def asJavaDictionary: ju.Dictionary[K, V] = conv.asJavaDictionary(m) + def asJavaDictionary: ju.Dictionary[K, V] | Null = conv.asJavaDictionary(m) } - implicit class MapHasAsJava[K, V](m: Map[K, V]) { + implicit class MapHasAsJava[K, V](m: Map[K, V] | Null) { /** Converts a Scala `Map` to a Java `Map`, see * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: ju.Map[K, V] = conv.asJava(m) + def asJava: ju.Map[K, V] | Null = conv.asJava(m) } - implicit class ConcurrentMapHasAsJava[K, V](m: concurrent.Map[K, V]) { + implicit class ConcurrentMapHasAsJava[K, V](m: concurrent.Map[K, V] | Null) { /** Converts a Scala `concurrent.Map` to a Java `ConcurrentMap`, see * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.concurrent\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. */ - def asJava: juc.ConcurrentMap[K, V] = conv.asJava(m) + def asJava: juc.ConcurrentMap[K, V] | Null = conv.asJava(m) } } diff --git a/library/src/scala/collection/convert/AsScalaConverters.scala b/library/src/scala/collection/convert/AsScalaConverters.scala index e1055c60b36e..bfcd48497086 100644 --- a/library/src/scala/collection/convert/AsScalaConverters.scala +++ b/library/src/scala/collection/convert/AsScalaConverters.scala @@ -37,10 +37,10 @@ trait AsScalaConverters { * @param i The Java `Iterator` to be converted. * @return A Scala `Iterator` view of the argument. */ - def asScala[A](i: ju.Iterator[A]): Iterator[A] = i match { + def asScala[A](i: ju.Iterator[A] | Null): Iterator[A] | Null = i match { case null => null case wrapper: IteratorWrapper[A @uc] => wrapper.underlying - case _ => new JIteratorWrapper(i) + case _ => new JIteratorWrapper(i.nn) } /** @@ -55,10 +55,10 @@ trait AsScalaConverters { * @param e The Java `Enumeration` to be converted. * @return A Scala `Iterator` view of the argument. 
*/ - def asScala[A](e: ju.Enumeration[A]): Iterator[A] = e match { + def asScala[A](e: ju.Enumeration[A] | Null): Iterator[A] | Null = e match { case null => null case wrapper: IteratorWrapper[A @uc] => wrapper.underlying - case _ => new JEnumerationWrapper(e) + case _ => new JEnumerationWrapper(e.nn) } /** @@ -73,10 +73,10 @@ trait AsScalaConverters { * @param i The Java `Iterable` to be converted. * @return A Scala `Iterable` view of the argument. */ - def asScala[A](i: jl.Iterable[A]): Iterable[A] = i match { + def asScala[A](i: jl.Iterable[A] | Null): Iterable[A] | Null = i match { case null => null case wrapper: IterableWrapper[A @uc] => wrapper.underlying - case _ => new JIterableWrapper(i) + case _ => new JIterableWrapper(i.nn) } /** @@ -88,10 +88,10 @@ trait AsScalaConverters { * @param c The Java `Collection` to be converted. * @return A Scala `Iterable` view of the argument. */ - def asScala[A](c: ju.Collection[A]): Iterable[A] = c match { + def asScala[A](c: ju.Collection[A] | Null): Iterable[A] | Null = c match { case null => null case wrapper: IterableWrapper[A @uc] => wrapper.underlying - case _ => new JCollectionWrapper(c) + case _ => new JCollectionWrapper(c.nn) } /** @@ -106,10 +106,10 @@ trait AsScalaConverters { * @param l The Java `List` to be converted. * @return A Scala mutable `Buffer` view of the argument. */ - def asScala[A](l: ju.List[A]): mutable.Buffer[A] = l match { + def asScala[A](l: ju.List[A] | Null): mutable.Buffer[A] | Null = l match { case null => null case wrapper: MutableBufferWrapper[A @uc] => wrapper.underlying - case _ => new JListWrapper(l) + case _ => new JListWrapper(l.nn) } /** @@ -124,10 +124,10 @@ trait AsScalaConverters { * @param s The Java `Set` to be converted. * @return A Scala mutable `Set` view of the argument. */ - def asScala[A](s: ju.Set[A]): mutable.Set[A] = s match { + def asScala[A](s: ju.Set[A] | Null): mutable.Set[A] | Null = s match { case null => null case wrapper: MutableSetWrapper[A @uc] => wrapper.underlying - case _ => new JSetWrapper(s) + case _ => new JSetWrapper(s.nn) } /** @@ -147,10 +147,10 @@ trait AsScalaConverters { * @param m The Java `Map` to be converted. * @return A Scala mutable `Map` view of the argument. */ - def asScala[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = m match { + def asScala[K, V](m: ju.Map[K, V] | Null): mutable.Map[K, V] | Null = m match { case null => null case wrapper: MutableMapWrapper[K @uc, V @uc] => wrapper.underlying - case _ => new JMapWrapper(m) + case _ => new JMapWrapper(m.nn) } /** @@ -166,10 +166,10 @@ trait AsScalaConverters { * @param m The Java `ConcurrentMap` to be converted. * @return A Scala mutable `ConcurrentMap` view of the argument. */ - def asScala[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = m match { + def asScala[K, V](m: juc.ConcurrentMap[K, V] | Null): concurrent.Map[K, V] | Null = m match { case null => null case wrapper: ConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlyingConcurrentMap - case _ => new JConcurrentMapWrapper(m) + case _ => new JConcurrentMapWrapper(m.nn) } /** @@ -184,10 +184,10 @@ trait AsScalaConverters { * @param d The Java `Dictionary` to be converted. * @return A Scala mutable `Map` view of the argument. 
*/ - def asScala[K, V](d: ju.Dictionary[K, V]): mutable.Map[K, V] = d match { + def asScala[K, V](d: ju.Dictionary[K, V] | Null): mutable.Map[K, V] | Null = d match { case null => null case wrapper: DictionaryWrapper[K @uc, V @uc] => wrapper.underlying - case _ => new JDictionaryWrapper(d) + case _ => new JDictionaryWrapper(d.nn) } /** @@ -200,8 +200,8 @@ trait AsScalaConverters { * @param p The Java `Properties` to be converted. * @return A Scala mutable `Map[String, String]` view of the argument. */ - def asScala(p: ju.Properties): mutable.Map[String, String] = p match { + def asScala(p: ju.Properties | Null): mutable.Map[String, String] | Null = p match { case null => null - case _ => new JPropertiesWrapper(p) + case _ => new JPropertiesWrapper(p.nn) } } diff --git a/library/src/scala/collection/convert/AsScalaExtensions.scala b/library/src/scala/collection/convert/AsScalaExtensions.scala index ef08f4505fe1..b0002bf0aaea 100644 --- a/library/src/scala/collection/convert/AsScalaExtensions.scala +++ b/library/src/scala/collection/convert/AsScalaExtensions.scala @@ -21,73 +21,73 @@ import java.{lang => jl, util => ju} trait AsScalaExtensions { import scala.jdk.javaapi.{CollectionConverters => conv} - implicit class IteratorHasAsScala[A](i: ju.Iterator[A]) { + implicit class IteratorHasAsScala[A](i: ju.Iterator[A] | Null) { /** Converts a Java `Iterator` to a Scala `Iterator`, see * [[AsScalaConverters.asScala[A](i:java\.util\.Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: Iterator[A] = conv.asScala(i) + def asScala: Iterator[A] | Null = conv.asScala(i) } - implicit class EnumerationHasAsScala[A](e: ju.Enumeration[A]) { + implicit class EnumerationHasAsScala[A](e: ju.Enumeration[A] | Null) { /** Converts a Java `Enumeration` to a Scala `Iterator`, see * [[AsScalaConverters.asScala[A](e:java\.util\.Enumeration[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: Iterator[A] = conv.asScala(e) + def asScala: Iterator[A] | Null = conv.asScala(e) } - implicit class IterableHasAsScala[A](i: jl.Iterable[A]) { + implicit class IterableHasAsScala[A](i: jl.Iterable[A] | Null) { /** Converts a Java `Iterable` to a Scala `Iterable`, see * [[AsScalaConverters.asScala[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: Iterable[A] = conv.asScala(i) + def asScala: Iterable[A] | Null = conv.asScala(i) } - implicit class CollectionHasAsScala[A](c: ju.Collection[A]) { + implicit class CollectionHasAsScala[A](c: ju.Collection[A] | Null) { /** Converts a Java `Collection` to a Scala `Iterable`, see * [[AsScalaConverters.asScala[A](c:java\.util\.Collection[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: Iterable[A] = conv.asScala(c) + def asScala: Iterable[A] | Null = conv.asScala(c) } - implicit class ListHasAsScala[A](l: ju.List[A]) { + implicit class ListHasAsScala[A](l: ju.List[A] | Null) { /** Converts a Java `List` to a Scala `Buffer`, see * [[AsScalaConverters.asScala[A](l:java\.util\.List[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: mutable.Buffer[A] = conv.asScala(l) + def asScala: mutable.Buffer[A] | Null = conv.asScala(l) } - implicit class SetHasAsScala[A](s: ju.Set[A]) { + implicit class SetHasAsScala[A](s: ju.Set[A] | Null) { /** Converts a Java `Set` to a Scala `Set`, see * [[AsScalaConverters.asScala[A](s:java\.util\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. 
*/ - def asScala: mutable.Set[A] = conv.asScala(s) + def asScala: mutable.Set[A] | Null = conv.asScala(s) } - implicit class MapHasAsScala[K, V](m: ju.Map[K, V]) { + implicit class MapHasAsScala[K, V](m: ju.Map[K, V] | Null) { /** Converts a Java `Map` to a Scala `Map`, see * [[AsScalaConverters.asScala[A,B](m:java\.util\.Map[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: mutable.Map[K, V] = conv.asScala(m) + def asScala: mutable.Map[K, V] | Null = conv.asScala(m) } - implicit class ConcurrentMapHasAsScala[K, V](m: juc.ConcurrentMap[K, V]) { + implicit class ConcurrentMapHasAsScala[K, V](m: juc.ConcurrentMap[K, V] | Null) { /** Converts a Java `ConcurrentMap` to a Scala `concurrent.Map`, see * [[AsScalaConverters.asScala[A,B](m:java\.util\.concurrent\.ConcurrentMap[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: concurrent.Map[K, V] = conv.asScala(m) + def asScala: concurrent.Map[K, V] | Null = conv.asScala(m) } - implicit class DictionaryHasAsScala[K, V](d: ju.Dictionary[K, V]) { + implicit class DictionaryHasAsScala[K, V](d: ju.Dictionary[K, V] | Null) { /** Converts a Java `Dictionary` to a Scala `Map`, see * [[AsScalaConverters.asScala[A,B](d:java\.util\.Dictionary[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: mutable.Map[K, V] = conv.asScala(d) + def asScala: mutable.Map[K, V] | Null = conv.asScala(d) } - implicit class PropertiesHasAsScala(i: ju.Properties) { + implicit class PropertiesHasAsScala(i: ju.Properties | Null) { /** Converts a Java `Properties` to a Scala `Map`, see * [[AsScalaConverters.asScala(p:java\.util\.Properties)* `scala.jdk.javaapi.CollectionConverters.asScala`]]. */ - def asScala: mutable.Map[String, String] = conv.asScala(i) + def asScala: mutable.Map[String, String] | Null = conv.asScala(i) } } diff --git a/library/src/scala/collection/convert/ImplicitConversions.scala b/library/src/scala/collection/convert/ImplicitConversions.scala index 6492c60d6d9e..be2e7c30589d 100644 --- a/library/src/scala/collection/convert/ImplicitConversions.scala +++ b/library/src/scala/collection/convert/ImplicitConversions.scala @@ -26,52 +26,52 @@ trait ToScalaImplicits { /** Implicitly converts a Java `Iterator` to a Scala `Iterator`. * @see [[JavaConverters.asScalaIterator]] */ - implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it) + implicit def `iterator asScala`[A](it: ju.Iterator[A] | Null): Iterator[A] | Null = asScalaIterator(it) /** Implicitly converts a Java `Enumeration` to a Scala `Iterator`. * @see [[JavaConverters.enumerationAsScalaIterator]] */ - implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i) + implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A] | Null): Iterator[A] | Null = enumerationAsScalaIterator(i) /** Implicitly converts a Java `Iterable` to a Scala `Iterable`. * @see [[JavaConverters.iterableAsScalaIterable]] */ - implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i) + implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A] | Null): Iterable[A] | Null = iterableAsScalaIterable(i) /** Implicitly converts a Java `Collection` to an Scala `Iterable`. 
* @see [[JavaConverters.collectionAsScalaIterable]] */ - implicit def `collection AsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i) + implicit def `collection AsScalaIterable`[A](i: ju.Collection[A] | Null): Iterable[A] | Null = collectionAsScalaIterable(i) /** Implicitly converts a Java `List` to a Scala mutable `Buffer`. * @see [[JavaConverters.asScalaBuffer]] */ - implicit def `list asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l) + implicit def `list asScalaBuffer`[A](l: ju.List[A] | Null): mutable.Buffer[A] | Null = asScalaBuffer(l) /** Implicitly converts a Java `Set` to a Scala mutable `Set`. * @see [[JavaConverters.asScalaSet]] */ - implicit def `set asScala`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s) + implicit def `set asScala`[A](s: ju.Set[A] | Null): mutable.Set[A] | Null = asScalaSet(s) /** Implicitly converts a Java `Map` to a Scala mutable `Map`. * @see [[JavaConverters.mapAsScalaMap]] */ - implicit def `map AsScala`[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = mapAsScalaMap(m) + implicit def `map AsScala`[K, V](m: ju.Map[K, V] | Null): mutable.Map[K, V] | Null = mapAsScalaMap(m) /** Implicitly converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`. * @see [[JavaConverters.mapAsScalaConcurrentMap]] */ - implicit def `map AsScalaConcurrentMap`[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = mapAsScalaConcurrentMap(m) + implicit def `map AsScalaConcurrentMap`[K, V](m: juc.ConcurrentMap[K, V] | Null): concurrent.Map[K, V] | Null = mapAsScalaConcurrentMap(m) /** Implicitly converts a Java `Dictionary` to a Scala mutable `Map`. * @see [[JavaConverters.dictionaryAsScalaMap]] */ - implicit def `dictionary AsScalaMap`[K, V](p: ju.Dictionary[K, V]): mutable.Map[K, V] = dictionaryAsScalaMap(p) + implicit def `dictionary AsScalaMap`[K, V](p: ju.Dictionary[K, V] | Null): mutable.Map[K, V] | Null = dictionaryAsScalaMap(p) /** Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. * @see [[JavaConverters.propertiesAsScalaMap]] */ - implicit def `properties AsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) + implicit def `properties AsScalaMap`(p: ju.Properties | Null): mutable.Map[String, String] | Null = propertiesAsScalaMap(p) } /** Defines implicit conversions from Scala to Java collections. */ @@ -80,67 +80,67 @@ trait ToJavaImplicits { /** Implicitly converts a Scala `Iterator` to a Java `Iterator`. * @see [[JavaConverters.asJavaIterator]] */ - implicit def `iterator asJava`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it) + implicit def `iterator asJava`[A](it: Iterator[A] | Null): ju.Iterator[A] | Null = asJavaIterator(it) /** Implicitly converts a Scala `Iterator` to a Java `Enumeration`. * @see [[JavaConverters.asJavaEnumeration]] */ - implicit def `enumeration asJava`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it) + implicit def `enumeration asJava`[A](it: Iterator[A] | Null): ju.Enumeration[A] | Null = asJavaEnumeration(it) /** Implicitly converts a Scala `Iterable` to a Java `Iterable`. * @see [[JavaConverters.asJavaIterable]] */ - implicit def `iterable asJava`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i) + implicit def `iterable asJava`[A](i: Iterable[A] | Null): jl.Iterable[A] | Null = asJavaIterable(i) /** Implicitly converts a Scala `Iterable` to an immutable Java `Collection`. 
* @see [[JavaConverters.asJavaCollection]] */ - implicit def `collection asJava`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it) + implicit def `collection asJava`[A](it: Iterable[A] | Null): ju.Collection[A] | Null = asJavaCollection(it) /** Implicitly converts a Scala mutable `Buffer` to a Java `List`. * @see [[JavaConverters.bufferAsJavaList]] */ - implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b) + implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A] | Null): ju.List[A] | Null = bufferAsJavaList(b) /** Implicitly converts a Scala mutable `Seq` to a Java `List`. * @see [[JavaConverters.mutableSeqAsJavaList]] */ - implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq) + implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A] | Null): ju.List[A] | Null = mutableSeqAsJavaList(seq) /** Implicitly converts a Scala `Seq` to a Java `List`. * @see [[JavaConverters.seqAsJavaList]] */ - implicit def `seq AsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq) + implicit def `seq AsJavaList`[A](seq: Seq[A] | Null): ju.List[A] | Null = seqAsJavaList(seq) /** Implicitly converts a Scala mutable `Set` to a Java `Set`. * @see [[JavaConverters.mutableSetAsJavaSet]] */ - implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s) + implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A] | Null): ju.Set[A] | Null = mutableSetAsJavaSet(s) /** Implicitly converts a Scala `Set` to a Java `Set`. * @see [[JavaConverters.setAsJavaSet]] */ - implicit def `set AsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s) + implicit def `set AsJavaSet`[A](s: Set[A] | Null): ju.Set[A] | Null = setAsJavaSet(s) /** Implicitly converts a Scala mutable `Map` to a Java `Map`. * @see [[JavaConverters.mutableMapAsJavaMap]] */ - implicit def `mutableMap AsJavaMap`[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = mutableMapAsJavaMap(m) + implicit def `mutableMap AsJavaMap`[K, V](m: mutable.Map[K, V] | Null): ju.Map[K, V] | Null = mutableMapAsJavaMap(m) /** Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. * @see [[JavaConverters.asJavaDictionary]] */ - implicit def `dictionary asJava`[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = asJavaDictionary(m) + implicit def `dictionary asJava`[K, V](m: mutable.Map[K, V] | Null): ju.Dictionary[K, V] | Null = asJavaDictionary(m) /** Implicitly converts a Scala `Map` to a Java `Map`. * @see [[JavaConverters.mapAsJavaMap]] */ - implicit def `map AsJavaMap`[K, V](m: Map[K, V]): ju.Map[K, V] = mapAsJavaMap(m) + implicit def `map AsJavaMap`[K, V](m: Map[K, V] | Null): ju.Map[K, V] | Null = mapAsJavaMap(m) /** Implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. 
* @see [[JavaConverters.mapAsJavaConcurrentMap]] */ - implicit def `map AsJavaConcurrentMap`[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = mapAsJavaConcurrentMap(m) + implicit def `map AsJavaConcurrentMap`[K, V](m: concurrent.Map[K, V] | Null): juc.ConcurrentMap[K, V] | Null = mapAsJavaConcurrentMap(m) } /** diff --git a/library/src/scala/collection/convert/JavaCollectionWrappers.scala b/library/src/scala/collection/convert/JavaCollectionWrappers.scala index f79adff98e23..62229a36a357 100644 --- a/library/src/scala/collection/convert/JavaCollectionWrappers.scala +++ b/library/src/scala/collection/convert/JavaCollectionWrappers.scala @@ -84,7 +84,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { extends AbstractIterable[A] with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] with Serializable { - def iterator = underlying.iterator.asScala + def iterator = underlying.iterator.asScala.nn override def iterableFactory: mutable.ArrayBuffer.type = mutable.ArrayBuffer override def isEmpty: Boolean = !underlying.iterator().hasNext override def equals(other: Any): Boolean = other match { @@ -99,7 +99,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { extends AbstractIterable[A] with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] with Serializable { - def iterator = underlying.iterator.asScala + def iterator = underlying.iterator.asScala.nn override def size = underlying.size override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize override def isEmpty = underlying.isEmpty @@ -144,7 +144,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { def length = underlying.size override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize override def isEmpty = underlying.isEmpty - override def iterator: Iterator[A] = underlying.iterator.asScala + override def iterator: Iterator[A] = underlying.iterator.asScala.nn def apply(i: Int) = underlying.get(i) def update(i: Int, elem: A) = underlying.set(i, elem) def prepend(elem: A) = { underlying.subList(0, 0) add elem; this } @@ -221,7 +221,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { override def size: Int = underlying.size override def isEmpty: Boolean = underlying.isEmpty override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize - def iterator: Iterator[A] = underlying.iterator.asScala + def iterator: Iterator[A] = underlying.iterator.asScala.nn def contains(elem: A): Boolean = underlying.contains(elem) @@ -521,8 +521,8 @@ private[collection] object JavaCollectionWrappers extends Serializable { class DictionaryWrapper[K, V](val underlying: mutable.Map[K, V]) extends ju.Dictionary[K, V] with Serializable { def size: Int = underlying.size def isEmpty: Boolean = underlying.isEmpty - def keys: ju.Enumeration[K] = underlying.keysIterator.asJavaEnumeration - def elements: ju.Enumeration[V] = underlying.valuesIterator.asJavaEnumeration + def keys: ju.Enumeration[K] = underlying.keysIterator.asJavaEnumeration.nn + def elements: ju.Enumeration[V] = underlying.valuesIterator.asJavaEnumeration.nn def get(key: AnyRef) = try { underlying get key.asInstanceOf[K] match { case None => null.asInstanceOf[V] @@ -568,7 +568,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { override def update(k: K, v: V): Unit = { underlying.put(k, v) } override def remove(k: K): Option[V] = Option(underlying remove k) - def iterator = underlying.keys.asScala map (k => (k, underlying 
get k)) + def iterator = underlying.keys.asScala.nn map (k => (k, underlying get k)) override def clear() = iterator.foreach(entry => underlying.remove(entry._1)) @@ -633,3 +633,4 @@ private[collection] object JavaCollectionWrappers extends Serializable { /** Thrown when certain Map operations attempt to put a null value. */ private val PutNull = new ControlThrowable {} } + diff --git a/library/src/scala/collection/convert/StreamExtensions.scala b/library/src/scala/collection/convert/StreamExtensions.scala index 90b8bcb9031d..75b4ead2aedc 100644 --- a/library/src/scala/collection/convert/StreamExtensions.scala +++ b/library/src/scala/collection/convert/StreamExtensions.scala @@ -300,7 +300,7 @@ trait StreamExtensions { if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Int], AnyAccumulator.unboxedIntAdder, AnyAccumulator.merger[Int]).asInstanceOf[C1] else if (info.companion == IntAccumulator) intAcc.asInstanceOf[C1] else if (stream.isParallel) intAcc.to(factory) - else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala.nn) } } @@ -327,7 +327,7 @@ trait StreamExtensions { if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Long], AnyAccumulator.unboxedLongAdder, AnyAccumulator.merger[Long]).asInstanceOf[C1] else if (info.companion == LongAccumulator) longAcc.asInstanceOf[C1] else if (stream.isParallel) longAcc.to(factory) - else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala.nn) } } @@ -354,7 +354,7 @@ trait StreamExtensions { if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Double], AnyAccumulator.unboxedDoubleAdder, AnyAccumulator.merger[Double]).asInstanceOf[C1] else if (info.companion == DoubleAccumulator) doubleAcc.asInstanceOf[C1] else if (stream.isParallel) doubleAcc.to(factory) - else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala.nn) } } } @@ -446,31 +446,31 @@ object StreamExtensions { * `noAccumulatorFactoryInfo` is passed. 
*/ trait AccumulatorFactoryInfo[A, C] { - val companion: AnyRef + val companion: AnyRef | Null } trait LowPriorityAccumulatorFactoryInfo { implicit def noAccumulatorFactoryInfo[A, C]: AccumulatorFactoryInfo[A, C] = noAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, C]] private val noAccumulatorFactoryInfoPrototype: AccumulatorFactoryInfo[AnyRef, AnyRef] = new AccumulatorFactoryInfo[AnyRef, AnyRef] { - val companion: AnyRef = null + val companion: AnyRef | Null = null } } object AccumulatorFactoryInfo extends LowPriorityAccumulatorFactoryInfo { implicit def anyAccumulatorFactoryInfo[A]: AccumulatorFactoryInfo[A, AnyAccumulator[A]] = anyAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, AnyAccumulator[A]]] private object anyAccumulatorFactoryInfoPrototype extends AccumulatorFactoryInfo[AnyRef, AnyAccumulator[AnyRef]] { - val companion: AnyRef = AnyAccumulator + val companion: AnyRef | Null = AnyAccumulator } implicit val intAccumulatorFactoryInfo: AccumulatorFactoryInfo[Int, IntAccumulator] = new AccumulatorFactoryInfo[Int, IntAccumulator] { - val companion: AnyRef = IntAccumulator + val companion: AnyRef | Null = IntAccumulator } implicit val longAccumulatorFactoryInfo: AccumulatorFactoryInfo[Long, LongAccumulator] = new AccumulatorFactoryInfo[Long, LongAccumulator] { - val companion: AnyRef = LongAccumulator + val companion: AnyRef | Null = LongAccumulator } implicit val doubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[Double, DoubleAccumulator] = new AccumulatorFactoryInfo[Double, DoubleAccumulator] { - val companion: AnyRef = DoubleAccumulator + val companion: AnyRef | Null = DoubleAccumulator } implicit val jIntegerAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Integer, IntAccumulator] = intAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Integer, IntAccumulator]] diff --git a/library/src/scala/collection/convert/impl/BinaryTreeStepper.scala b/library/src/scala/collection/convert/impl/BinaryTreeStepper.scala index d15977eced17..8f9af217ca54 100644 --- a/library/src/scala/collection/convert/impl/BinaryTreeStepper.scala +++ b/library/src/scala/collection/convert/impl/BinaryTreeStepper.scala @@ -141,7 +141,7 @@ extends EfficientSplit { } -private[collection] final class AnyBinaryTreeStepper[A, T >: Null <: AnyRef]( +private[collection] final class AnyBinaryTreeStepper[A, T >: Null <: AnyRef | Null]( _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => A ) extends BinaryTreeStepperBase[A, T, AnyStepper[A], AnyBinaryTreeStepper[A, T]](_maxLength, _myCurrent, _stack, _index, _left, _right) @@ -159,7 +159,7 @@ with AnyStepper[A] { new AnyBinaryTreeStepper[A, T](maxL, myC, stk, ix, left, right, extract) } private[collection] object AnyBinaryTreeStepper { - def from[A, T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => A): AnyBinaryTreeStepper[A, T] = { + def from[A, T >: Null <: AnyRef | Null](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => A): AnyBinaryTreeStepper[A, T] = { val ans = new AnyBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) ans.initialize(root, maxLength) ans @@ -167,7 +167,7 @@ private[collection] object AnyBinaryTreeStepper { } -private[collection] final class DoubleBinaryTreeStepper[T >: Null <: AnyRef]( +private[collection] final class DoubleBinaryTreeStepper[T >: Null <: AnyRef | Null]( _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: 
Int, _left: T => T, _right: T => T, protected val extract: T => Double ) extends BinaryTreeStepperBase[Double, T, DoubleStepper, DoubleBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) @@ -185,7 +185,7 @@ with DoubleStepper { new DoubleBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) } private [collection] object DoubleBinaryTreeStepper { - def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Double): DoubleBinaryTreeStepper[T] = { + def from[T >: Null <: AnyRef | Null](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Double): DoubleBinaryTreeStepper[T] = { val ans = new DoubleBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) ans.initialize(root, maxLength) ans @@ -193,7 +193,7 @@ private [collection] object DoubleBinaryTreeStepper { } -private[collection] final class IntBinaryTreeStepper[T >: Null <: AnyRef]( +private[collection] final class IntBinaryTreeStepper[T >: Null <: AnyRef | Null]( _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Int ) extends BinaryTreeStepperBase[Int, T, IntStepper, IntBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) @@ -211,7 +211,7 @@ with IntStepper { new IntBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) } private [collection] object IntBinaryTreeStepper { - def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Int): IntBinaryTreeStepper[T] = { + def from[T >: Null <: AnyRef | Null](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Int): IntBinaryTreeStepper[T] = { val ans = new IntBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) ans.initialize(root, maxLength) ans @@ -220,7 +220,7 @@ private [collection] object IntBinaryTreeStepper { -private[collection] final class LongBinaryTreeStepper[T >: Null <: AnyRef]( +private[collection] final class LongBinaryTreeStepper[T >: Null <: AnyRef | Null]( _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Long ) extends BinaryTreeStepperBase[Long, T, LongStepper, LongBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) @@ -238,7 +238,7 @@ with LongStepper { new LongBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) } private [collection] object LongBinaryTreeStepper { - def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Long): LongBinaryTreeStepper[T] = { + def from[T >: Null <: AnyRef | Null](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Long): LongBinaryTreeStepper[T] = { val ans = new LongBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) ans.initialize(root, maxLength) ans diff --git a/library/src/scala/collection/convert/impl/BitSetStepper.scala b/library/src/scala/collection/convert/impl/BitSetStepper.scala index 905afaaf4a0d..d9fb65645e4e 100644 --- a/library/src/scala/collection/convert/impl/BitSetStepper.scala +++ b/library/src/scala/collection/convert/impl/BitSetStepper.scala @@ -18,7 +18,7 @@ import scala.collection.{BitSetOps, IntStepper, Stepper} private[collection] final class BitSetStepper( - private var underlying: BitSetOps[_], + private var underlying: BitSetOps[_] | Null, private var cache0: Long, private var cache1: Long, _i0: Int, _iN: Int, 
private var cacheIndex: Int @@ -47,15 +47,15 @@ with IntStepper { findNext() } } - else if (underlying eq null) { + else if (underlying == null) { i0 = iN found = false found } else { cacheIndex = ix - cache0 = underlying.word(cacheIndex) - cache1 = if ((iN - 1) >> LogWL == ix) -1L else underlying.word(cacheIndex+1) + cache0 = underlying.nn.word(cacheIndex) + cache1 = if ((iN - 1) >> LogWL == ix) -1L else underlying.nn.word(cacheIndex+1) findNext() } } @@ -78,8 +78,8 @@ with IntStepper { // Advance old stepper to breakpoint val ixOld0 = half >> LogWL if (ixOld0 > cacheIndex + 1) { - cache0 = underlying.word(ixOld0) - cache1 = if (((iN - 1) >> LogWL) == ixOld0) -1L else underlying.word(ixOld0+1) + cache0 = underlying.nn.word(ixOld0) + cache1 = if (((iN - 1) >> LogWL) == ixOld0) -1L else underlying.nn.word(ixOld0+1) cacheIndex = ixOld0 i0 = half found = false diff --git a/library/src/scala/collection/convert/impl/IteratorStepper.scala b/library/src/scala/collection/convert/impl/IteratorStepper.scala index 8fac29cf96ae..a92ac83a8b64 100644 --- a/library/src/scala/collection/convert/impl/IteratorStepper.scala +++ b/library/src/scala/collection/convert/impl/IteratorStepper.scala @@ -18,21 +18,21 @@ import java.util.Spliterator import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper} import scala.jdk.{AnyAccumulator, DoubleAccumulator, IntAccumulator, LongAccumulator} -private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) - extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying) +private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A] | Null) + extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying.nn) with AnyStepper[A] { - protected def semiclone(): AnyIteratorStepper[A] = new AnyIteratorStepper(null) + protected def semiclone(): AnyIteratorStepper[A] = new AnyIteratorStepper[A](null) - def nextStep(): A = if (proxied ne null) proxied.nextStep() else underlying.next() + def nextStep(): A = if (proxied ne null) proxied.nn.nextStep() else underlying.next() - def trySplit(): AnyStepper[A] = if (proxied ne null) proxied.trySplit() else { + def trySplit(): AnyStepper[A] | Null = if (proxied ne null) proxied.nn.trySplit() else { val acc = new AnyAccumulator[A] var i = 0 val n = nextChunkSize & 0xFFFFFFFC while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } if (i < n || !underlying.hasNext) { proxied = acc.stepper - proxied.trySplit() + proxied.nn.trySplit() } else { val ans = semiclone() @@ -43,21 +43,21 @@ private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) } } -private[collection] class DoubleIteratorStepper(_underlying: Iterator[Double]) - extends IteratorStepperBase[Double, DoubleStepper, DoubleIteratorStepper](_underlying) +private[collection] class DoubleIteratorStepper(_underlying: Iterator[Double] | Null) + extends IteratorStepperBase[Double, DoubleStepper, DoubleIteratorStepper](_underlying.nn) with DoubleStepper { protected def semiclone(): DoubleIteratorStepper = new DoubleIteratorStepper(null) - def nextStep(): Double = if (proxied ne null) proxied.nextStep() else underlying.next() + def nextStep(): Double = if (proxied ne null) proxied.nn.nextStep() else underlying.next() - def trySplit(): DoubleStepper = if (proxied ne null) proxied.trySplit() else { + def trySplit(): DoubleStepper | Null = if (proxied ne null) proxied.nn.trySplit() else { val acc = new DoubleAccumulator var i = 0 val n = nextChunkSize & 0xFFFFFFFC while (i < n 
&& underlying.hasNext) { acc += underlying.next(); i += 1 } if (i < n || !underlying.hasNext) { proxied = acc.stepper - proxied.trySplit() + proxied.nn.trySplit() } else { val ans = semiclone() @@ -68,21 +68,21 @@ private[collection] class DoubleIteratorStepper(_underlying: Iterator[Double]) } } -private[collection] class IntIteratorStepper(_underlying: Iterator[Int]) - extends IteratorStepperBase[Int, IntStepper, IntIteratorStepper](_underlying) +private[collection] class IntIteratorStepper(_underlying: Iterator[Int] | Null) + extends IteratorStepperBase[Int, IntStepper, IntIteratorStepper](_underlying.nn) with IntStepper { protected def semiclone(): IntIteratorStepper = new IntIteratorStepper(null) - def nextStep(): Int = if (proxied ne null) proxied.nextStep() else underlying.next() + def nextStep(): Int = if (proxied ne null) proxied.nn.nextStep() else underlying.next() - def trySplit(): IntStepper = if (proxied ne null) proxied.trySplit() else { + def trySplit(): IntStepper | Null = if (proxied ne null) proxied.nn.trySplit() else { val acc = new IntAccumulator var i = 0 val n = nextChunkSize & 0xFFFFFFFC while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } if (i < n || !underlying.hasNext) { proxied = acc.stepper - proxied.trySplit() + proxied.nn.trySplit() } else { val ans = semiclone() @@ -93,21 +93,21 @@ private[collection] class IntIteratorStepper(_underlying: Iterator[Int]) } } -private[collection] class LongIteratorStepper(_underlying: Iterator[Long]) - extends IteratorStepperBase[Long, LongStepper, LongIteratorStepper](_underlying) +private[collection] class LongIteratorStepper(_underlying: Iterator[Long] | Null) + extends IteratorStepperBase[Long, LongStepper, LongIteratorStepper](_underlying.nn) with LongStepper { protected def semiclone(): LongIteratorStepper = new LongIteratorStepper(null) - def nextStep(): Long = if (proxied ne null) proxied.nextStep() else underlying.next() + def nextStep(): Long = if (proxied ne null) proxied.nn.nextStep() else underlying.next() - def trySplit(): LongStepper = if (proxied ne null) proxied.trySplit() else { + def trySplit(): LongStepper | Null = if (proxied ne null) proxied.nn.trySplit() else { val acc = new LongAccumulator var i = 0 val n = nextChunkSize & 0xFFFFFFFC while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } if (i < n || !underlying.hasNext) { proxied = acc.stepper - proxied.trySplit() + proxied.nn.trySplit() } else { val ans = semiclone() @@ -124,6 +124,6 @@ private[convert] abstract class IteratorStepperBase[A, SP >: Null <: Stepper[A], final protected var proxied: SP = null protected def semiclone(): Semi // Must initialize with null iterator! 
def characteristics: Int = if (proxied ne null) Spliterator.ORDERED | Spliterator.SIZED | Spliterator.SUBSIZED else Spliterator.ORDERED - def estimateSize: Long = if (proxied ne null) proxied.estimateSize else Long.MaxValue - def hasStep: Boolean = if (proxied ne null) proxied.hasStep else underlying.hasNext + def estimateSize: Long = if (proxied ne null) proxied.nn.estimateSize else Long.MaxValue + def hasStep: Boolean = if (proxied ne null) proxied.nn.hasStep else underlying.hasNext } diff --git a/library/src/scala/collection/convert/impl/StringStepper.scala b/library/src/scala/collection/convert/impl/StringStepper.scala index e8c4d7073c43..110e893d608b 100644 --- a/library/src/scala/collection/convert/impl/StringStepper.scala +++ b/library/src/scala/collection/convert/impl/StringStepper.scala @@ -46,7 +46,7 @@ extends IntStepper with EfficientSplit { } else Stepper.throwNSEE() } - def trySplit(): CodePointStringStepper = + def trySplit(): CodePointStringStepper | Null = if (iN - 3 > i0) { var half = (i0 + iN) >>> 1 if (isLowSurrogate(underlying.charAt(half))) half -= 1 diff --git a/library/src/scala/collection/convert/impl/TableStepper.scala b/library/src/scala/collection/convert/impl/TableStepper.scala index 2c144e4fae8f..f37e8d2e0d1d 100644 --- a/library/src/scala/collection/convert/impl/TableStepper.scala +++ b/library/src/scala/collection/convert/impl/TableStepper.scala @@ -69,7 +69,7 @@ extends EfficientSplit { } -private[collection] final class AnyTableStepper[A, I >: Null <: AnyRef]( +private[collection] final class AnyTableStepper[A, I >: Null <: AnyRef | Null]( _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => A, _i0: Int, _iN: Int ) extends TableStepperBase[A, I, AnyStepper[A], AnyTableStepper[A, I]](_maxLength, _table, _i0, _iN) @@ -86,7 +86,7 @@ with AnyStepper[A] { } -private[collection] final class DoubleTableStepper[I >: Null <: AnyRef]( +private[collection] final class DoubleTableStepper[I >: Null <: AnyRef | Null]( _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Double, _i0: Int, _iN: Int ) extends TableStepperBase[Double, I, DoubleStepper, DoubleTableStepper[I]](_maxLength, _table, _i0, _iN) @@ -103,7 +103,7 @@ with DoubleStepper { } -private[collection] final class IntTableStepper[I >: Null <: AnyRef]( +private[collection] final class IntTableStepper[I >: Null <: AnyRef | Null]( _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Int, _i0: Int, _iN: Int ) extends TableStepperBase[Int, I, IntStepper, IntTableStepper[I]](_maxLength, _table, _i0, _iN) @@ -120,7 +120,7 @@ with IntStepper { } -private[collection] final class LongTableStepper[I >: Null <: AnyRef]( +private[collection] final class LongTableStepper[I >: Null <: AnyRef | Null]( _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Long, _i0: Int, _iN: Int ) extends TableStepperBase[Long, I, LongStepper, LongTableStepper[I]](_maxLength, _table, _i0, _iN) diff --git a/library/src/scala/collection/convert/impl/VectorStepper.scala b/library/src/scala/collection/convert/impl/VectorStepper.scala index ca0d45330a70..02fe61124290 100644 --- a/library/src/scala/collection/convert/impl/VectorStepper.scala +++ b/library/src/scala/collection/convert/impl/VectorStepper.scala @@ -23,15 +23,15 @@ private[convert] abstract class VectorStepperBase[Sub >: Null, Semi <: Sub]( ) extends IndexedStepperBase[Sub, Semi](_i0, _iN) { protected var index: Int = 32 // Force an advanceData on the first element - protected var leaves: Array[AnyRef] = null + protected var leaves: 
Array[AnyRef] | Null = null protected var index1: Int = 32 // Force advanceData to defer to initTo on the first element - protected var twigs: Array[AnyRef] = null + protected var twigs: Array[AnyRef] | Null = null protected final def advanceData(iX: Int): Unit = { index1 += 1 if (index1 >= 32) initTo(iX) else { - leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + leaves = twigs.nn(index1).asInstanceOf[Array[AnyRef]] index = 0 } } @@ -42,7 +42,7 @@ extends IndexedStepperBase[Sub, Semi](_i0, _iN) { case 1 => twigs = trunk index1 = iX >>> 5 - leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + leaves = twigs.nn(index1).asInstanceOf[Array[AnyRef]] index = iX & 0x1F case _ => var n = displayN @@ -53,7 +53,7 @@ extends IndexedStepperBase[Sub, Semi](_i0, _iN) { } twigs = dataN((iX >>> 10) & 0x1F).asInstanceOf[Array[AnyRef]] index1 = (iX >> 5) & 0x1F - leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + leaves = twigs.nn(index1).asInstanceOf[Array[AnyRef]] index = iX & 0x1F } } @@ -65,7 +65,7 @@ with AnyStepper[A] { index += 1 if (index >= 32) advanceData(i0) i0 += 1 - leaves(index).asInstanceOf[A] + leaves.nn(index).asInstanceOf[A] } else Stepper.throwNSEE() def semiclone(half: Int): AnyVectorStepper[A] = { val ans = new AnyVectorStepper[A](i0, half, displayN, trunk) @@ -83,7 +83,7 @@ with DoubleStepper { index += 1 if (index >= 32) advanceData(i0) i0 += 1 - leaves(index).asInstanceOf[Double] + leaves.nn(index).asInstanceOf[Double] } else Stepper.throwNSEE() def semiclone(half: Int): DoubleVectorStepper = { val ans = new DoubleVectorStepper(i0, half, displayN, trunk) @@ -101,7 +101,7 @@ with IntStepper { index += 1 if (index >= 32) advanceData(i0) i0 += 1 - leaves(index).asInstanceOf[Int] + leaves.nn(index).asInstanceOf[Int] } else Stepper.throwNSEE() def semiclone(half: Int): IntVectorStepper = { val ans = new IntVectorStepper(i0, half, displayN, trunk) @@ -119,7 +119,7 @@ with LongStepper { index += 1 if (index >= 32) advanceData(i0) i0 += 1 - leaves(index).asInstanceOf[Long] + leaves.nn(index).asInstanceOf[Long] } else Stepper.throwNSEE() def semiclone(half: Int): LongVectorStepper = { val ans = new LongVectorStepper(i0, half, displayN, trunk) diff --git a/library/src/scala/collection/immutable/ArraySeq.scala b/library/src/scala/collection/immutable/ArraySeq.scala index eafe9baa719f..587664d5b3e3 100644 --- a/library/src/scala/collection/immutable/ArraySeq.scala +++ b/library/src/scala/collection/immutable/ArraySeq.scala @@ -88,7 +88,7 @@ sealed abstract class ArraySeq[+A] * * @return null if optimisation not possible. 
*/ - private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = { + private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] | Null = { // Optimise concatenation of two ArraySeqs // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast if (isEmpty) @@ -141,7 +141,7 @@ sealed abstract class ArraySeq[+A] case that: ArraySeq[_] => val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]]) if (result == null) genericResult - else result + else result.nn case _ => genericResult } @@ -165,7 +165,7 @@ sealed abstract class ArraySeq[+A] case that: ArraySeq[_] => val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this) if (result == null) genericResult - else result + else result.nn case _ => genericResult } @@ -279,11 +279,11 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => def from[A](it: scala.collection.IterableOnce[A])(implicit tag: ClassTag[A]): ArraySeq[A] = it match { case as: ArraySeq[A] => as - case _ => unsafeWrapArray(Array.from[A](it)) + case _ => unsafeWrapArray(Array.from[A](it)).nn } def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = - ArrayBuffer.newBuilder[A].mapResult(b => unsafeWrapArray[A](b.toArray)) + ArrayBuffer.newBuilder[A].mapResult(b => unsafeWrapArray[A](b.toArray).nn) override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) @@ -294,7 +294,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => ScalaRunTime.array_update(elements, i, f(i)) i = i + 1 } - ArraySeq.unsafeWrapArray(elements) + ArraySeq.unsafeWrapArray(elements).nn } /** @@ -309,7 +309,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a * `ClassCastException` at runtime. 
*/ - def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + def unsafeWrapArray[T](x: Array[T] | Null): ArraySeq[T] | Null = ((x: @unchecked) match { case null => null case x: Array[AnyRef] => new ofRef[AnyRef](x) case x: Array[Int] => new ofInt(x) @@ -321,7 +321,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => case x: Array[Short] => new ofShort(x) case x: Array[Boolean] => new ofBoolean(x) case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[ArraySeq[T]] + }).asInstanceOf[ArraySeq[T] | Null] @SerialVersionUID(3L) final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] { @@ -692,3 +692,4 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] } } + diff --git a/library/src/scala/collection/immutable/HashMap.scala b/library/src/scala/collection/immutable/HashMap.scala index e9257f1948fc..095a5a6d3d81 100644 --- a/library/src/scala/collection/immutable/HashMap.scala +++ b/library/src/scala/collection/immutable/HashMap.scala @@ -67,7 +67,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: override def incl(elem: K): Set[K] = { val originalHash = elem.## val improvedHash = improve(originalHash) - val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false) + val newNode = rootNode.updated(elem, null, originalHash, improvedHash, 0, replaceValue = false) newKeySetOrThis(newNode) } override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem) @@ -333,7 +333,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: * }}} * */ - def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] = + def merged[V1 >: V](that: HashMap[K, V1])(mergef: (((K, V), (K, V1)) => (K, V1)) | Null): HashMap[K, V1] = if (mergef == null) { that ++ this } else { @@ -346,7 +346,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: if (that.rootNode.containsKey(k, originalHash, improved, 0)) { val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0) - val (mergedK, mergedV) = mergef(payload, thatPayload) + val (mergedK, mergedV) = mergef.nn(payload, thatPayload) val mergedOriginalHash = mergedK.## val mergedImprovedHash = improve(mergedOriginalHash) new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) @@ -360,7 +360,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) { val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0) - val (mergedK, mergedV) = mergef(payload, thatPayload) + val (mergedK, mergedV) = mergef.nn(payload, thatPayload) val mergedOriginalHash = mergedK.## val mergedImprovedHash = improve(mergedOriginalHash) new HashMap(rootNode.updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) @@ -369,7 +369,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: } } else { val builder = new HashMapBuilder[K, V1] - rootNode.mergeInto(that.rootNode, builder, 0)(mergef) + rootNode.mergeInto(that.rootNode, builder, 0)(mergef.nn) builder.result() } } @@ -1172,7 +1172,7 @@ private final class
BitmapIndexedMapNode[K, +V]( } override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = { - var newContent: Array[Any] = null + var newContent: Array[Any] | Null = null val iN = payloadArity // arity doesn't change during this operation val jN = nodeArity // arity doesn't change during this operation val newContentLength = content.length @@ -1184,10 +1184,10 @@ private final class BitmapIndexedMapNode[K, +V]( if (newContent eq null) { if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) { newContent = content.clone() - newContent(TupleLength * i + 1) = newValue + newContent.nn(TupleLength * i + 1) = newValue } } else { - newContent(TupleLength * i + 1) = newValue + newContent.nn(TupleLength * i + 1) = newValue } i += 1 } @@ -1199,14 +1199,14 @@ private final class BitmapIndexedMapNode[K, +V]( if (newContent eq null) { if (newNode ne node) { newContent = content.clone() - newContent(newContentLength - j - 1) = newNode + newContent.nn(newContentLength - j - 1) = newNode } } else - newContent(newContentLength - j - 1) = newNode + newContent.nn(newContentLength - j - 1) = newNode j += 1 } if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]] - else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode) + else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent.nn, originalHashes, size, cachedJavaKeySetHashCode) } override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { @@ -1715,7 +1715,7 @@ private final class BitmapIndexedMapNode[K, +V]( // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data var nodeMigrateToDataTargetMap = 0 // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null + var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] | Null = null // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node var nodesToPassThroughMap = 0 @@ -1725,7 +1725,7 @@ private final class BitmapIndexedMapNode[K, +V]( // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) var mapOfNewNodes = 0 // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[MapNode[K, V]] = null + var newNodes: mutable.Queue[MapNode[K, V]] | Null = null var newDataMap = 0 var newNodeMap = 0 @@ -1765,18 +1765,18 @@ private final class BitmapIndexedMapNode[K, +V]( nodesToPassThroughMap |= bitpos } else { mapOfNewNodes |= bitpos - if (newNodes eq null) { + if (newNodes == null) { newNodes = mutable.Queue.empty } - newNodes += newSubNode + newNodes.nn += newSubNode } } else if (newSubNode.size == 1) { newDataMap |= bitpos nodeMigrateToDataTargetMap |= bitpos - if (nodesToMigrateToData eq null) { + if (nodesToMigrateToData == null) { nodesToMigrateToData = mutable.Queue() } - nodesToMigrateToData += newSubNode + nodesToMigrateToData.nn += newSubNode } nodeIndex += 1 @@ -1823,14 +1823,14 @@ private final class BitmapIndexedMapNode[K, +V]( oldNodeIndex += 1 } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { // we need not check for null here. 
If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null - val node = nodesToMigrateToData.dequeue() + val node = nodesToMigrateToData.nn.dequeue() newContent(TupleLength * newDataIndex) = node.getKey(0) newContent(TupleLength * newDataIndex + 1) = node.getValue(0) newOriginalHashes(newDataIndex) = node.getHash(0) newDataIndex += 1 oldNodeIndex += 1 } else if ((bitpos & mapOfNewNodes) != 0) { - newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newContent(newContentSize - newNodeIndex - 1) = newNodes.nn.dequeue() newNodeIndex += 1 oldNodeIndex += 1 } else if ((bitpos & dataMap) != 0) { @@ -2005,16 +2005,16 @@ private final class HashCollisionMapNode[K, +V ]( if (hc eq this) { this } else { - var newContent: VectorBuilder[(K, V1)] = null + var newContent: VectorBuilder[(K, V1)] | Null = null val iter = content.iterator while (iter.hasNext) { val nextPayload = iter.next() if (hc.indexOf(nextPayload._1) < 0) { - if (newContent eq null) { + if (newContent == null) { newContent = new VectorBuilder[(K, V1)]() - newContent.addAll(hc.content) + newContent.nn.addAll(hc.content) } - newContent.addOne(nextPayload) + newContent.nn.addOne(nextPayload) } } if (newContent eq null) hc else new HashCollisionMapNode(originalHash, hash, newContent.result()) @@ -2028,7 +2028,7 @@ private final class HashCollisionMapNode[K, +V ]( override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { case hc: HashCollisionMapNode[K, V1] => val iter = content.iterator - val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)] + val rightArray: Array[AnyRef | Null] = hc.content.toArray[AnyRef] // really Array[(K, V1)] def rightIndexOf(key: K): Int = { var i = 0 @@ -2224,7 +2224,7 @@ private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, /** The last given out HashMap as a return value of `result()`, if any, otherwise null. * Indicates that on next add, the elements should be copied to an identical structure, before continuing * mutations. 
*/ - private var aliased: HashMap[K, V] = _ + private var aliased: HashMap[K, V] | Null = _ private def isAliased: Boolean = aliased != null diff --git a/library/src/scala/collection/immutable/HashSet.scala b/library/src/scala/collection/immutable/HashSet.scala index 3c72236a5395..3b9f58cfaf1b 100644 --- a/library/src/scala/collection/immutable/HashSet.scala +++ b/library/src/scala/collection/immutable/HashSet.scala @@ -395,7 +395,7 @@ private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] - def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] | Null def hasNodes: Boolean @@ -417,11 +417,11 @@ private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { def copy(): SetNode[A] - def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] + def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] | Null - def diff(that: SetNode[A], shift: Int): SetNode[A] + def diff(that: SetNode[A], shift: Int): SetNode[A] | Null - def concat(that: SetNode[A], shift: Int): SetNode[A] + def concat(that: SetNode[A], shift: Int): SetNode[A] | Null def foreachWithHash(f: (A, Int) => Unit): Unit @@ -1094,7 +1094,7 @@ private final class BitmapIndexedSetNode[A]( // return at runtime a SetNode[A], or a tuple of (A, Int, Int) // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + var nodesToMigrateToData: mutable.Queue[SetNode[A]] | Null = null // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node var nodesToPassThroughMap = 0 @@ -1104,7 +1104,7 @@ private final class BitmapIndexedSetNode[A]( // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) var mapOfNewNodes = 0 // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[SetNode[A]] = null + var newNodes: mutable.Queue[SetNode[A]] | Null = null var newDataMap = 0 var newNodeMap = 0 @@ -1144,18 +1144,18 @@ private final class BitmapIndexedSetNode[A]( nodesToPassThroughMap |= bitpos } else { mapOfNewNodes |= bitpos - if (newNodes eq null) { + if (newNodes == null) { newNodes = mutable.Queue.empty } - newNodes += newSubNode + newNodes.nn += newSubNode } } else if (newSubNode.size == 1) { newDataMap |= bitpos nodeMigrateToDataTargetMap |= bitpos - if (nodesToMigrateToData eq null) { + if (nodesToMigrateToData == null) { nodesToMigrateToData = mutable.Queue.empty } - nodesToMigrateToData += newSubNode + nodesToMigrateToData.nn += newSubNode } nodeIndex += 1 @@ -1196,7 +1196,7 @@ private final class BitmapIndexedSetNode[A]( // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data var nodeMigrateToDataTargetMap = 0 // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + var nodesToMigrateToData: mutable.Queue[SetNode[A]] | Null = null // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node var nodesToPassThroughMap = 0 @@ -1206,7 +1206,7 @@ private final class BitmapIndexedSetNode[A]( // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) var mapOfNewNodes = 0 // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[SetNode[A]] = null + var newNodes: mutable.Queue[SetNode[A]] | Null = null var newDataMap = 0 var newNodeMap = 0 @@ -1259,18 +1259,18 @@ private final class BitmapIndexedSetNode[A]( nodesToPassThroughMap |= bitpos } else { mapOfNewNodes |= bitpos - if (newNodes eq null) { + if (newNodes == null) { newNodes = mutable.Queue.empty } - newNodes += newSubNode + newNodes.nn += newSubNode } } else if (newSubNode.size == 1) { newDataMap |= bitpos nodeMigrateToDataTargetMap |= bitpos - if (nodesToMigrateToData eq null) { + if (nodesToMigrateToData == null) { nodesToMigrateToData = mutable.Queue.empty } - nodesToMigrateToData += newSubNode + nodesToMigrateToData.nn += newSubNode } nodeIndex += 1 @@ -1324,9 +1324,9 @@ private final class BitmapIndexedSetNode[A]( oldDataPassThrough: Int, nodesToPassThroughMap: Int, nodeMigrateToDataTargetMap: Int, - nodesToMigrateToData: mutable.Queue[SetNode[A]], + nodesToMigrateToData: mutable.Queue[SetNode[A]] | Null, mapOfNewNodes: Int, - newNodes: mutable.Queue[SetNode[A]], + newNodes: mutable.Queue[SetNode[A]] | Null, newCachedHashCode: Int): BitmapIndexedSetNode[A] = { if (newSize == 0) { SetNode.empty @@ -1365,14 +1365,14 @@ private final class BitmapIndexedSetNode[A]( oldNodeIndex += 1 } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null - val node = nodesToMigrateToData.dequeue() + val node = nodesToMigrateToData.nn.dequeue() newContent(newDataIndex) = node.getPayload(0) newOriginalHashes(newDataIndex) = node.getHash(0) newDataIndex += 1 oldNodeIndex += 1 } else if ((bitpos & mapOfNewNodes) != 0) { // we need not check for null here. If mapOfNewNodes != 0, then newNodes must not be null - newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newContent(newContentSize - newNodeIndex - 1) = newNodes.nn.dequeue() newNodeIndex += 1 oldNodeIndex += 1 } else if ((bitpos & dataMap) != 0) { @@ -1839,19 +1839,19 @@ private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int if (hc eq this) { this } else { - var newContent: VectorBuilder[A] = null + var newContent: VectorBuilder[A] | Null = null val iter = hc.content.iterator while (iter.hasNext) { val nextPayload = iter.next() if (!content.contains(nextPayload)) { if (newContent eq null) { newContent = new VectorBuilder() - newContent.addAll(this.content) + newContent.nn.addAll(this.content) } - newContent.addOne(nextPayload) + newContent.nn.addOne(nextPayload) } } - if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result()) + if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.nn.result()) } case _: BitmapIndexedSetNode[A] => // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes @@ -1961,7 +1961,7 @@ private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, Has /** The last given out HashSet as a return value of `result()`, if any, otherwise null. * Indicates that on next add, the elements should be copied to an identical structure, before continuing * mutations. 
*/ - private var aliased: HashSet[A] = _ + private var aliased: HashSet[A] | Null = null private def isAliased: Boolean = aliased != null @@ -2061,11 +2061,11 @@ private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, Has if (rootNode.size == 0) { HashSet.empty } else if (aliased != null) { - aliased + aliased.nn } else { aliased = new HashSet(rootNode) releaseFence() - aliased + aliased.nn } override def addOne(elem: A): this.type = { diff --git a/library/src/scala/collection/immutable/List.scala b/library/src/scala/collection/immutable/List.scala index d6651f417103..732606f8d53d 100644 --- a/library/src/scala/collection/immutable/List.scala +++ b/library/src/scala/collection/immutable/List.scala @@ -261,7 +261,7 @@ sealed abstract class List[+A] final override def collect[B](pf: PartialFunction[A, B]): List[B] = { if (this eq Nil) Nil else { var rest = this - var h: ::[B] = null + var h: ::[B] | Null = null var x: Any = null // Special case for first element while (h eq null) { @@ -288,8 +288,8 @@ sealed abstract class List[+A] final override def flatMap[B](f: A => IterableOnce[B]): List[B] = { var rest = this - var h: ::[B] = null - var t: ::[B] = null + var h: ::[B] | Null = null + var t: ::[B] | Null = null while (rest ne Nil) { val it = f(rest.head).iterator while (it.hasNext) { @@ -495,7 +495,7 @@ sealed abstract class List[+A] } } } - val result = loop(null, null, this, this) + val result = loop(null: List[B] | Null, null: ::[B] | Null, this, this).nn releaseFence() result } diff --git a/library/src/scala/collection/immutable/ListMap.scala b/library/src/scala/collection/immutable/ListMap.scala index 74d1697cac7f..152919fd0042 100644 --- a/library/src/scala/collection/immutable/ListMap.scala +++ b/library/src/scala/collection/immutable/ListMap.scala @@ -192,12 +192,12 @@ object ListMap extends MapFactory[ListMap] { if (found) { if (isDifferent) { - var newHead: ListMap.Node[K, V1] = null - var prev: ListMap.Node[K, V1] = null + var newHead: ListMap.Node[K, V1] | Null = null + var prev: ListMap.Node[K, V1] | Null = null var curr: ListMap[K, V1] = this var i = 0 while (i < index) { - val temp = new ListMap.Node(curr.key, curr.value, null) + val temp = new ListMap.Node[K, V1](curr.key, curr.value, null.asInstanceOf[ListMap[K, V1]]) if (prev ne null) { prev._init = temp } @@ -208,17 +208,17 @@ object ListMap extends MapFactory[ListMap] { } i += 1 } - val newNode = new ListMap.Node(curr.key, v, curr.init) + val newNode = new ListMap.Node[K, V1](curr.key, v, curr.init) if (prev ne null) { prev._init = newNode } releaseFence() - if (newHead eq null) newNode else newHead + if (newHead eq null) newNode else newHead.nn } else { this } } else { - new ListMap.Node(k, v, this) + new ListMap.Node[K, V1](k, v, this) } } diff --git a/library/src/scala/collection/immutable/Map.scala b/library/src/scala/collection/immutable/Map.scala index 8f372312512e..2a6b0c5e08fc 100644 --- a/library/src/scala/collection/immutable/Map.scala +++ b/library/src/scala/collection/immutable/Map.scala @@ -661,26 +661,26 @@ abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] wit private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { private[this] var elems: Map[K, V] = Map.empty private[this] var switchedToHashMapBuilder: Boolean = false - private[this] var hashMapBuilder: HashMapBuilder[K, V] = _ + private[this] var hashMapBuilder: HashMapBuilder[K, V] | Null = _ private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = - if 
(hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) + if (hashMapBuilder ne null) hashMapBuilder.nn.getOrElse(key, value) else elems.getOrElse(key, value) override def clear(): Unit = { elems = Map.empty if (hashMapBuilder != null) { - hashMapBuilder.clear() + hashMapBuilder.nn.clear() } switchedToHashMapBuilder = false } override def result(): Map[K, V] = - if (switchedToHashMapBuilder) hashMapBuilder.result() else elems + if (switchedToHashMapBuilder) hashMapBuilder.nn.result() else elems def addOne(key: K, value: V): this.type = { if (switchedToHashMapBuilder) { - hashMapBuilder.addOne(key, value) + hashMapBuilder.nn.addOne(key, value) } else if (elems.size < 4) { elems = elems.updated(key, value) } else { @@ -692,8 +692,8 @@ private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, if (hashMapBuilder == null) { hashMapBuilder = new HashMapBuilder } - elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder) - hashMapBuilder.addOne(key, value) + elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder.nn) + hashMapBuilder.nn.addOne(key, value) } } @@ -704,7 +704,7 @@ private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, override def addAll(xs: IterableOnce[(K, V)]): this.type = if (switchedToHashMapBuilder) { - hashMapBuilder.addAll(xs) + hashMapBuilder.nn.addAll(xs) this } else { super.addAll(xs) diff --git a/library/src/scala/collection/immutable/RedBlackTree.scala b/library/src/scala/collection/immutable/RedBlackTree.scala index 33f7d9ceb7e2..1ca2be6c4282 100644 --- a/library/src/scala/collection/immutable/RedBlackTree.scala +++ b/library/src/scala/collection/immutable/RedBlackTree.scala @@ -26,15 +26,15 @@ import scala.runtime.Statics.releaseFence * optimizations behind a reasonably clean API. 
*/ private[collection] object RedBlackTree { - def validate[A](tree: Tree[A, _])(implicit ordering: Ordering[A]): tree.type = { + def validate[A](tree: Tree[A, _] | Null)(implicit ordering: Ordering[A]): tree.type = { def impl(tree: Tree[A, _], keyProp: A => Boolean): Int = { assert(keyProp(tree.key), s"key check failed: $tree") if (tree.isRed) { - assert(tree.left == null || tree.left.isBlack, s"red-red left $tree") - assert(tree.right == null || tree.right.isBlack, s"red-red right $tree") + assert(tree.left == null || tree.left.nn.isBlack, s"red-red left $tree") + assert(tree.right == null || tree.right.nn.isBlack, s"red-red right $tree") } - val leftBlacks = if (tree.left == null) 0 else impl(tree.left, k => keyProp(k) && ordering.compare(k, tree.key) < 0) - val rightBlacks = if (tree.right == null) 0 else impl(tree.right, k => keyProp(k) && ordering.compare(k, tree.key) > 0) + val leftBlacks = if (tree.left == null) 0 else impl(tree.left.nn, k => keyProp(k) && ordering.compare(k, tree.key) < 0) + val rightBlacks = if (tree.right == null) 0 else impl(tree.right.nn, k => keyProp(k) && ordering.compare(k, tree.key) > 0) assert(leftBlacks == rightBlacks, s"not balanced: $tree") leftBlacks + (if (tree.isBlack) 1 else 0) } @@ -42,23 +42,23 @@ private[collection] object RedBlackTree { tree } - def isEmpty(tree: Tree[_, _]): Boolean = tree eq null + def isEmpty(tree: Tree[_, _] | Null): Boolean = tree eq null - def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null - def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { + def contains[A: Ordering](tree: Tree[A, _] | Null, x: A): Boolean = lookup(tree, x) ne null + def get[A: Ordering, B](tree: Tree[A, B] | Null, x: A): Option[B] = lookup(tree, x) match { case null => None case found => Some(found.value) } @tailrec - def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + def lookup[A, B](tree: Tree[A, B] | Null, x: A)(implicit ordering: Ordering[A]): Tree[A, B] | Null = if (tree eq null) null else { val cmp = ordering.compare(x, tree.key) if (cmp < 0) lookup(tree.left, x) else if (cmp > 0) lookup(tree.right, x) else tree } private[immutable] abstract class Helper[A](implicit val ordering: Ordering[A]) { - def beforePublish[B](tree: Tree[A, B]): Tree[A, B] = { + def beforePublish[B](tree: Tree[A, B] | Null): Tree[A, B] | Null = { if (tree eq null) tree else if (tree.isMutable) { val res = tree.mutableBlack.makeImmutable @@ -156,7 +156,7 @@ private[collection] object RedBlackTree { } } private[immutable] class SetHelper[A](implicit ordering: Ordering[A]) extends Helper[A] { - protected[this] final def mutableUpd(tree: Tree[A, Any], k: A): Tree[A, Any] = + protected[this] final def mutableUpd(tree: Tree[A, Any] | Null, k: A): Tree[A, Any] = if (tree eq null) { mutableRedTree(k, (), null, null) } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { @@ -171,7 +171,7 @@ private[collection] object RedBlackTree { } } private[immutable] class MapHelper[A, B](implicit ordering: Ordering[A]) extends Helper[A] { - protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B], k: A, v: B1): Tree[A, B1] = + protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B] | Null, k: A, v: B1): Tree[A, B1] = if (tree eq null) { mutableRedTree(k, v, null, null) } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { @@ -186,38 +186,38 @@ private[collection] object RedBlackTree { } } - def 
count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count - def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) - def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) - def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { + def count(tree: Tree[_, _] | Null) = if (tree eq null) 0 else tree.count + def update[A: Ordering, B, B1 >: B](tree: Tree[A, B] | Null, k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) + def delete[A: Ordering, B](tree: Tree[A, B] | Null, k: A): Tree[A, B] = blacken(del(tree, k)) + def rangeImpl[A: Ordering, B](tree: Tree[A, B] | Null, from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { case (Some(from), Some(until)) => this.range(tree, from, until) case (Some(from), None) => this.from(tree, from) case (None, Some(until)) => this.until(tree, until) case (None, None) => tree } - def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) - def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) - def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) - def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) + def range[A: Ordering, B](tree: Tree[A, B] | Null, from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) + def from[A: Ordering, B](tree: Tree[A, B] | Null, from: A): Tree[A, B] = blacken(doFrom(tree, from)) + def to[A: Ordering, B](tree: Tree[A, B] | Null, to: A): Tree[A, B] = blacken(doTo(tree, to)) + def until[A: Ordering, B](tree: Tree[A, B] | Null, key: A): Tree[A, B] = blacken(doUntil(tree, key)) - def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) - def take[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) - def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) + def drop[A: Ordering, B](tree: Tree[A, B] | Null, n: Int): Tree[A, B] = blacken(doDrop(tree, n)) + def take[A: Ordering, B](tree: Tree[A, B] | Null, n: Int): Tree[A, B] = blacken(doTake(tree, n)) + def slice[A: Ordering, B](tree: Tree[A, B] | Null, from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) - def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { + def smallest[A, B](tree: Tree[A, B] | Null): Tree[A, B] = { if (tree eq null) throw new NoSuchElementException("empty tree") var result = tree while (result.left ne null) result = result.left result } - def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { + def greatest[A, B](tree: Tree[A, B] | Null): Tree[A, B] = { if (tree eq null) throw new NoSuchElementException("empty tree") var result = tree while (result.right ne null) result = result.right result } - def tail[A, B](tree: Tree[A, B]): Tree[A, B] = { + def tail[A, B](tree: Tree[A, B] | Null): Tree[A, B] = { def _tail(tree: Tree[A, B]): Tree[A, B] = if (tree eq null) throw new NoSuchElementException("empty tree") else { @@ -229,7 +229,7 @@ private[collection] object RedBlackTree { blacken(_tail(tree)) } - def init[A, B](tree: Tree[A, B]): Tree[A, B] = { + def init[A, B](tree: Tree[A, B] | Null): Tree[A, B] = { def _init(tree: Tree[A, B]): Tree[A, B] = if (tree eq null) throw new NoSuchElementException("empty tree") else { @@ -244,7 
+244,7 @@ private[collection] object RedBlackTree { /** * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. */ - def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + def minAfter[A, B](tree: Tree[A, B] | Null, x: A)(implicit ordering: Ordering[A]): Tree[A, B] | Null = if (tree eq null) null else { val cmp = ordering.compare(x, tree.key) if (cmp == 0) tree else if (cmp < 0) { @@ -256,7 +256,7 @@ private[collection] object RedBlackTree { /** * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. */ - def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + def maxBefore[A, B](tree: Tree[A, B] | Null, x: A)(implicit ordering: Ordering[A]): Tree[A, B] | Null = if (tree eq null) null else { val cmp = ordering.compare(x, tree.key) if (cmp <= 0) maxBefore(tree.left, x) else { @@ -265,21 +265,21 @@ private[collection] object RedBlackTree { } } - def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) + def foreach[A,B,U](tree:Tree[A,B] | Null, f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) - def keysEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + def keysEqual[A: Ordering, X, Y](a: Tree[A, X] | Null, b: Tree[A, Y] | Null): Boolean = { if (a eq b) true else if (a eq null) false else if (b eq null) false else a.count == b.count && (new EqualsIterator(a)).sameKeys(new EqualsIterator(b)) } - def valuesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + def valuesEqual[A: Ordering, X, Y](a: Tree[A, X] | Null, b: Tree[A, Y] | Null): Boolean = { if (a eq b) true else if (a eq null) false else if (b eq null) false else a.count == b.count && (new EqualsIterator(a)).sameValues(new EqualsIterator(b)) } - def entriesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + def entriesEqual[A: Ordering, X, Y](a: Tree[A, X] | Null, b: Tree[A, Y] | Null): Boolean = { if (a eq b) true else if (a eq null) false else if (b eq null) false @@ -292,7 +292,7 @@ private[collection] object RedBlackTree { if (tree.right ne null) _foreach(tree.right, f) } - def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) + def foreachKey[A, U](tree:Tree[A,_] | Null, f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { if (tree.left ne null) _foreachKey(tree.left, f) @@ -300,72 +300,75 @@ private[collection] object RedBlackTree { if (tree.right ne null) _foreachKey(tree.right, f) } - def foreachEntry[A, B, U](tree:Tree[A,B], f: (A, B) => U):Unit = if (tree ne null) _foreachEntry(tree,f) + def foreachEntry[A, B, U](tree: Tree[A, B] | Null, f: (A, B) => U): Unit = if (tree ne null) _foreachEntry(tree.nn, f) private[this] def _foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { - if (tree.left ne null) _foreachEntry(tree.left, f) + if (tree.left ne null) _foreachEntry(tree.left.nn, f) f(tree.key, tree.value) - if (tree.right ne null) _foreachEntry(tree.right, f) + if (tree.right ne null) _foreachEntry(tree.right.nn, f) } - def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) - def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) - def 
valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) + def iterator[A: Ordering, B](tree: Tree[A, B] | Null, start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) + def keysIterator[A: Ordering](tree: Tree[A, _] | Null, start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) + def valuesIterator[A: Ordering, B](tree: Tree[A, B] | Null, start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) @tailrec - def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { - val count = this.count(tree.left) - if (n < count) nth(tree.left, n) - else if (n > count) nth(tree.right, n - count - 1) - else tree + def nth[A, B](tree: Tree[A, B] | Null, n: Int): Tree[A, B] | Null = { + if (tree eq null) null + else { + val count = this.count(tree.nn.left) + if (n < count) nth(tree.nn.left, n) + else if (n > count) nth(tree.nn.right, n - count - 1) + else tree + } } - def isBlack(tree: Tree[_, _]) = (tree eq null) || tree.isBlack + def isBlack(tree: Tree[_, _] | Null) = (tree eq null) || tree.nn.isBlack - @`inline` private[this] def isRedTree(tree: Tree[_, _]) = (tree ne null) && tree.isRed - @`inline` private[this] def isBlackTree(tree: Tree[_, _]) = (tree ne null) && tree.isBlack + @`inline` private[this] def isRedTree(tree: Tree[_, _] | Null) = (tree ne null) && tree.nn.isRed + @`inline` private[this] def isBlackTree(tree: Tree[_, _] | Null) = (tree ne null) && tree.nn.isBlack - private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black + private[this] def blacken[A, B](t: Tree[A, B] | Null): Tree[A, B] | Null = if (t eq null) null else t.nn.black // Blacken if the tree is red and has a red child. This is necessary when using methods such as `upd` or `updNth` // for building subtrees. Use `blacken` instead when building top-level trees. - private[this] def maybeBlacken[A, B](t: Tree[A, B]): Tree[A, B] = - if(isBlack(t)) t else if(isRedTree(t.left) || isRedTree(t.right)) t.black else t + private[this] def maybeBlacken[A, B](t: Tree[A, B] | Null): Tree[A, B] | Null = + if(isBlack(t)) t else if(isRedTree(t.nn.left) || isRedTree(t.nn.right)) t.nn.black else t - private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = { + private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B] | Null, right: Tree[A, B] | Null) = { val sizeAndColour = sizeOf(left) + sizeOf(right) + 1 | (if(isBlack) initialBlackCount else initialRedCount) new Tree(key, value.asInstanceOf[AnyRef], left, right, sizeAndColour) } /** Create a new balanced tree where `newLeft` replaces `tree.left`. 
*/ - private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1]): Tree[A, B1] = { + private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1] | Null): Tree[A, B1] = { // Parameter trees // tree | newLeft // -- KV R | nl.L nl.KV nl.R // | nl.R.L nl.R.KV nl.R.R if (tree.left eq newLeft) tree else { - if (newLeft.isRed) { - val newLeft_left = newLeft.left - val newLeft_right = newLeft.right + if ((newLeft ne null) && newLeft.nn.isRed) { + val newLeft_left = newLeft.nn.left + val newLeft_right = newLeft.nn.right if (isRedTree(newLeft_left)) { // RED // black(nl.L) nl.KV black // nl.R KV R - val resultLeft = newLeft_left.black + val resultLeft = newLeft_left.nn.black val resultRight = tree.blackWithLeft(newLeft_right) - newLeft.withLeftRight(resultLeft, resultRight) + newLeft.nn.withLeftRight(resultLeft, resultRight) } else if (isRedTree(newLeft_right)) { // RED // black nl.R.KV black // nl.L nl.KV nl.R.L nl.R.R KV R - val newLeft_right_right = newLeft_right.right + val newLeft_right_right = newLeft_right.nn.right - val resultLeft = newLeft.blackWithRight(newLeft_right.left) + val resultLeft = newLeft.nn.blackWithRight(newLeft_right.nn.left) val resultRight = tree.blackWithLeft(newLeft_right_right) - newLeft_right.withLeftRight(resultLeft, resultRight) + newLeft_right.nn.withLeftRight(resultLeft, resultRight) } else { // tree // newLeft KV R @@ -422,91 +425,94 @@ private[collection] object RedBlackTree { private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { RedTree(k, v, null, null) - } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + } else if (k.asInstanceOf[AnyRef] eq tree.nn.key.asInstanceOf[AnyRef]) { if (overwrite) - tree.withV(v) - else tree + tree.nn.withV(v) + else tree.nn } else { - val cmp = ordering.compare(k, tree.key) + val cmp = ordering.compare(k, tree.nn.key) if (cmp < 0) - balanceLeft(tree, upd(tree.left, k, v, overwrite)) + balanceLeft(tree.nn, upd(tree.nn.left, k, v, overwrite)) else if (cmp > 0) - balanceRight(tree, upd(tree.right, k, v, overwrite)) - else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef])) - tree.withV(v) - else tree + balanceRight(tree.nn, upd(tree.nn.right, k, v, overwrite)) + else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.nn.value.asInstanceOf[AnyRef])) + tree.nn.withV(v) + else tree.nn } - private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { + + private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B] | Null, idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { RedTree(k, v, null, null) } else { - val rank = count(tree.left) + 1 + val rank = count(tree.nn.left) + 1 if (idx < rank) - balanceLeft(tree, updNth(tree.left, idx, k, v)) + balanceLeft(tree.nn, updNth(tree.nn.left, idx, k, v)) else if (idx > rank) - balanceRight(tree, updNth(tree.right, idx - rank, k, v)) - else tree + balanceRight(tree.nn, updNth(tree.nn.right, idx - rank, k, v)) + else tree.nn } - private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + private[this] def doFrom[A, B](tree: Tree[A, B] | Null, from: A)(implicit ordering: Ordering[A]): Tree[A, B] | Null = { if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) - val newLeft = doFrom(tree.left, from) - if (newLeft eq tree.left) tree - else if (newLeft eq null) 
maybeBlacken(upd(tree.right, tree.key, tree.value, overwrite = false)) - else join(newLeft, tree.key, tree.value, tree.right) + if (ordering.lt(tree.nn.key, from)) return doFrom(tree.nn.right, from) + val newLeft = doFrom(tree.nn.left, from) + if (newLeft eq tree.nn.left) tree + else if (newLeft eq null) maybeBlacken(upd(tree.nn.right, tree.nn.key, tree.nn.value, overwrite = false)) + else join(newLeft, tree.nn.key, tree.nn.value, tree.nn.right) } - private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + + private[this] def doTo[A, B](tree: Tree[A, B] | Null, to: A)(implicit ordering: Ordering[A]): Tree[A, B] | Null = { if (tree eq null) return null - if (ordering.lt(to, tree.key)) return doTo(tree.left, to) - val newRight = doTo(tree.right, to) - if (newRight eq tree.right) tree - else if (newRight eq null) maybeBlacken(upd(tree.left, tree.key, tree.value, overwrite = false)) - else join(tree.left, tree.key, tree.value, newRight) + if (ordering.lt(to, tree.nn.key)) return doTo(tree.nn.left, to) + val newRight = doTo(tree.nn.right, to) + if (newRight eq tree.nn.right) tree + else if (newRight eq null) maybeBlacken(upd(tree.nn.left, tree.nn.key, tree.nn.value, overwrite = false)) + else join(tree.nn.left, tree.nn.key, tree.nn.value, newRight) } - private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + + private[this] def doUntil[A, B](tree: Tree[A, B] | Null, until: A)(implicit ordering: Ordering[A]): Tree[A, B] | Null = { if (tree eq null) return null - if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) - val newRight = doUntil(tree.right, until) - if (newRight eq tree.right) tree - else if (newRight eq null) maybeBlacken(upd(tree.left, tree.key, tree.value, overwrite = false)) - else join(tree.left, tree.key, tree.value, newRight) + if (ordering.lteq(until, tree.nn.key)) return doUntil(tree.nn.left, until) + val newRight = doUntil(tree.nn.right, until) + if (newRight eq tree.nn.right) tree + else if (newRight eq null) maybeBlacken(upd(tree.nn.left, tree.nn.key, tree.nn.value, overwrite = false)) + else join(tree.nn.left, tree.nn.key, tree.nn.value, newRight) } - private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + private[this] def doRange[A, B](tree: Tree[A, B] | Null, from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] | Null = { if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until) - if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until) - val newLeft = doFrom(tree.left, from) - val newRight = doUntil(tree.right, until) - if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false) - else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false) - else join(newLeft, tree.key, tree.value, newRight) + if (ordering.lt(tree.nn.key, from)) return doRange(tree.nn.right, from, until) + if (ordering.lteq(until, tree.nn.key)) return doRange(tree.nn.left, from, until) + val newLeft = doFrom(tree.nn.left, from) + val newRight = doUntil(tree.nn.right, until) + if ((newLeft eq tree.nn.left) && (newRight eq tree.nn.right)) tree + else if (newLeft eq null) upd(newRight, tree.nn.key, tree.nn.value, overwrite = false) + else if (newRight eq null) upd(newLeft, tree.nn.key, tree.nn.value, overwrite = false) + else join(newLeft, tree.nn.key, 
tree.nn.value, newRight) } - private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = + private[this] def doDrop[A, B](tree: Tree[A, B] | Null, n: Int): Tree[A, B] | Null = if((tree eq null) || (n <= 0)) tree - else if(n >= tree.count) null + else if(n >= tree.nn.count) null else { - val l = count(tree.left) - if(n > l) doDrop(tree.right, n-l-1) - else if(n == l) join(null, tree.key, tree.value, tree.right) - else join(doDrop(tree.left, n), tree.key, tree.value, tree.right) + val l = count(tree.nn.left) + if(n > l) doDrop(tree.nn.right, n-l-1) + else if(n == l) join(null, tree.nn.key, tree.nn.value, tree.nn.right) + else join(doDrop(tree.nn.left, n), tree.nn.key, tree.nn.value, tree.nn.right) } - private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = + private[this] def doTake[A, B](tree: Tree[A, B] | Null, n: Int): Tree[A, B] | Null = if((tree eq null) || (n <= 0)) null - else if(n >= tree.count) tree + else if(n >= tree.nn.count) tree else { - val l = count(tree.left) - if(n <= l) doTake(tree.left, n) - else if(n == l+1) maybeBlacken(updNth(tree.left, n, tree.key, tree.value)) - else join(tree.left, tree.key, tree.value, doTake(tree.right, n-l-1)) + val l = count(tree.nn.left) + if(n <= l) doTake(tree.nn.left, n) + else if(n == l+1) maybeBlacken(updNth(tree.nn.left, n, tree.nn.key, tree.nn.value)) + else join(tree.nn.left, tree.nn.key, tree.nn.value, doTake(tree.nn.right, n-l-1)) } - private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = - if((tree eq null) || (from >= until) || (from >= tree.count) || (until <= 0)) null - else if((from <= 0) && (until >= tree.count)) tree + private[this] def doSlice[A, B](tree: Tree[A, B] | Null, from: Int, until: Int): Tree[A, B] | Null = + if((tree eq null) || (from >= until) || (from >= tree.nn.count) || (until <= 0)) null + else if((from <= 0) && (until >= tree.nn.count)) tree else { val l = count(tree.left) if(until <= l) doSlice(tree.left, from, until) @@ -779,8 +785,8 @@ private[collection] object RedBlackTree { private[RedBlackTree] final val initialBlackCount = colourBit private[RedBlackTree] final val initialRedCount = 0 - @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) - @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) + @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B] | Null, right: Tree[A, B] | Null) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) + @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B] | Null, right: Tree[A, B] | Null) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) /** create a new immutable red tree. 
* left and right may be null @@ -822,18 +828,18 @@ private[collection] object RedBlackTree { } @tailrec - protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = + protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B] | Null): Tree[A, B] | Null = if (tree eq null) popNext() else if (tree.left eq null) tree else findLeftMostOrPopOnEmpty(goLeft(tree)) @`inline` private[this] def pushNext(tree: Tree[A, B]): Unit = { - stackOfNexts(index) = tree + stackOfNexts.nn(index) = tree index += 1 } - @`inline` protected final def popNext(): Tree[A, B] = if (index == 0) null else { + @`inline` protected final def popNext(): Tree[A, B] | Null = if (index == 0) null else { index -= 1 - stackOfNexts(index) + stackOfNexts.nn(index) } protected[this] val stackOfNexts = if (root eq null) null else { @@ -850,7 +856,7 @@ private[collection] object RedBlackTree { new Array[Tree[A, B]](maximumHeight) } private[this] var index = 0 - protected var lookahead: Tree[A, B] = if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) + protected var lookahead: Tree[A, B] | Null = if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) /** * Find the leftmost subtree whose key is equal to the given key, or if no such thing, @@ -868,33 +874,33 @@ private[collection] object RedBlackTree { find(root) } - @`inline` private[this] def goLeft(tree: Tree[A, B]) = { + @`inline` private[this] def goLeft(tree: Tree[A, B]): Tree[A, B] | Null = { pushNext(tree) tree.left } - @`inline` protected final def goRight(tree: Tree[A, B]) = tree.right + @`inline` protected final def goRight(tree: Tree[A, B]): Tree[A, B] | Null = tree.right } - private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) { + private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B] | Null) extends TreeIterator[A, B, Unit](tree, None) { override def nextResult(tree: Tree[A, B]): Nothing = ??? 
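Note: the hunks in this file all follow the same explicit-nulls idiom: a reference that may be absent is declared as `T | Null`, an `eq null`/`ne null` test narrows it where the compiler can track the check, and `.nn` asserts non-nullness (throwing a NullPointerException on null) where it cannot. A minimal, self-contained sketch of that idiom, assuming Scala 3 with `-Yexplicit-nulls`; the `Node`, `depth` and `leftmost` names below are illustrative only and are not part of this patch:

  final case class Node[A](value: A, left: Node[A] | Null, right: Node[A] | Null)

  def depth[A](node: Node[A] | Null): Int =
    if (node eq null) 0
    else 1 + math.max(depth(node.left), depth(node.right)) // `node` is narrowed to Node[A] after the null test

  def leftmost[A](root: Node[A]): A = {
    var cur = root
    while (cur.left ne null) cur = cur.left.nn // `.nn` needed: the test on `cur.left` is not tracked across the reassignment
    cur.value
  }

The same pattern explains the builder classes elsewhere in the patch: a lazily created delegate is stored in a `| Null` field, checked with `ne null`, and dereferenced through `.nn` at each use site.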
- def sameKeys[X](that:EqualsIterator[A,X]): Boolean = { + def sameKeys[X](that: EqualsIterator[A,X]): Boolean = { var equal = true while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { if (this.lookahead eq that.lookahead) { this.lookahead = this.popNext() that.lookahead = that.popNext() } else { - equal = (this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || - ordering.equiv(this.lookahead.key, that.lookahead.key) - this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) - that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + equal = (this.lookahead.nn.key.asInstanceOf[AnyRef] eq that.lookahead.nn.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.nn.key, that.lookahead.nn.key) + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead.nn)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead.nn)) } } equal && (this.lookahead eq null) && (that.lookahead eq null) } - def sameValues[X](that:EqualsIterator[A,X]): Boolean = { + def sameValues[X](that: EqualsIterator[A,X]): Boolean = { var equal = true while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { if (this.lookahead eq that.lookahead) { @@ -1203,42 +1209,42 @@ private[collection] object RedBlackTree { (join(t.left, t.key, t.value, tt), kk, vv) } - private[this] def join2[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + private[this] def join2[A, B](tl: Tree[A, B] | Null, tr: Tree[A, B] | Null): Tree[A, B] | Null = if(tl eq null) tr else if(tr eq null) tl else { - val (ttl, k, v) = splitLast(tl) - join(ttl, k, v, tr) + val (ttl, k, v) = splitLast(tl.nn) + join(ttl, k, v, tr.nn) } - private[this] def _union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + private[this] def _union[A, B](t1: Tree[A, B] | Null, t2: Tree[A, B] | Null)(implicit ordering: Ordering[A]): Tree[A, B] | Null = if((t1 eq null) || (t1 eq t2)) t2 else if(t2 eq null) t1 else { - val (l1, _, r1, k1) = split(t1, t2.key) - val tl = _union(l1, t2.left) - val tr = _union(r1, t2.right) - join(tl, k1, t2.value, tr) + val (l1, _, r1, k1) = split(t1.nn, t2.nn.key) + val tl = _union(l1, t2.nn.left) + val tr = _union(r1, t2.nn.right) + join(tl, k1, t2.nn.value, tr) } - private[this] def _intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + private[this] def _intersect[A, B](t1: Tree[A, B] | Null, t2: Tree[A, B] | Null)(implicit ordering: Ordering[A]): Tree[A, B] | Null = if((t1 eq null) || (t2 eq null)) null else if (t1 eq t2) t1 else { - val (l1, b, r1, k1) = split(t1, t2.key) - val tl = _intersect(l1, t2.left) - val tr = _intersect(r1, t2.right) - if(b ne null) join(tl, k1, t2.value, tr) + val (l1, b, r1, k1) = split(t1.nn, t2.nn.key) + val tl = _intersect(l1, t2.nn.left) + val tr = _intersect(r1, t2.nn.right) + if(b ne null) join(tl, k1, t2.nn.value, tr) else join2(tl, tr) } - private[this] def _difference[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + private[this] def _difference[A, B](t1: Tree[A, B] | Null, t2: Tree[A, B] | Null)(implicit ordering: Ordering[A]): Tree[A, B] | Null = if((t1 eq null) || (t2 eq null)) t1 else if (t1 eq t2) null else { - val (l1, _, r1, _) = split(t1, t2.key) - val tl = _difference(l1, t2.left) - val tr = _difference(r1, t2.right) + val (l1, _, r1, _) = split(t1.nn, t2.nn.key) + val tl = _difference(l1, t2.nn.left) + val tr = _difference(r1, t2.nn.right) 
join2(tl, tr) } } diff --git a/library/src/scala/collection/immutable/SeqMap.scala b/library/src/scala/collection/immutable/SeqMap.scala index 03daef1481a8..09ad4ab24377 100644 --- a/library/src/scala/collection/immutable/SeqMap.scala +++ b/library/src/scala/collection/immutable/SeqMap.scala @@ -238,22 +238,22 @@ object SeqMap extends MapFactory[SeqMap] { private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { private[this] var elems: SeqMap[K, V] = SeqMap.empty private[this] var switchedToVectorMapBuilder: Boolean = false - private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] | Null = null override def clear(): Unit = { elems = SeqMap.empty if (vectorMapBuilder != null) { - vectorMapBuilder.clear() + vectorMapBuilder.nn.clear() } switchedToVectorMapBuilder = false } override def result(): SeqMap[K, V] = - if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems + if (switchedToVectorMapBuilder) vectorMapBuilder.nn.result() else elems def addOne(elem: (K, V)) = { if (switchedToVectorMapBuilder) { - vectorMapBuilder.addOne(elem) + vectorMapBuilder.nn.addOne(elem) } else if (elems.size < 4) { elems = elems + elem } else { @@ -265,8 +265,8 @@ object SeqMap extends MapFactory[SeqMap] { if (vectorMapBuilder == null) { vectorMapBuilder = new VectorMapBuilder } - elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) - vectorMapBuilder.addOne(elem) + elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder.nn) + vectorMapBuilder.nn.addOne(elem) } } @@ -275,7 +275,7 @@ object SeqMap extends MapFactory[SeqMap] { override def addAll(xs: IterableOnce[(K, V)]): this.type = if (switchedToVectorMapBuilder) { - vectorMapBuilder.addAll(xs) + vectorMapBuilder.nn.addAll(xs) this } else { super.addAll(xs) diff --git a/library/src/scala/collection/immutable/Set.scala b/library/src/scala/collection/immutable/Set.scala index e8509b58016e..825d0172fe49 100644 --- a/library/src/scala/collection/immutable/Set.scala +++ b/library/src/scala/collection/immutable/Set.scala @@ -362,22 +362,22 @@ abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { private[this] var elems: Set[A] = Set.empty private[this] var switchedToHashSetBuilder: Boolean = false - private[this] var hashSetBuilder: HashSetBuilder[A] = _ + private[this] var hashSetBuilder: HashSetBuilder[A] | Null = _ override def clear(): Unit = { elems = Set.empty if (hashSetBuilder != null) { - hashSetBuilder.clear() + hashSetBuilder.nn.clear() } switchedToHashSetBuilder = false } override def result(): Set[A] = - if (switchedToHashSetBuilder) hashSetBuilder.result() else elems + if (switchedToHashSetBuilder) hashSetBuilder.nn.result() else elems def addOne(elem: A) = { if (switchedToHashSetBuilder) { - hashSetBuilder.addOne(elem) + hashSetBuilder.nn.addOne(elem) } else if (elems.size < 4) { elems = elems + elem } else { @@ -389,8 +389,8 @@ private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { if (hashSetBuilder == null) { hashSetBuilder = new HashSetBuilder } - elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder) - hashSetBuilder.addOne(elem) + elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder.nn) + hashSetBuilder.nn.addOne(elem) } } @@ -399,7 +399,7 @@ private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { override def addAll(xs: IterableOnce[A]): this.type = if (switchedToHashSetBuilder) { - 
hashSetBuilder.addAll(xs) + hashSetBuilder.nn.addAll(xs) this } else { super.addAll(xs) diff --git a/library/src/scala/collection/immutable/Stream.scala b/library/src/scala/collection/immutable/Stream.scala index 898a988735c6..4aeb18210b7e 100644 --- a/library/src/scala/collection/immutable/Stream.scala +++ b/library/src/scala/collection/immutable/Stream.scala @@ -394,18 +394,18 @@ object Stream extends SeqFactory[Stream] { @SerialVersionUID(3L) final class Cons[A](override val head: A, tl: => Stream[A]) extends Stream[A] { override def isEmpty: Boolean = false - @volatile private[this] var tlVal: Stream[A] = _ - @volatile private[this] var tlGen = () => tl + @volatile private[this] var tlVal: Stream[A] | Null = _ + @volatile private[this] var tlGen: (() => Stream[A]) | Null = () => tl protected def tailDefined: Boolean = tlGen eq null override def tail: Stream[A] = { if (!tailDefined) synchronized { if (!tailDefined) { - tlVal = tlGen() + tlVal = tlGen.nn() tlGen = null } } - tlVal + tlVal.nn } /** Forces evaluation of the whole `Stream` and returns it. @@ -478,8 +478,8 @@ object Stream extends SeqFactory[Stream] { new WithFilter[A](l, p) private[this] final class WithFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean) extends collection.WithFilter[A, Stream] { - private[this] var s = l // set to null to allow GC after filtered - private[this] lazy val filtered: Stream[A] = { val f = s.filter(p); s = null.asInstanceOf[Stream[A]]; f } // don't set to null if throw during filter + private[this] var s: Stream[A] | Null = l // set to null to allow GC after filtered + private[this] lazy val filtered: Stream[A] = { val f = s.nn.filter(p); s = null; f } // don't set to null if throw during filter def map[B](f: A => B): Stream[B] = filtered.map(f) def flatMap[B](f: A => IterableOnce[B]): Stream[B] = filtered.flatMap(f) def foreach[U](f: A => U): Unit = filtered.foreach(f) diff --git a/library/src/scala/collection/immutable/TreeSeqMap.scala b/library/src/scala/collection/immutable/TreeSeqMap.scala index 4eaa8487b6ff..38d18e5e725d 100644 --- a/library/src/scala/collection/immutable/TreeSeqMap.scala +++ b/library/src/scala/collection/immutable/TreeSeqMap.scala @@ -317,12 +317,12 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { private[this] val bdr = new MapBuilderImpl[K, (Int, V)] private[this] var ong = Ordering.empty[K] private[this] var ord = 0 - private[this] var aliased: TreeSeqMap[K, V] = _ + private[this] var aliased: TreeSeqMap[K, V] | Null = null override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) def addOne(key: K, value: V): this.type = { if (aliased ne null) { - aliased = aliased.updated(key, value) + aliased = aliased.nn.updated(key, value) } else { bdr.getOrElse(key, null) match { case (o, v) => @@ -352,7 +352,7 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { if (aliased eq null) { aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy) } - aliased + aliased.nn } } @@ -570,8 +570,8 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { else Bin(p, m, l, r.append(ordinal, value)) } - @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value) - private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match { + @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null: Bin[S] | Null, ordinal, value) + private[collection] final def appendInPlace1[S >: 
T](parent: Bin[S] | Null, ordinal: Int, value: S): Ordering[S] = this match { case Zero => Tip(ordinal, value) case Tip(o, _) if o >= ordinal => @@ -579,14 +579,14 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { case Tip(o, _) if parent == null => join(ordinal, Tip(ordinal, value), o, this) case Tip(o, _) => - parent.right = join(ordinal, Tip(ordinal, value), o, this) - parent + parent.nn.right = join(ordinal, Tip(ordinal, value), o, this) + parent.nn case b @ Bin(p, m, _, r) => if (!hasMatch(ordinal, p, m)) { val b2 = join(ordinal, Tip(ordinal, value), p, this) if (parent != null) { - parent.right = b2 - parent + parent.nn.right = b2 + parent.nn } else b2 } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") else { diff --git a/library/src/scala/collection/immutable/Vector.scala b/library/src/scala/collection/immutable/Vector.scala index f38cdbc77b5d..6003085d0af1 100644 --- a/library/src/scala/collection/immutable/Vector.scala +++ b/library/src/scala/collection/immutable/Vector.scala @@ -361,7 +361,7 @@ private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { protected[this] def slice0(lo: Int, hi: Int): Vector[Nothing] = this protected[immutable] def vectorSliceCount: Int = 0 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] | Null = null protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0 override def equals(o: Any): Boolean = { @@ -429,12 +429,12 @@ private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) - case data1b => new Vector1(data1b) + case data1b => new Vector1(data1b.nn) } override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { val data1b = append1IfSpace(prefix1, suffix) - if(data1b ne null) new Vector1(data1b) + if(data1b ne null) new Vector1(data1b.nn) else super.appendedAll0(suffix, k) } } @@ -525,8 +525,8 @@ private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, + val diff = prefix1b.nn.length - prefix1.length + copy(prefix1 = prefix1b.nn, len1 = len1 + diff, length0 = length0 + diff, ) @@ -534,7 +534,7 @@ private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + if(suffix1b ne null) copy(suffix1 = suffix1b.nn, length0 = length0-suffix1.length+suffix1b.nn.length) else super.appendedAll0(suffix, k) } } @@ -647,8 +647,8 @@ private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, + val diff = prefix1b.nn.length - prefix1.length + copy(prefix1 = prefix1b.nn, len1 = len1 + diff, len12 = len12 + diff, 
length0 = length0 + diff, @@ -657,7 +657,7 @@ private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + if(suffix1b ne null) copy(suffix1 = suffix1b.nn, length0 = length0-suffix1.length+suffix1b.nn.length) else super.appendedAll0(suffix, k) } } @@ -1167,7 +1167,7 @@ private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int private final class VectorSliceBuilder(lo: Int, hi: Int) { //println(s"***** VectorSliceBuilder($lo, $hi)") - private[this] val slices = new Array[Array[AnyRef]](11) + private[this] val slices = new Array[Array[AnyRef] | Null](11) private[this] var len, pos, maxDim = 0 @inline private[this] def prefixIdx(n: Int) = n-1 @@ -1611,8 +1611,8 @@ final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { lenRest -= offset - newOffset offset = newOffset } - var a: Array[AnyRef] = null // the array we modify - var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a + var a: Array[AnyRef] | Null = null // the array we modify + var aParent: Array[AnyRef] | Null = null // a's parent, so aParent(0) == a if (depth >= 6) { a = a6.asInstanceOf[Array[AnyRef]] val i = offset >>> BITS5 @@ -1656,43 +1656,43 @@ final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { if (a == null) a = a3.asInstanceOf[Array[AnyRef]] val i = (offset >>> BITS2) & MASK if (depth == 3) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + if (i > 0) System.arraycopy(a.nn, i, a.nn, 0, WIDTH - i) a3 = a.asInstanceOf[Arr3] shrinkOffsetIfToLarge(WIDTH2) if ((lenRest >>> BITS2) == 0) depth = 2 } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a + if (i > 0) a = copyOfRange(a.nn, i, WIDTH) + aParent.nn(0) = a } aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] + a = a.nn(0).asInstanceOf[Array[AnyRef]] } if (depth >= 2) { if (a == null) a = a2.asInstanceOf[Array[AnyRef]] val i = (offset >>> BITS) & MASK if (depth == 2) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + if (i > 0) System.arraycopy(a.nn, i, a.nn, 0, WIDTH - i) a2 = a.asInstanceOf[Arr2] shrinkOffsetIfToLarge(WIDTH) if ((lenRest >>> BITS) == 0) depth = 1 } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a + if (i > 0) a = copyOfRange(a.nn, i, WIDTH) + aParent.nn(0) = a } aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] + a = a.nn(0).asInstanceOf[Array[AnyRef]] } if (depth >= 1) { if (a == null) a = a1.asInstanceOf[Array[AnyRef]] val i = offset & MASK if (depth == 1) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + if (i > 0) System.arraycopy(a.nn, i, a.nn, 0, WIDTH - i) a1 = a.asInstanceOf[Arr1] len1 -= offset offset = 0 } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a + if (i > 0) a = copyOfRange(a.nn, i, WIDTH) + aParent.nn(0) = a } } prefixIsRightAligned = false @@ -2186,7 +2186,7 @@ private object VectorStatics { ac.asInstanceOf[Array[T]] } - final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]): Arr1 | Null = xs match { case it: Iterable[_] => if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { it.size match { @@ -2211,7 +2211,7 @@ private object VectorStatics { } else null } - final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + final 
def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]): Arr1 | Null = xs match { case it: Iterable[_] => if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { it.size match { diff --git a/library/src/scala/collection/immutable/VectorMap.scala b/library/src/scala/collection/immutable/VectorMap.scala index 361427a86c53..f39374aa0306 100644 --- a/library/src/scala/collection/immutable/VectorMap.scala +++ b/library/src/scala/collection/immutable/VectorMap.scala @@ -69,29 +69,29 @@ final class VectorMap[K, +V] private ( } @tailrec - private def nextValidField(slot: Int): (Int, K) = { - if (slot >= fields.size) (-1, null.asInstanceOf[K]) + private def nextValidField(slot: Int): (Int, K | Null) = { + if (slot >= fields.size) (-1, null) else fields(slot) match { case Tombstone(distance) => nextValidField(slot + distance) - case k /*: K | Null */ => (slot, k.asInstanceOf[K]) + case k /*: K | Null */ => (slot, k.asInstanceOf[K | Null]) } } def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { private[this] val fieldsLength = fields.length private[this] var slot = -1 - private[this] var key: K = null.asInstanceOf[K] + private[this] var key: K | Null = null private[this] def advance(): Unit = { val nextSlot = slot + 1 if (nextSlot >= fieldsLength) { slot = fieldsLength - key = null.asInstanceOf[K] + key = null } else { nextValidField(nextSlot) match { case (-1, _) => slot = fieldsLength - key = null.asInstanceOf[K] + key = null case (s, k) => slot = s key = k @@ -106,7 +106,7 @@ final class VectorMap[K, +V] private ( override def next(): (K, V) = if (!hasNext) Iterator.empty.next() else { - val result = (key, underlying(key)._2) + val result = (key.nn, underlying(key.nn)._2) advance() result } @@ -191,7 +191,7 @@ final class VectorMap[K, +V] private ( override def tail: VectorMap[K, V] = { if (isEmpty) throw new UnsupportedOperationException("empty.tail") val (slot, key) = nextValidField(0) - new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1) + new VectorMap(fields.drop(slot + 1), underlying - key.nn, dropped + slot + 1) } override def init: VectorMap[K, V] = { @@ -244,7 +244,7 @@ object VectorMap extends MapFactory[VectorMap] { private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { private[this] val vectorBuilder = new VectorBuilder[K] private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] - private[this] var aliased: VectorMap[K, V] = _ + private[this] var aliased: VectorMap[K, V] | Null = _ override def clear(): Unit = { vectorBuilder.clear() @@ -256,11 +256,11 @@ private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K if (aliased eq null) { aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result()) } - aliased + aliased.nn } def addOne(key: K, value: V): this.type = { if (aliased ne null) { - aliased = aliased.updated(key, value) + aliased = aliased.nn.updated(key, value) } else { mapBuilder.getOrElse(key, null) match { case (slot, _) => diff --git a/library/src/scala/collection/mutable/AnyRefMap.scala b/library/src/scala/collection/mutable/AnyRefMap.scala index 9ad433309b10..ca884bb61490 100644 --- a/library/src/scala/collection/mutable/AnyRefMap.scala +++ b/library/src/scala/collection/mutable/AnyRefMap.scala @@ -68,9 +68,9 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi private[this] var mask = 0 private[this] var _size = 0 private[this] var _vacant = 0 - private[this] var _hashes: Array[Int] = null - private[this] var _keys: Array[AnyRef] 
= null - private[this] var _values: Array[AnyRef] = null + private[this] var _hashes: Array[Int] | Null = null + private[this] var _keys: Array[AnyRef] | Null = null + private[this] var _values: Array[AnyRef] | Null = null if (initBlank) defaultInitialize(initialBufferSize) @@ -123,8 +123,8 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi var e = h & mask var x = 0 var g = 0 - val hashes = _hashes - val keys = _keys + val hashes = _hashes.nn + val keys = _keys.nn while ({ g = hashes(e); g != 0}) { if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e x += 1 @@ -138,8 +138,10 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi var x = 0 var g = 0 var o = -1 - while ({ g = _hashes(e); g != 0}) { - if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + val hashes = _hashes.nn + val keys = _keys.nn + while ({ g = hashes(e); g != 0}) { + if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e else if (o == -1 && g+g == 0) o = e x += 1 e = (e + 2*(x+1)*x - 3) & mask @@ -151,12 +153,12 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi override def get(key: K): Option[V] = { val i = seekEntry(hashOf(key), key) - if (i < 0) None else Some(_values(i).asInstanceOf[V]) + if (i < 0) None else Some(_values.nn(i).asInstanceOf[V]) } override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { val i = seekEntry(hashOf(key), key) - if (i < 0) default else _values(i).asInstanceOf[V] + if (i < 0) default else _values.nn(i).asInstanceOf[V] } override def getOrElseUpdate(key: K, defaultValue: => V): V = { @@ -164,7 +166,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi var i = seekEntryOrOpen(h, key) if (i < 0) { val value = { - val ohs = _hashes + val ohs = _hashes.nn val j = i & IndexMask val oh = ohs(j) val ans = defaultValue @@ -181,14 +183,14 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } _size += 1 val j = i & IndexMask - _hashes(j) = h - _keys(j) = key.asInstanceOf[AnyRef] - _values(j) = value.asInstanceOf[AnyRef] + _hashes.nn(j) = h + _keys.nn(j) = key.asInstanceOf[AnyRef] + _values.nn(j) = value.asInstanceOf[AnyRef] if ((i & VacantBit) != 0) _vacant -= 1 else if (imbalanced) repack() value } - else _values(i).asInstanceOf[V] + else _values.nn(i).asInstanceOf[V] } /** Retrieves the value associated with a key, or the default for that type if none exists @@ -200,7 +202,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi */ def getOrNull(key: K): V = { val i = seekEntry(hashOf(key), key) - (if (i < 0) null else _values(i)).asInstanceOf[V] + (if (i < 0) null else _values.nn(i)).asInstanceOf[V] } /** Retrieves the value associated with a key. @@ -210,7 +212,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi */ override def apply(key: K): V = { val i = seekEntry(hashOf(key), key) - if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] + if (i < 0) defaultEntry(key) else _values.nn(i).asInstanceOf[V] } /** Defers to defaultEntry to find a default value for the key. 
Throws an @@ -219,9 +221,9 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi override def default(key: K): V = defaultEntry(key) private def repack(newMask: Int): Unit = { - val oh = _hashes - val ok = _keys - val ov = _values + val oh = _hashes.nn + val ok = _keys.nn + val ov = _values.nn mask = newMask _hashes = new Array[Int](mask+1) _keys = new Array[AnyRef](mask+1) @@ -233,10 +235,10 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi if (h+h != 0) { var e = h & mask var x = 0 - while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - _hashes(e) = h - _keys(e) = ok(i) - _values(e) = ov(i) + while (_hashes.nn(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + _hashes.nn(e) = h + _keys.nn(e) = ok(i) + _values.nn(e) = ov(i) } i += 1 } @@ -262,18 +264,18 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi val i = seekEntryOrOpen(h, key) if (i < 0) { val j = i & IndexMask - _hashes(j) = h - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] + _hashes.nn(j) = h + _keys.nn(j) = key + _values.nn(j) = value.asInstanceOf[AnyRef] _size += 1 if ((i & VacantBit) != 0) _vacant -= 1 else if (imbalanced) repack() None } else { - val ans = Some(_values(i).asInstanceOf[V]) - _hashes(i) = h - _values(i) = value.asInstanceOf[AnyRef] + val ans = Some(_values.nn(i).asInstanceOf[V]) + _hashes.nn(i) = h + _values.nn(i) = value.asInstanceOf[AnyRef] ans } } @@ -287,16 +289,16 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi val i = seekEntryOrOpen(h, key) if (i < 0) { val j = i & IndexMask - _hashes(j) = h - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] + _hashes.nn(j) = h + _keys.nn(j) = key + _values.nn(j) = value.asInstanceOf[AnyRef] _size += 1 if ((i & VacantBit) != 0) _vacant -= 1 else if (imbalanced) repack() } else { - _hashes(i) = h - _values(i) = value.asInstanceOf[AnyRef] + _hashes.nn(i) = h + _values.nn(i) = value.asInstanceOf[AnyRef] } } @@ -332,17 +334,17 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] { - private[this] val hz = _hashes - private[this] val kz = _keys - private[this] val vz = _values + private[this] val hz = _hashes.nn + private[this] val kz = _keys.nn + private[this] val vz = _values.nn private[this] var index = 0 - def hasNext: Boolean = index < hz.length && { + def hasNext: Boolean = index < hz.nn.length && { var h = hz(index) while (h+h == 0) { index += 1 - if (index >= hz.length) return false + if (index >= hz.nn.length) return false h = hz(index) } true @@ -365,8 +367,8 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi var i = 0 var e = _size while (e > 0) { - while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 - if (i < _hashes.length) { + while(i < _hashes.nn.length && { val h = _hashes.nn(i); h+h == 0 && i < _hashes.nn.length}) i += 1 + if (i < _hashes.nn.length) { f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])) i += 1 e -= 1 @@ -379,8 +381,8 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi var i = 0 var e = _size while (e > 0) { - while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 - if (i < _hashes.length) { + while(i < _hashes.nn.length && { val h = _hashes.nn(i); h+h == 0 && i < _hashes.nn.length}) i += 1 + if (i < _hashes.nn.length) { f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]) i += 1 e -= 1 @@ -390,9 +392,9 @@ class AnyRefMap[K <: AnyRef, V] private[collection] 
(defaultEntry: K => V, initi } override def clone(): AnyRefMap[K, V] = { - val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = java.util.Arrays.copyOf(_values, _values.length) + val hz = java.util.Arrays.copyOf(_hashes, _hashes.nn.length) + val kz = java.util.Arrays.copyOf(_keys, _keys.nn.length) + val vz = java.util.Arrays.copyOf(_values, _values.nn.length) val arm = new AnyRefMap[K, V](defaultEntry, 1, initBlank = false) arm.initializeTo(mask, _size, _vacant, hz, kz, vz) arm @@ -421,7 +423,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { var i,j = 0 - while (i < _hashes.length & j < _size) { + while (i < _hashes.nn.length & j < _size) { val h = _hashes(i) if (h+h != 0) { j += 1 @@ -432,10 +434,10 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } /** Applies a function to all keys of this map. */ - def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f) + def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys.nn, f) /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f) + def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values.nn, f) /** Creates a new `AnyRefMap` with different values. * Unlike `mapValues`, this method generates a new @@ -443,11 +445,11 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi */ def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = { val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, initBlank = false) - val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = new Array[AnyRef](_values.length) + val hz = java.util.Arrays.copyOf(_hashes, _hashes.nn.length) + val kz = java.util.Arrays.copyOf(_keys, _keys.nn.length) + val vz = new Array[AnyRef](_values.nn.length) var i,j = 0 - while (i < _hashes.length & j < _size) { + while (i < _hashes.nn.length & j < _size) { val h = _hashes(i) if (h+h != 0) { j += 1 @@ -470,11 +472,11 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi */ def transformValuesInPlace(f: V => V): this.type = { var i,j = 0 - while (i < _hashes.length & j < _size) { + while (i < _hashes.nn.length & j < _size) { val h = _hashes(i) if (h+h != 0) { j += 1 - _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + _values.nn.update(i, f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]) } i += 1 } diff --git a/library/src/scala/collection/mutable/ArrayBuilder.scala b/library/src/scala/collection/mutable/ArrayBuilder.scala index e962dd024836..40bf0e8830f6 100644 --- a/library/src/scala/collection/mutable/ArrayBuilder.scala +++ b/library/src/scala/collection/mutable/ArrayBuilder.scala @@ -25,7 +25,7 @@ sealed abstract class ArrayBuilder[T] extends ReusableBuilder[T, Array[T]] with Serializable { protected[this] var capacity: Int = 0 - protected[this] def elems: Array[T] // may not be allocated at size = capacity = 0 + protected[this] def elems: Array[T] | Null // may not be allocated at size = capacity = 0 protected var size: Int = 0 /** Current number of elements. 
*/ @@ -59,7 +59,7 @@ sealed abstract class ArrayBuilder[T] private def doAddAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { if (length > 0) { ensureSize(this.size + length) - Array.copy(xs, offset, elems, this.size, length) + Array.copy(xs, offset, elems.nn, this.size, length) size += length } this @@ -69,7 +69,7 @@ sealed abstract class ArrayBuilder[T] val k = xs.knownSize if (k > 0) { ensureSize(this.size + k) - val actual = IterableOnce.copyElemsToArray(xs, elems, this.size) + val actual = IterableOnce.copyElemsToArray(xs, elems.nn, this.size) if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") size += k } else if (k < 0) super.addAll(xs) @@ -111,12 +111,12 @@ object ArrayBuilder { @SerialVersionUID(3L) final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] { - protected var elems: Array[T] = _ + protected var elems: Array[T] | Null = _ private def mkArray(size: Int): Array[T] = { - if (capacity == size && capacity > 0) elems + if (capacity == size && capacity > 0) elems.nn else if (elems eq null) new Array[T](size) - else java.util.Arrays.copyOf[T](elems, size) + else java.util.Arrays.copyOf[T](elems.nn, size) } protected[this] def resize(size: Int): Unit = { @@ -126,7 +126,7 @@ object ArrayBuilder { def addOne(elem: T): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -134,7 +134,7 @@ object ArrayBuilder { def result(): Array[T] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } @@ -158,11 +158,11 @@ object ArrayBuilder { @SerialVersionUID(3L) final class ofByte extends ArrayBuilder[Byte] { - protected var elems: Array[Byte] = _ + protected var elems: Array[Byte] | Null = _ private def mkArray(size: Int): Array[Byte] = { val newelems = new Array[Byte](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + if (this.size > 0) Array.copy(elems.nn, 0, newelems, 0, this.size) newelems } @@ -173,7 +173,7 @@ object ArrayBuilder { def addOne(elem: Byte): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -181,7 +181,7 @@ object ArrayBuilder { def result(): Array[Byte] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } @@ -200,11 +200,11 @@ object ArrayBuilder { @SerialVersionUID(3L) final class ofShort extends ArrayBuilder[Short] { - protected var elems: Array[Short] = _ + protected var elems: Array[Short] | Null = _ private def mkArray(size: Int): Array[Short] = { val newelems = new Array[Short](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + if (this.size > 0) Array.copy(elems.nn, 0, newelems, 0, this.size) newelems } @@ -215,7 +215,7 @@ object ArrayBuilder { def addOne(elem: Short): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -223,7 +223,7 @@ object ArrayBuilder { def result(): Array[Short] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } @@ -242,11 +242,11 @@ object ArrayBuilder { @SerialVersionUID(3L) final class ofChar extends ArrayBuilder[Char] { - protected var elems: Array[Char] = _ + protected var elems: Array[Char] | Null = _ private def mkArray(size: Int): Array[Char] = { val newelems = new Array[Char](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + if (this.size > 0) Array.copy(elems.nn, 0, newelems, 
0, this.size) newelems } @@ -257,7 +257,7 @@ object ArrayBuilder { def addOne(elem: Char): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -265,7 +265,7 @@ object ArrayBuilder { def result(): Array[Char] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } @@ -284,11 +284,11 @@ object ArrayBuilder { @SerialVersionUID(3L) final class ofInt extends ArrayBuilder[Int] { - protected var elems: Array[Int] = _ + protected var elems: Array[Int] | Null = _ private def mkArray(size: Int): Array[Int] = { val newelems = new Array[Int](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + if (this.size > 0) Array.copy(elems.nn, 0, newelems, 0, this.size) newelems } @@ -299,7 +299,7 @@ object ArrayBuilder { def addOne(elem: Int): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -307,7 +307,7 @@ object ArrayBuilder { def result(): Array[Int] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } @@ -326,11 +326,11 @@ object ArrayBuilder { @SerialVersionUID(3L) final class ofLong extends ArrayBuilder[Long] { - protected var elems: Array[Long] = _ + protected var elems: Array[Long] | Null = _ private def mkArray(size: Int): Array[Long] = { val newelems = new Array[Long](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + if (this.size > 0) Array.copy(elems.nn, 0, newelems, 0, this.size) newelems } @@ -341,7 +341,7 @@ object ArrayBuilder { def addOne(elem: Long): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -349,7 +349,7 @@ object ArrayBuilder { def result(): Array[Long] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } @@ -368,11 +368,11 @@ object ArrayBuilder { @SerialVersionUID(3L) final class ofFloat extends ArrayBuilder[Float] { - protected var elems: Array[Float] = _ + protected var elems: Array[Float] | Null = _ private def mkArray(size: Int): Array[Float] = { val newelems = new Array[Float](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + if (this.size > 0) Array.copy(elems.nn, 0, newelems, 0, this.size) newelems } @@ -383,7 +383,7 @@ object ArrayBuilder { def addOne(elem: Float): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -391,7 +391,7 @@ object ArrayBuilder { def result(): Array[Float] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } @@ -410,11 +410,11 @@ object ArrayBuilder { @SerialVersionUID(3L) final class ofDouble extends ArrayBuilder[Double] { - protected var elems: Array[Double] = _ + protected var elems: Array[Double] | Null = _ private def mkArray(size: Int): Array[Double] = { val newelems = new Array[Double](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + if (this.size > 0) Array.copy(elems.nn, 0, newelems, 0, this.size) newelems } @@ -425,7 +425,7 @@ object ArrayBuilder { def addOne(elem: Double): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -433,7 +433,7 @@ object ArrayBuilder { def result(): Array[Double] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } @@ -452,11 +452,11 @@ object ArrayBuilder { 
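// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: each primitive ArrayBuilder
// specialization in the hunks around this point gets a nullable buffer
// (`protected var elems: Array[X] | Null`), `.nn` at the use sites, and a
// `result()` that hands the buffer out and nulls the field so it is not shared.
// A cut-down builder with just that shape, assuming Scala 3 with
// -Yexplicit-nulls; `TinyIntBuilder` is a hypothetical name.
final class TinyIntBuilder {
  private var elems: Array[Int] | Null = null // may stay unallocated while empty
  private var capacity: Int = 0
  private var size: Int = 0

  private def ensureSize(n: Int): Unit =
    if (n > capacity) {
      val newCap = math.max(n, math.max(capacity * 2, 16))
      val newElems = new Array[Int](newCap)
      if (size > 0) Array.copy(elems.nn, 0, newElems, 0, size)
      elems = newElems
      capacity = newCap
    }

  def addOne(x: Int): this.type = {
    ensureSize(size + 1)
    elems.nn(size) = x
    size += 1
    this
  }

  def result(): Array[Int] =
    if (capacity != 0 && capacity == size) {
      val res = elems.nn // the full buffer is exactly the requested result
      elems = null       // release it instead of keeping an alias
      capacity = 0
      size = 0
      res
    } else {
      val res = new Array[Int](size)
      if (size > 0) Array.copy(elems.nn, 0, res, 0, size)
      res
    }
}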
@SerialVersionUID(3L) class ofBoolean extends ArrayBuilder[Boolean] { - protected var elems: Array[Boolean] = _ + protected var elems: Array[Boolean] | Null = _ private def mkArray(size: Int): Array[Boolean] = { val newelems = new Array[Boolean](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + if (this.size > 0) Array.copy(elems.nn, 0, newelems, 0, this.size) newelems } @@ -467,7 +467,7 @@ object ArrayBuilder { def addOne(elem: Boolean): this.type = { ensureSize(size + 1) - elems(size) = elem + elems.nn(size) = elem size += 1 this } @@ -475,7 +475,7 @@ object ArrayBuilder { def result(): Array[Boolean] = { if (capacity != 0 && capacity == size) { capacity = 0 - val res = elems + val res = elems.nn elems = null res } diff --git a/library/src/scala/collection/mutable/ArrayDeque.scala b/library/src/scala/collection/mutable/ArrayDeque.scala index ca70f31d1869..e2e564aefa9d 100644 --- a/library/src/scala/collection/mutable/ArrayDeque.scala +++ b/library/src/scala/collection/mutable/ArrayDeque.scala @@ -37,7 +37,7 @@ import scala.reflect.ClassTag * @define willNotTerminateInf */ class ArrayDeque[A] protected ( - protected var array: Array[AnyRef], + protected var array: Array[AnyRef | Null], private[ArrayDeque] var start: Int, private[ArrayDeque] var end: Int ) extends AbstractBuffer[A] @@ -51,7 +51,7 @@ class ArrayDeque[A] protected ( reset(array, start, end) - private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = { + private[this] def reset(array: Array[AnyRef | Null], start: Int, end: Int) = { assert((array.length & (array.length - 1)) == 0, s"Array.length must be power of 2") requireBounds(idx = start, until = array.length) requireBounds(idx = end, until = array.length) @@ -305,7 +305,7 @@ class ArrayDeque[A] protected ( if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr) @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { - val elem = array(start) + val elem = array(start).nn array(start) = null start = start_+(1) if (resizeInternalRepr) resize(length) @@ -333,7 +333,7 @@ class ArrayDeque[A] protected ( @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { end = end_-(1) - val elem = array(end) + val elem = array(end).nn array(end) = null if (resizeInternalRepr) resize(length) elem.asInstanceOf[A] @@ -461,7 +461,7 @@ class ArrayDeque[A] protected ( this } - protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = + protected def ofArray(array: Array[AnyRef | Null], end: Int): ArrayDeque[A] = new ArrayDeque[A](array, start = 0, end) override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = { @@ -504,7 +504,7 @@ class ArrayDeque[A] protected ( array.length > ArrayDeque.DefaultInitialSize && array.length - len > len } - @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A] + @inline private[this] def _get(idx: Int): A = array(start_+(idx)).nn.asInstanceOf[A] @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef] @@ -564,18 +564,18 @@ object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { require(len >= 0, s"Non-negative array size required") val size = (1 << 31) >>> java.lang.Integer.numberOfLeadingZeros(len) << 1 require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len") - new Array[AnyRef](Math.max(size, DefaultInitialSize)) + new Array[AnyRef | 
Null](Math.max(size, DefaultInitialSize)) } } trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] { - protected def array: Array[AnyRef] + protected def array: Array[AnyRef | Null] final override def clone(): C = klone() protected def klone(): C - protected def ofArray(array: Array[AnyRef], end: Int): C + protected def ofArray(array: Array[AnyRef | Null], end: Int): C protected def start_+(idx: Int): Int diff --git a/library/src/scala/collection/mutable/ArraySeq.scala b/library/src/scala/collection/mutable/ArraySeq.scala index 0537092d0b13..ca606825441f 100644 --- a/library/src/scala/collection/mutable/ArraySeq.scala +++ b/library/src/scala/collection/mutable/ArraySeq.scala @@ -120,7 +120,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` * at runtime. */ - def make[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + def make[T](x: Array[T] | Null): ArraySeq[T] | Null = ((x: @unchecked) match { case null => null case x: Array[AnyRef] => new ofRef[AnyRef](x) case x: Array[Int] => new ofInt(x) @@ -132,7 +132,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => case x: Array[Short] => new ofShort(x) case x: Array[Boolean] => new ofBoolean(x) case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[ArraySeq[T]] + }).asInstanceOf[ArraySeq[T] | Null] @SerialVersionUID(3L) final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { diff --git a/library/src/scala/collection/mutable/BitSet.scala b/library/src/scala/collection/mutable/BitSet.scala index ba77d7161a0b..af37c9be6f71 100644 --- a/library/src/scala/collection/mutable/BitSet.scala +++ b/library/src/scala/collection/mutable/BitSet.scala @@ -326,21 +326,21 @@ class BitSet(protected[collection] final var elems: Array[Long]) // * over-allocating -- the resulting array will be exactly the right size // * multiple resizing allocations -- the array is allocated one time, not log(n) times. var i = nwords - 1 - var newArray: Array[Long] = null + var newArray: Array[Long] | Null = null while (i >= 0) { val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) if (w != 0L) { if (newArray eq null) { newArray = new Array(i + 1) } - newArray(i) = w + newArray.nn(i) = w } i -= 1 } if (newArray eq null) { empty } else { - fromBitMaskNoCopy(newArray) + fromBitMaskNoCopy(newArray.nn) } } diff --git a/library/src/scala/collection/mutable/CollisionProofHashMap.scala b/library/src/scala/collection/mutable/CollisionProofHashMap.scala index f56e679df2d2..d1ceae91673c 100644 --- a/library/src/scala/collection/mutable/CollisionProofHashMap.scala +++ b/library/src/scala/collection/mutable/CollisionProofHashMap.scala @@ -47,7 +47,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] /** The actual hash table. */ - private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + private[this] var table: Array[Node | Null] = new Array[Node | Null](tableSizeFor(initialCapacity)) /** The next size value at which to resize (capacity * load factor). 
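// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: ArrayDeque above shows the other
// nullability shape: the array reference stays non-null, but the element slots
// are typed `AnyRef | Null`, because vacated cells are reset to null and
// occupied cells are read back through `.nn` before the cast to the element
// type. A minimal stack over such slots, assuming Scala 3 with
// -Yexplicit-nulls; `SlotStack` is a hypothetical name.
final class SlotStack[A](capacity: Int) {
  private val slots: Array[AnyRef | Null] = new Array[AnyRef | Null](capacity)
  private var top = 0

  def push(a: A): Unit = {
    slots(top) = a.asInstanceOf[AnyRef] // the element occupies a nullable slot
    top += 1
  }

  def pop(): A = {
    if (top == 0) throw new NoSuchElementException("empty SlotStack")
    top -= 1
    val elem = slots(top).nn // an occupied slot is non-null by construction
    slots(top) = null        // clear the slot so the element can be collected
    elem.asInstanceOf[A]
  }
}
// The nulls in freed slots never escape: callers only ever see values typed `A`.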
*/ private[this] var threshold: Int = newThreshold(table.length) @@ -95,7 +95,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double } } - @`inline` private[this] def findNode(elem: K): Node = { + @`inline` private[this] def findNode(elem: K): Node | Null = { val hash = computeHash(elem) table(index(hash)) match { case null => null @@ -121,24 +121,24 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, getOld = false); this } - @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] | Null = { if(contentSize + 1 >= threshold) growTable(table.length * 2) val hash = computeHash(key) val idx = index(hash) put0(key, value, getOld, hash, idx) } - private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] | Null = { val res = table(idx) match { case n: RBNode @uc => insert(n, idx, key, hash, value) case _old => - val old: LLNode = _old.asInstanceOf[LLNode] + val old: LLNode | Null = _old.asInstanceOf[LLNode | Null] if(old eq null) { table(idx) = new LLNode(key, hash, value, null) } else { var remaining = CollisionProofHashMap.treeifyThreshold - var prev: LLNode = null + var prev: LLNode | Null = null var n = old while((n ne null) && n.hash <= hash && remaining > 0) { if(n.hash == hash && key == n.key) { @@ -151,11 +151,11 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double remaining -= 1 } if(remaining == 0) { - treeify(old, idx) + treeify(old.nn, idx) return put0(key, value, getOld, hash, idx) } if(prev eq null) table(idx) = new LLNode(key, hash, value, old) - else prev.next = new LLNode(key, hash, value, prev.next) + else prev.nn.next = new LLNode(key, hash, value, prev.nn.next) } true } @@ -165,11 +165,11 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double private[this] def treeify(old: LLNode, idx: Int): Unit = { table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) - var n: LLNode = old.next + var n: LLNode | Null = old.next while(n ne null) { val root = table(idx).asInstanceOf[RBNode] - insertIntoExisting(root, idx, n.key, n.hash, n.value, root) - n = n.next + insertIntoExisting(root, idx, n.nn.key, n.nn.hash, n.nn.value, root) + n = n.nn.next } } @@ -215,7 +215,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double protected[this] def extract(node: RBNode): R private[this] var i = 0 - private[this] var node: Node = null + private[this] var node: Node | Null = null private[this] val len = table.length def hasNext: Boolean = { @@ -243,7 +243,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double else node match { case n: RBNode @uc => val r = extract(n) - node = CollisionProofHashMap.successor(n ) + node = CollisionProofHashMap.successor(n) r case n: LLNode @uc => val r = extract(n) @@ -276,7 +276,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double var i = 0 while (i < oldlen) { val old = table(i) - if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + if(old ne null) splitBucket(old.nn, i, i + oldlen, oldlen) i += 1 } oldlen *= 2 @@ -284,39 +284,39 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double } @`inline` 
private[this] def reallocTable(newlen: Int) = { - table = new Array(newlen) + table = new Array[Node | Null](newlen) threshold = newThreshold(table.length) } - @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + @`inline` private[this] def splitBucket(tree: Node | Null, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case null => } private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) //preLow.next = null - //preHigh.next = null - var lastLow: LLNode = preLow - var lastHigh: LLNode = preHigh - var n = list + var lastLow: LLNode | Null = preLow + var lastHigh: LLNode | Null = preHigh + var n: LLNode | Null = list while(n ne null) { - val next = n.next - if((n.hash & mask) == 0) { // keep low - lastLow.next = n + val next = n.nn.next + if((n.nn.hash & mask) == 0) { // keep low + lastLow.nn.next = n lastLow = n } else { // move to high - lastHigh.next = n + lastHigh.nn.next = n lastHigh = n } n = next } - lastLow.next = null + lastLow.nn.next = null if(list ne preLow.next) table(lowBucket) = preLow.next if(preHigh.next ne null) { table(highBucket) = preHigh.next - lastHigh.next = null + lastHigh.nn.next = null } } @@ -392,10 +392,10 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double case null => () case n: LLNode @uc => val nd = n.getNode(key, hash) - if(nd != null) return nd.value + if(nd ne null) return nd.value case n => val nd = n.asInstanceOf[RBNode].getNode(key, hash) - if(nd != null) return nd.value + if(nd ne null) return nd.value } val table0 = table val default = defaultValue @@ -459,8 +459,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: - @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red - @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + @`inline` private[this] def isRed(node: RBNode | Null) = (node ne null) && node.nn.red + @`inline` private[this] def isBlack(node: RBNode | Null) = (node eq null) || !node.nn.red @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { val i = hash - node.hash @@ -487,52 +487,52 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double table(bucket) = fixAfterInsert(_root, z) return true } - else insertIntoExisting(_root, bucket, key, hash, value, next) + else insertIntoExisting(_root, bucket, key, hash, value, next.nn) } } - private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + private[this] final def insert(tree: RBNode | Null, bucket: Int, key: K, hash: Int, value: V): Boolean = { if(tree eq null) { table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) true - } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } else insertIntoExisting(tree.nn, bucket, key, hash, value, tree.nn) } private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { var root = _root var z = node while (isRed(z.parent)) { - if (z.parent eq z.parent.parent.left) { - val y = 
z.parent.parent.right + if (z.parent eq z.parent.nn.parent.nn.left) { + val y = z.parent.nn.parent.nn.right if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent + z.parent.nn.red = false + y.nn.red = false + z.parent.nn.parent.nn.red = true + z = z.parent.nn.parent } else { - if (z eq z.parent.right) { - z = z.parent + if (z eq z.parent.nn.right) { + z = z.parent.nn root = rotateLeft(root, z) } - z.parent.red = false - z.parent.parent.red = true - root = rotateRight(root, z.parent.parent) + z.parent.nn.red = false + z.parent.nn.parent.nn.red = true + root = rotateRight(root, z.parent.nn.parent) } } else { // symmetric cases - val y = z.parent.parent.left + val y = z.parent.nn.parent.nn.left if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent + z.parent.nn.red = false + y.nn.red = false + z.parent.nn.parent.nn.red = true + z = z.parent.nn.parent } else { - if (z eq z.parent.left) { - z = z.parent + if (z eq z.parent.nn.left) { + z = z.parent.nn root = rotateRight(root, z) } - z.parent.red = false - z.parent.parent.red = true - root = rotateLeft(root, z.parent.parent) + z.parent.nn.red = false + z.parent.nn.parent.nn.red = true + root = rotateLeft(root, z.parent.nn.parent) } } } @@ -550,8 +550,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val oldValue = z.value var y = z var yIsRed = y.red - var x: RBNode = null - var xParent: RBNode = null + var x: RBNode | Null = null + var xParent: RBNode | Null = null if (z.left eq null) { x = z.right @@ -573,11 +573,11 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double xParent = y.parent root = transplant(root, y, y.right) y.right = z.right - y.right.parent = y + y.right.nn.parent = y } root = transplant(root, z, y) y.left = z.left - y.left.parent = y + y.left.nn.parent = y y.red = z.red } @@ -587,17 +587,17 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double } else Statics.pfMarker } - private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + private[this] def fixAfterDelete(_root: RBNode, node: RBNode | Null, parent: RBNode | Null): RBNode = { var root = _root var x = node var xParent = parent while ((x ne root) && isBlack(x)) { - if (x eq xParent.left) { - var w = xParent.right + if (x eq xParent.nn.left) { + var w = xParent.nn.right // assert(w ne null) - if (w.red) { - w.red = false + if (w.nn.red) { + w.nn.red = false xParent.red = true root = rotateLeft(root, xParent) w = xParent.right @@ -693,20 +693,20 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. 
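// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: in the red-black tree code above,
// child and parent links become `RBNode | Null`, so colour predicates are
// written against the nullable type and long parent chains need `.nn` after
// each hop that the tree invariant guarantees to be non-null. A stripped-down
// node type with those two idioms, assuming Scala 3 with -Yexplicit-nulls;
// `TreeNode`, `isRed`, `isBlack` and `grandparent` are hypothetical names.
final class TreeNode(
  var red: Boolean,
  var parent: TreeNode | Null = null,
  var left: TreeNode | Null = null,
  var right: TreeNode | Null = null
)

object TreeNode {
  // Null-safe colour tests: a missing child counts as black.
  def isRed(n: TreeNode | Null): Boolean = (n ne null) && n.nn.red
  def isBlack(n: TreeNode | Null): Boolean = (n eq null) || !n.nn.red

  // The caller must guarantee the node sits at depth >= 2; as in the
  // rebalancing code above, `.nn` is how that invariant is spelled out.
  def grandparent(n: TreeNode): TreeNode = n.parent.nn.parent.nn
}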
*/ - private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { + private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode | Null): RBNode = { var root = _root - if (to.parent eq null) root = from - else if (to eq to.parent.left) to.parent.left = from - else to.parent.right = from + if (to.parent eq null) root = from.nn + else if (to eq to.parent.left) to.parent.left = from.nn + else to.parent.right = from.nn if (from ne null) from.parent = to.parent root } // building - def fromNodes(xs: Iterator[Node], size: Int): RBNode = { + def fromNodes(xs: Iterator[Node], size: Int): RBNode | Null = { val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): RBNode = size match { + def f(level: Int, size: Int): RBNode | Null = size match { case 0 => null case 1 => val nn = xs.next() @@ -726,7 +726,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double } val n = new RBNode(key, hash, value, red = false, left, right, null) if(left ne null) left.parent = n - right.parent = n + if(right ne null) right.parent = n n } f(1, size) @@ -765,7 +765,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { @SerialVersionUID(3L) private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(using ordering) ++= it def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(using ordering) } @@ -787,62 +787,62 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { /////////////////////////// Red-Black Tree Node - final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V] | Null, var right: RBNode[K, V] | Null, var parent: RBNode[K, V] | Null) extends Node { override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" - @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] | Null = { val cmp = compare(k, h, this) if (cmp < 0) { - if(left ne null) left.getNode(k, h) else null + if(left ne null) left.nn.getNode(k, h) else null } else if (cmp > 0) { - if(right ne null) right.getNode(k, h) else null + if(right ne null) right.nn.getNode(k, h) else null } else this } def foreach[U](f: ((K, V)) => U): Unit = { - if(left ne null) left.foreach(f) + if(left ne null) left.nn.foreach(f) f((key, value)) - if(right ne null) right.foreach(f) + if(right ne null) right.nn.foreach(f) } def foreachEntry[U](f: (K, V) => U): Unit = { - if(left ne null) left.foreachEntry(f) + if(left ne null) left.nn.foreachEntry(f) f(key, value) - if(right ne null) right.foreachEntry(f) + if(right ne null) right.nn.foreachEntry(f) } def foreachNode[U](f: RBNode[K, V] => U): Unit = { - if(left ne null) 
left.foreachNode(f) + if(left ne null) left.nn.foreachNode(f) f(this) - if(right ne null) right.foreachNode(f) + if(right ne null) right.nn.foreachNode(f) } } - @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B] | Null): RBNode[A, B] = new RBNode(key, hash, value, red, null, null, parent) @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = - if (node.left eq null) node else minNodeNonNull(node.left) + if (node.left eq null) node else minNodeNonNull(node.left.nn) /** * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, * therefore, the last node), this method returns `null`. */ - private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { - if (node.right ne null) minNodeNonNull(node.right) + private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] | Null = { + if (node.right ne null) minNodeNonNull(node.right.nn) else { var x = node var y = x.parent while ((y ne null) && (x eq y.right)) { - x = y + x = y.nn y = y.parent } y } } - private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { - private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + private final class RBNodesIterator[A, B](tree: RBNode[A, B] | Null)(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] | Null = if(tree ne null) minNodeNonNull(tree.nn) else null def hasNext: Boolean = nextNode ne null @@ -850,38 +850,38 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { def next(): RBNode[A, B] = nextNode match { case null => Iterator.empty.next() case node => - nextNode = successor(node) - node + nextNode = successor(node.nn) + node.nn } } /////////////////////////// Linked List Node - private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V] | Null) extends Node { override def toString = s"LLNode($key, $value, $hash) -> $next" private[this] def eq(a: Any, b: Any): Boolean = if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) - @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] | Null = { if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this else if((next eq null) || (hash > h)) null - else next.getNode(k, h) + else next.nn.getNode(k, h) } @tailrec def foreach[U](f: ((K, V)) => U): Unit = { f((key, value)) - if(next ne null) next.foreach(f) + if(next ne null) next.nn.foreach(f) } @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { f(key, value) - if(next ne null) next.foreachEntry(f) + if(next ne null) next.nn.foreachEntry(f) } @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { f(this) - if(next ne null) next.foreachNode(f) + if(next ne null) next.nn.foreachNode(f) } } } diff --git a/library/src/scala/collection/mutable/HashTable.scala b/library/src/scala/collection/mutable/HashTable.scala index d58f6e01b7ac..d1e5fb0800a8 100644 --- a/library/src/scala/collection/mutable/HashTable.scala +++ 
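// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the LLNode changes above keep
// each bucket as a `next: LLNode | Null` chain and walk it with a
// tail-recursive lookup that tests `eq null` and steps through `.nn`.
// A minimal singly linked lookup in the same style, assuming Scala 3 with
// -Yexplicit-nulls; `Cell` and `find` are hypothetical names.
import scala.annotation.tailrec

final class Cell[K, V](val key: K, val value: V, var next: Cell[K, V] | Null)

@tailrec
def find[K, V](cell: Cell[K, V] | Null, key: K): Cell[K, V] | Null =
  if (cell eq null) null
  else if (cell.nn.key == key) cell.nn
  else find(cell.nn.next, key)

// Usage: a miss comes back as null, which the caller checks before `.nn`.
def cellDemo(): Int = {
  val chain = new Cell("a", 1, new Cell("b", 2, null))
  val hit = find(chain, "b")
  if (hit eq null) -1 else hit.nn.value
}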
b/library/src/scala/collection/mutable/HashTable.scala @@ -60,7 +60,7 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] /** The array keeping track of the number of elements in 32 element blocks. */ - protected var sizemap: Array[Int] = null + protected var sizemap: Array[Int] | Null = null protected var seedvalue: Int = tableSizeSeed @@ -78,7 +78,7 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] private def lastPopulatedIndex = { var idx = table.length - 1 - while (table(idx) == null && idx > 0) + while ((table(idx) eq null) && idx > 0) idx -= 1 idx @@ -135,7 +135,7 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] protected[collection] final def findEntry0(key: A, h: Int): Entry = { var e = table(h).asInstanceOf[Entry] - while (e != null && !elemEquals(e.key, key)) e = e.next + while ((e ne null) && !elemEquals(e.key, key)) e = e.next e } @@ -182,7 +182,7 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] */ private[collection] final def removeEntry0(key: A, h: Int) : Entry = { var e = table(h).asInstanceOf[Entry] - if (e != null) { + if (e ne null) { if (elemEquals(e.key, key)) { table(h) = e.next tableSize = tableSize - 1 @@ -191,11 +191,11 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] return e } else { var e1 = e.next - while (e1 != null && !elemEquals(e1.key, key)) { + while ((e1 ne null) && !elemEquals(e1.key, key)) { e = e1 e1 = e1.next } - if (e1 != null) { + if (e1 ne null) { e.next = e1.next tableSize = tableSize - 1 nnSizeMapRemove(h) @@ -214,11 +214,11 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] var idx = lastPopulatedIndex var es = iterTable(idx) - def hasNext = es != null + def hasNext = es ne null def next() = { val res = es es = es.next - while (es == null && idx > 0) { + while ((es eq null) && idx > 0) { idx = idx - 1 es = iterTable(idx) } @@ -232,12 +232,12 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] var idx = lastPopulatedIndex var es = iterTable(idx) - while (es != null) { + while (es ne null) { val next = es.next // Cache next in case f removes es. f(es.asInstanceOf[Entry]) es = next - while (es == null && idx > 0) { + while ((es eq null) && idx > 0) { idx -= 1 es = iterTable(idx) } @@ -248,7 +248,7 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] */ def clearTable(): Unit = { var i = table.length - 1 - while (i >= 0) { table(i) = null; i = i - 1 } + while (i >= 0) { table(i) = null.asInstanceOf[HashEntry[A, Entry]]; i = i - 1 } tableSize = 0 nnSizeMapReset(0) } @@ -260,7 +260,7 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] var i = oldTable.length - 1 while (i >= 0) { var e = oldTable(i) - while (e != null) { + while (e ne null) { val h = index(elemHashCode(e.key)) val e1 = e.next e.next = table(h).asInstanceOf[Entry] @@ -293,17 +293,17 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] * there. 
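// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: many HashTable hunks above only
// swap `== null` / `!= null` for `eq null` / `ne null`. Reference equality is
// the unambiguous nullness test: it cannot be rerouted by a user-defined
// `equals` and it reads as a null check. The nullable `sizemap` declared above
// is then guarded the same way before `.nn`, as in the hunks that follow.
// A small standalone version of that guard, assuming Scala 3 with
// -Yexplicit-nulls; `BlockCounters` is a hypothetical name.
final class BlockCounters {
  // One counter per block, allocated only while counting is enabled.
  private var sizemap: Array[Int] | Null = null

  def enable(blocks: Int): Unit = sizemap = new Array[Int](blocks)
  def disable(): Unit = sizemap = null

  def add(block: Int): Unit =
    if (sizemap ne null) sizemap.nn(block) += 1 // guard with `ne null`, then assert with `.nn`

  def snapshot: List[Int] =
    if (sizemap eq null) Nil else sizemap.nn.toList
}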
*/ protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) += 1 + sizemap.nn(h >> sizeMapBucketBitSize) += 1 } protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) -= 1 + sizemap.nn(h >> sizeMapBucketBitSize) -= 1 } protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { val nsize = calcSizeMapSize(tableLength) - if (sizemap.length != nsize) sizemap = new Array[Int](nsize) - else java.util.Arrays.fill(sizemap, 0) + if (sizemap.nn.length != nsize) sizemap = new Array[Int](nsize) + else java.util.Arrays.fill(sizemap.nn, 0) } private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize @@ -336,14 +336,14 @@ private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] } tableidx += 1 } - sizemap(bucketidx) = currbucketsize + sizemap.nn(bucketidx) = currbucketsize tableuntil += sizeMapBucketSize bucketidx += 1 } } private[collection] def printSizeMap() = { - println(sizemap.to(collection.immutable.List)) + println(sizemap.nn.to(collection.immutable.List)) } protected final def sizeMapDisable() = sizemap = null diff --git a/library/src/scala/collection/mutable/LinkedHashMap.scala b/library/src/scala/collection/mutable/LinkedHashMap.scala index d529fee42596..4ac619030b6d 100644 --- a/library/src/scala/collection/mutable/LinkedHashMap.scala +++ b/library/src/scala/collection/mutable/LinkedHashMap.scala @@ -47,7 +47,7 @@ class LinkedHashMap[K, V] // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper // would not return the elements in insertion order - private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] + private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] | Null private[collection] def _firstEntry: Entry = firstEntry @@ -67,19 +67,19 @@ class LinkedHashMap[K, V] private[this] var contentSize = 0 override def last: (K, V) = - if (size > 0) (lastEntry.key, lastEntry.value) + if (size > 0) (lastEntry.nn.key, lastEntry.nn.value) else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") override def lastOption: Option[(K, V)] = - if (size > 0) Some((lastEntry.key, lastEntry.value)) + if (size > 0) Some((lastEntry.nn.key, lastEntry.nn.value)) else None override def head: (K, V) = - if (size > 0) (firstEntry.key, firstEntry.value) + if (size > 0) (firstEntry.nn.key, firstEntry.nn.value) else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") override def headOption: Option[(K, V)] = - if (size > 0) Some((firstEntry.key, firstEntry.value)) + if (size > 0) Some((firstEntry.nn.key, firstEntry.nn.value)) else None override def size = contentSize @@ -288,9 +288,9 @@ class LinkedHashMap[K, V] case (None, None) => // do nothing case (Some(_), None) => - if (previousEntry != null) previousEntry.next = foundEntry.next - else table(indexedHash) = foundEntry.next - deleteEntry(foundEntry) + if (previousEntry != null) previousEntry.nn.next = foundEntry.nn.next + else table(indexedHash) = foundEntry.nn.next + deleteEntry(foundEntry.nn) contentSize -= 1 case (None, Some(value)) => @@ -318,7 +318,7 @@ class LinkedHashMap[K, V] var cur = firstEntry while (cur ne null) { f((cur.key, cur.value)) - cur = cur.later + cur = cur.later.nn } } @@ -326,7 +326,7 @@ class LinkedHashMap[K, V] var cur = firstEntry while (cur ne null) { f(cur.key, cur.value) - cur = cur.later + cur = cur.later.nn } } @@ 
-350,7 +350,7 @@ class LinkedHashMap[K, V] val e = new Entry(key, hash, value) if (firstEntry eq null) firstEntry = e else { - lastEntry.later = e + lastEntry.nn.later = e e.earlier = lastEntry } lastEntry = e @@ -360,9 +360,9 @@ class LinkedHashMap[K, V] /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */ private[this] def deleteEntry(e: Entry): Unit = { if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later + else e.earlier.nn.later = e.later if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier + else e.later.nn.earlier = e.earlier e.earlier = null e.later = null e.next = null @@ -375,7 +375,7 @@ class LinkedHashMap[K, V] put0(key, value, getOld, hash, idx) } - private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] | Null = { table(idx) match { case null => table(idx) = createNewEntry(key, hash, value) @@ -389,15 +389,15 @@ class LinkedHashMap[K, V] return if (getOld) Some(old) else null } prev = n - n = n.next + n = n.next.nn } val nnode = createNewEntry(key, hash, value) if (prev eq null) { nnode.next = old table(idx) = nnode } else { - nnode.next = prev.next - prev.next = nnode + nnode.next = prev.nn.next + prev.nn.next = nnode } } contentSize += 1 @@ -427,7 +427,7 @@ class LinkedHashMap[K, V] var lastHigh = preHigh var n = old while (n ne null) { - val next = n.next + val next = n.next.nn if ((n.hash & oldlen) == 0) { // keep low lastLow.next = n lastLow = n @@ -490,15 +490,15 @@ object LinkedHashMap extends MapFactory[LinkedHashMap] { /** Class for the linked hash map entry, used internally. */ private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V) { - var earlier: LinkedEntry[K, V] = null - var later: LinkedEntry[K, V] = null - var next: LinkedEntry[K, V] = null + var earlier: LinkedEntry[K, V] | Null = null + var later: LinkedEntry[K, V] | Null = null + var next: LinkedEntry[K, V] | Null = null @tailrec - final def findEntry(k: K, h: Int): LinkedEntry[K, V] = + final def findEntry(k: K, h: Int): LinkedEntry[K, V] | Null = if (h == hash && k == key) this else if ((next eq null) || (hash > h)) null - else next.findEntry(k, h) + else next.nn.findEntry(k, h) } /** The default load factor for the hash table */ diff --git a/library/src/scala/collection/mutable/LinkedHashSet.scala b/library/src/scala/collection/mutable/LinkedHashSet.scala index 1a189d607010..c48cf00801d8 100644 --- a/library/src/scala/collection/mutable/LinkedHashSet.scala +++ b/library/src/scala/collection/mutable/LinkedHashSet.scala @@ -45,35 +45,35 @@ class LinkedHashSet[A] /*private*/ type Entry = LinkedHashSet.Entry[A] - protected var firstEntry: Entry = null + protected var firstEntry: Entry | Null = null - protected var lastEntry: Entry = null + protected var lastEntry: Entry | Null = null /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant: * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. * - Every bucket is sorted in ascendant hash order * - The sum of the lengths of all buckets is equal to contentSize. 
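// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: LinkedHashMap above and
// LinkedHashSet below keep the insertion-order list through nullable
// `earlier`/`later` links, so unlinking an entry branches on `eq null` for each
// neighbour and uses `.nn` when re-wiring. A minimal doubly linked list with
// the same unlink shape, assuming Scala 3 with -Yexplicit-nulls; `DNode` and
// `DList` are hypothetical names.
final class DNode[A](val value: A) {
  var earlier: DNode[A] | Null = null
  var later: DNode[A] | Null = null
}

final class DList[A] {
  private var first: DNode[A] | Null = null
  private var last: DNode[A] | Null = null

  def append(a: A): DNode[A] = {
    val n = new DNode(a)
    if (first eq null) first = n
    else { last.nn.later = n; n.earlier = last }
    last = n
    n
  }

  def unlink(n: DNode[A]): Unit = {
    if (n.earlier eq null) first = n.later else n.earlier.nn.later = n.later
    if (n.later eq null) last = n.earlier else n.later.nn.earlier = n.earlier
    n.earlier = null
    n.later = null
  }

  def toList: List[A] = {
    val out = List.newBuilder[A]
    var cur: DNode[A] | Null = first
    while (cur ne null) { out += cur.nn.value; cur = cur.nn.later }
    out.result()
  }
}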
*/ - private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize)) + private[this] var table = new Array[Entry | Null](tableSizeFor(LinkedHashSet.defaultinitialSize)) private[this] var threshold: Int = newThreshold(table.length) private[this] var contentSize = 0 override def last: A = - if (size > 0) lastEntry.key + if (size > 0) lastEntry.nn.key else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") override def lastOption: Option[A] = - if (size > 0) Some(lastEntry.key) + if (size > 0) Some(lastEntry.nn.key) else None override def head: A = - if (size > 0) firstEntry.key + if (size > 0) firstEntry.nn.key else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") override def headOption: Option[A] = - if (size > 0) Some(firstEntry.key) + if (size > 0) Some(firstEntry.nn.key) else None override def size: Int = contentSize @@ -106,11 +106,11 @@ class LinkedHashSet[A] override def remove(elem: A): Boolean = remove0(elem, computeHash(elem)) private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] { - private[this] var cur = firstEntry + private[this] var cur: Entry | Null = firstEntry def extract(nd: Entry): T def hasNext: Boolean = cur ne null def next(): T = - if (hasNext) { val r = extract(cur); cur = cur.later; r } + if (hasNext) { val r = extract(cur.nn); cur = cur.nn.later; r } else Iterator.empty.next() } @@ -123,10 +123,10 @@ class LinkedHashSet[A] } override def foreach[U](f: A => U): Unit = { - var cur = firstEntry + var cur: Entry | Null = firstEntry while (cur ne null) { - f(cur.key) - cur = cur.later + f(cur.nn.key) + cur = cur.nn.later } } @@ -153,7 +153,7 @@ class LinkedHashSet[A] @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - @`inline` private[this] def findEntry(key: A): Entry = { + @`inline` private[this] def findEntry(key: A): Entry | Null = { val hash = computeHash(key) table(index(hash)) match { case null => null @@ -169,7 +169,7 @@ class LinkedHashSet[A] val e = new Entry(key, hash) if (firstEntry eq null) firstEntry = e else { - lastEntry.later = e + lastEntry.nn.later = e e.earlier = lastEntry } lastEntry = e @@ -179,9 +179,9 @@ class LinkedHashSet[A] /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */ private[this] def deleteEntry(e: Entry): Unit = { if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later + else e.earlier.nn.later = e.later if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier + else e.later.nn.earlier = e.earlier e.earlier = null e.later = null e.next = null @@ -192,7 +192,7 @@ class LinkedHashSet[A] case null => table(idx) = createNewEntry(elem, hash) case old => - var prev: Entry = null + var prev: Entry | Null = null var n = old while ((n ne null) && n.hash <= hash) { if (n.hash == hash && elem == n.key) return false @@ -245,7 +245,7 @@ class LinkedHashSet[A] throw new RuntimeException(s"new hash table size $newlen exceeds maximum") var oldlen = table.length threshold = newThreshold(newlen) - if (size == 0) table = new Array(newlen) + if (size == 0) table = new Array[Entry | Null](newlen) else { table = java.util.Arrays.copyOf(table, newlen) val preLow = new Entry(null.asInstanceOf[A], 0) @@ -328,15 +328,15 @@ object LinkedHashSet extends IterableFactory[LinkedHashSet] { /** Class for the linked hash set entry, used internally. 
*/ private[mutable] final class Entry[A](val key: A, val hash: Int) { - var earlier: Entry[A] = null - var later: Entry[A] = null - var next: Entry[A] = null + var earlier: Entry[A] | Null = null + var later: Entry[A] | Null = null + var next: Entry[A] | Null = null @tailrec - final def findEntry(k: A, h: Int): Entry[A] = + final def findEntry(k: A, h: Int): Entry[A] | Null = if (h == hash && k == key) this else if ((next eq null) || (hash > h)) null - else next.findEntry(k, h) + else next.nn.findEntry(k, h) } /** The default load factor for the hash table */ diff --git a/library/src/scala/collection/mutable/ListBuffer.scala b/library/src/scala/collection/mutable/ListBuffer.scala index 273704592abd..8f0c0b9cd81b 100644 --- a/library/src/scala/collection/mutable/ListBuffer.scala +++ b/library/src/scala/collection/mutable/ListBuffer.scala @@ -47,11 +47,11 @@ class ListBuffer[A] @transient private[this] var mutationCount: Int = 0 private var first: List[A] = Nil - private var last0: ::[A] = null // last element (`last0` just because the name `last` is already taken) + private var last0: ::[A] | Null = null // last element (`last0` just because the name `last` is already taken) private[this] var aliased = false private[this] var len = 0 - private type Predecessor[A0] = ::[A0] /*| Null*/ + private type Predecessor[A0] = ::[A0] | Null def iterator: Iterator[A] = new MutationTracker.CheckedIterator(first.iterator, mutationCount) @@ -99,7 +99,7 @@ class ListBuffer[A] if (isEmpty) xs else { ensureUnaliased() - last0.next = xs + last0.nn.next = xs toList } } @@ -115,7 +115,7 @@ class ListBuffer[A] final def addOne(elem: A): this.type = { ensureUnaliased() val last1 = new ::[A](elem, Nil) - if (len == 0) first = last1 else last0.next = last1 + if (len == 0) first = last1 else last0.nn.next = last1 last0 = last1 len += 1 this @@ -147,7 +147,7 @@ class ListBuffer[A] val fresh = new ListBuffer[A].freshFrom(it) ensureUnaliased() if (len == 0) first = fresh.first - else last0.next = fresh.first + else last0.nn.next = fresh.first last0 = fresh.last0 len += fresh.length } @@ -200,7 +200,7 @@ class ListBuffer[A] } private def getNext(p: Predecessor[A]): List[A] = - if (p == null) first else p.next + if (p == null) first else p.nn.next def update(idx: Int, elem: A): Unit = { ensureUnaliased() @@ -213,7 +213,7 @@ class ListBuffer[A] first = newElem } else { // `p` can not be `null` because the case where `idx == 0` is handled above - val p = locate(idx) + val p = locate(idx).nn val newElem = new :: (elem, p.tail.tail) if (last0 eq p.tail) { last0 = newElem @@ -229,7 +229,7 @@ class ListBuffer[A] else { val p = locate(idx) val nx = elem :: getNext(p) - if(p eq null) first = nx else p.next = nx + if(p == null) first = nx else p.nn.next = nx len += 1 } } @@ -243,8 +243,8 @@ class ListBuffer[A] private def insertAfter(prev: Predecessor[A], fresh: ListBuffer[A]): Unit = { if (!fresh.isEmpty) { val follow = getNext(prev) - if (prev eq null) first = fresh.first else prev.next = fresh.first - fresh.last0.next = follow + if (prev == null) first = fresh.first else prev.nn.next = fresh.first + fresh.last0.nn.next = follow if (follow.isEmpty) last0 = fresh.last0 len += fresh.length } @@ -268,12 +268,12 @@ class ListBuffer[A] if (idx < 0 || idx >= len) throw CommonErrors.indexOutOfBounds(index = idx, max = len - 1) val p = locate(idx) val nx = getNext(p) - if(p eq null) { + if(p == null) { first = nx.tail if(first.isEmpty) last0 = null } else { if(last0 eq nx) last0 = p - p.next = nx.tail + p.nn.next = nx.tail } len -= 1 
nx.head @@ -292,7 +292,7 @@ class ListBuffer[A] @tailrec def ahead(p: List[A], n: Int): List[A] = if (n == 0) p else ahead(p.tail, n - 1) val nx = ahead(getNext(prev), n) - if(prev eq null) first = nx else prev.next = nx + if(prev == null) first = nx else prev.nn.next = nx if(nx.isEmpty) last0 = prev len -= n } @@ -320,20 +320,20 @@ class ListBuffer[A] def flatMapInPlace(f: A => IterableOnce[A]): this.type = { mutationCount += 1 var src = first - var dst: List[A] = null + var dst: List[A] | Null = null last0 = null len = 0 while(!src.isEmpty) { val it = f(src.head).iterator while(it.hasNext) { val v = new ::(it.next(), Nil) - if(dst eq null) dst = v else last0.next = v + if(dst eq null) dst = v else last0.nn.next = v last0 = v len += 1 } src = src.tail } - first = if(dst eq null) Nil else dst + first = if(dst eq null) Nil else dst.nn aliased = false // we just rebuilt a fresh, unaliased instance this } @@ -345,13 +345,13 @@ class ListBuffer[A] */ def filterInPlace(p: A => Boolean): this.type = { ensureUnaliased() - var prev: Predecessor[A] = null + var prev: Predecessor[A] | Null = null var cur: List[A] = first while (!cur.isEmpty) { val follow = cur.tail if (!p(cur.head)) { if(prev eq null) first = follow - else prev.next = follow + else prev.nn.next = follow len -= 1 } else { prev = cur.asInstanceOf[Predecessor[A]] @@ -393,7 +393,7 @@ class ListBuffer[A] * @return The last element of this $coll. * @throws NoSuchElementException If the $coll is empty. */ - override def last: A = if (last0 eq null) throw new NoSuchElementException("last of empty ListBuffer") else last0.head + override def last: A = if (last0 eq null) throw new NoSuchElementException("last of empty ListBuffer") else last0.nn.head /** * Optionally selects the last element. @@ -402,7 +402,7 @@ class ListBuffer[A] * * @return the last element of this $coll$ if it is nonempty, `None` if it is empty. 
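// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: ListBuffer above models "the cell
// before position idx" as a nullable predecessor (`Predecessor[A0] = ::[A0] | Null`),
// where null stands for "insert at the front", and keeps the last cons cell in
// a nullable `last0`. The stdlib can mutate `::` directly; this self-contained
// sketch uses its own cons type instead. Assumes Scala 3 with -Yexplicit-nulls;
// `Chain` and `ChainBuffer` are hypothetical names.
final class Chain[A](val head: A) { var next: Chain[A] | Null = null }

final class ChainBuffer[A] {
  private type Pred = Chain[A] | Null // null means "before the first cell"
  private var first: Chain[A] | Null = null
  private var last: Chain[A] | Null = null
  private var len = 0

  private def locate(idx: Int): Pred = // cell just before position idx, or null when idx == 0
    if (idx == 0) null
    else {
      var p = first.nn
      var i = 1
      while (i < idx) { p = p.next.nn; i += 1 }
      p
    }

  private def getNext(p: Pred): Chain[A] | Null =
    if (p == null) first else p.nn.next

  def addOne(a: A): this.type = {
    val cell = new Chain(a)
    if (len == 0) first = cell else last.nn.next = cell // patch the previous tail cell
    last = cell
    len += 1
    this
  }

  def insert(idx: Int, a: A): Unit = {
    require(idx >= 0 && idx <= len)
    if (idx == len) { addOne(a); () }
    else {
      val p = locate(idx)
      val cell = new Chain(a)
      cell.next = getNext(p)
      if (p == null) first = cell else p.nn.next = cell
      len += 1
    }
  }

  def toList: List[A] = {
    val out = List.newBuilder[A]
    var cur: Chain[A] | Null = first
    while (cur ne null) { out += cur.nn.head; cur = cur.nn.next }
    out.result()
  }
}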
*/ - override def lastOption: Option[A] = if (last0 eq null) None else Some(last0.head) + override def lastOption: Option[A] = if (last0 eq null) None else Option(last0.nn.head) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix = "ListBuffer" diff --git a/library/src/scala/collection/mutable/LongMap.scala b/library/src/scala/collection/mutable/LongMap.scala index e36c337437e3..9600d51c87e7 100644 --- a/library/src/scala/collection/mutable/LongMap.scala +++ b/library/src/scala/collection/mutable/LongMap.scala @@ -70,12 +70,12 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff private[this] var mask = 0 private[this] var extraKeys: Int = 0 - private[this] var zeroValue: AnyRef = null - private[this] var minValue: AnyRef = null + private[this] var zeroValue: AnyRef | Null = null + private[this] var minValue: AnyRef | Null = null private[this] var _size = 0 private[this] var _vacant = 0 - private[this] var _keys: Array[Long] = null - private[this] var _values: Array[AnyRef] = null + private[this] var _keys: Array[Long] | Null = null + private[this] var _values: Array[AnyRef] | Null = null if (initBlank) defaultInitialize(initialBufferSize) @@ -88,7 +88,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff } private[collection] def initializeTo( - m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] + m: Int, ek: Int, zv: AnyRef | Null, mv: AnyRef | Null, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] ): Unit = { mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz } @@ -111,7 +111,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff private def seekEmpty(k: Long): Int = { var e = toIndex(k) var x = 0 - while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + while (_keys.nn(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } e } @@ -119,7 +119,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff var e = toIndex(k) var x = 0 var q = 0L - while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + while ({ q = _keys.nn(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } e | MissingBit } @@ -127,13 +127,13 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff var e = toIndex(k) var x = 0 var q = 0L - while ({ q = _keys(e); if (q==k) return e; q+q != 0}) { + while ({ q = _keys.nn(e); if (q==k) return e; q+q != 0}) { x += 1 e = (e + 2*(x+1)*x - 3) & mask } if (q == 0) return e | MissingBit val o = e | MissVacant - while ({ q = _keys(e); if (q==k) return e; q != 0}) { + while ({ q = _keys.nn(e); if (q==k) return e; q != 0}) { x += 1 e = (e + 2*(x+1)*x - 3) & mask } @@ -153,7 +153,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff } else { val i = seekEntry(key) - if (i < 0) None else Some(_values(i).asInstanceOf[V]) + if (i < 0) None else Some(_values.nn(i).asInstanceOf[V]) } } @@ -165,7 +165,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff } else { val i = seekEntry(key) - if (i < 0) default else _values(i).asInstanceOf[V1] + if (i < 0) default else _values.nn(i).asInstanceOf[V1] } } @@ -186,7 +186,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff var i = seekEntryOrOpen(key) if (i < 0) { val 
value = { - val oks = _keys + val oks = _keys.nn val j = i & IndexMask val ok = oks(j) val ans = defaultValue @@ -203,13 +203,13 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff } _size += 1 val j = i & IndexMask - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] + _keys.nn(j) = key + _values.nn(j) = value.asInstanceOf[AnyRef] if ((i & VacantBit) != 0) _vacant -= 1 else if (imbalanced) repack() value } - else _values(i).asInstanceOf[V] + else _values.nn(i).asInstanceOf[V] } } @@ -228,7 +228,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff } else { val i = seekEntry(key) - if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V] + if (i < 0) null.asInstanceOf[V] else _values.nn(i).asInstanceOf[V] } } @@ -244,7 +244,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff } else { val i = seekEntry(key) - if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] + if (i < 0) defaultEntry(key) else _values.nn(i).asInstanceOf[V] } } @@ -254,8 +254,8 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff override def default(key: Long) = defaultEntry(key) private def repack(newMask: Int): Unit = { - val ok = _keys - val ov = _values + val ok = _keys.nn + val ov = _values.nn mask = newMask _keys = new Array[Long](mask+1) _values = new Array[AnyRef](mask+1) @@ -265,8 +265,8 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff val k = ok(i) if (k != -k) { val j = seekEmpty(k) - _keys(j) = k - _values(j) = ov(i) + _keys.nn(j) = k + _values.nn(j) = ov(i) } i += 1 } @@ -385,12 +385,12 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff private[this] val kz = _keys private[this] val vz = _values - private[this] var nextPair: (Long, V) = + private[this] var nextPair: (Long, V) | Null = if (extraKeys==0) null else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) else (Long.MinValue, minValue.asInstanceOf[V]) - private[this] var anotherPair: (Long, V) = + private[this] var anotherPair: (Long, V) | Null = if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) else null @@ -409,9 +409,9 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff }) def next() = { if (nextPair == null && !hasNext) throw new NoSuchElementException("next") - val ans = nextPair + val ans = nextPair.nn if (anotherPair != null) { - nextPair = anotherPair + nextPair = anotherPair.nn anotherPair = null } else nextPair = null @@ -455,7 +455,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff val kz = java.util.Arrays.copyOf(_keys, _keys.length) val vz = java.util.Arrays.copyOf(_values, _values.length) val lm = new LongMap[V](defaultEntry, 1, initBlank = false) - lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) + lm.initializeTo(mask, extraKeys, zeroValue.nn, minValue.nn, _size, _vacant, kz, vz) lm } diff --git a/library/src/scala/collection/mutable/OpenHashMap.scala b/library/src/scala/collection/mutable/OpenHashMap.scala index 5840a0abc954..3299ec766642 100644 --- a/library/src/scala/collection/mutable/OpenHashMap.scala +++ b/library/src/scala/collection/mutable/OpenHashMap.scala @@ -69,7 +69,7 @@ class OpenHashMap[Key, Value](initialSize : Int) with DefaultSerializable { import OpenHashMap.OpenEntry - private type Entry = OpenEntry[Key, Value] + private type Entry = OpenEntry[Key, Value] | Null /** A default constructor creates 
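// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: LongMap's iterator above stages
// its two special pairs (for key 0 and key Long.MinValue) in nullable fields
// and hands them out before scanning the table, so `hasNext` and `next()`
// branch on nullness and use `.nn` once a staged element is known to exist.
// A cut-down iterator with the same staging, assuming Scala 3 with
// -Yexplicit-nulls; `StagedIterator` is a hypothetical name.
final class StagedIterator[A <: AnyRef](first: A | Null, second: A | Null, rest: Iterator[A])
    extends Iterator[A] {
  // Invariant borrowed from LongMap: the second staged element is only kept
  // while the first one is still pending.
  private var pending: A | Null = if (first ne null) first else second
  private var pendingLater: A | Null = if (first ne null) second else null

  def hasNext: Boolean = (pending ne null) || rest.hasNext

  def next(): A =
    if (pending ne null) {
      val ans = pending.nn
      pending = pendingLater // may be null, meaning nothing else is staged
      pendingLater = null
      ans
    }
    else rest.next()
}
// Usage: new StagedIterator("zero", null, Iterator("a", "b")) yields "zero", "a", "b".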
a hashmap with initial size `8`. */ @@ -115,8 +115,8 @@ class OpenHashMap[Key, Value](initialSize : Int) table = new Array[Entry](newSize) mask = newSize - 1 oldTable.foreach( entry => - if (entry != null && entry.value != None) - table(findIndex(entry.key, entry.hash)) = entry ) + if (entry != null && entry.nn.value != None) + table(findIndex(entry.nn.key, entry.nn.hash)) = entry ) deleted = 0 } @@ -134,10 +134,10 @@ class OpenHashMap[Key, Value](initialSize : Int) var entry = table(index) while (entry != null) { - if (entry.hash == hash && entry.key == key && entry.value != None) + if (entry.nn.hash == hash && entry.nn.key == key && entry.nn.value != None) return index - if (firstDeletedIndex == -1 && entry.value == None) + if (firstDeletedIndex == -1 && entry.nn.value == None) firstDeletedIndex = index j += 1 @@ -170,15 +170,15 @@ class OpenHashMap[Key, Value](initialSize : Int) size += 1 None } else { - val res = entry.value - if (entry.value == None) { - entry.key = key - entry.hash = hash + val res = entry.nn.value + if (entry.nn.value == None) { + entry.nn.key = key + entry.nn.hash = hash size += 1 deleted -= 1 modCount += 1 } - entry.value = Some(value) + entry.nn.value = Some(value) res } } @@ -186,9 +186,10 @@ class OpenHashMap[Key, Value](initialSize : Int) /** Delete the hash table slot contained in the given entry. */ @`inline` private[this] def deleteSlot(entry: Entry) = { - entry.key = null.asInstanceOf[Key] - entry.hash = 0 - entry.value = None + assert(entry != null) + entry.nn.key = null.asInstanceOf[Key] + entry.nn.hash = 0 + entry.nn.value = None size -= 1 deleted += 1 @@ -196,8 +197,8 @@ class OpenHashMap[Key, Value](initialSize : Int) override def remove(key : Key): Option[Value] = { val entry = table(findIndex(key, hashOf(key))) - if (entry != null && entry.value != None) { - val res = entry.value + if (entry != null && entry.nn.value != None) { + val res = entry.nn.value deleteSlot(entry) res } else None @@ -209,9 +210,9 @@ class OpenHashMap[Key, Value](initialSize : Int) var entry = table(index) var j = 0 while(entry != null){ - if (entry.hash == hash && - entry.key == key){ - return entry.value + if (entry.nn.hash == hash && + entry.nn.key == key){ + return entry.nn.value } j += 1 @@ -227,14 +228,14 @@ class OpenHashMap[Key, Value](initialSize : Int) * @return the iterator */ def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] { - override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get) + override protected def nextResult(node: Entry): (Key, Value) = (node.nn.key, node.nn.value.get) } override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] { - override protected def nextResult(node: Entry): Key = node.key + override protected def nextResult(node: Entry): Key = node.nn.key } override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] { - override protected def nextResult(node: Entry): Value = node.value.get + override protected def nextResult(node: Entry): Value = node.nn.value.get } private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] { @@ -243,14 +244,14 @@ class OpenHashMap[Key, Value](initialSize : Int) private[this] def advance(): Unit = { if (initialModCount != modCount) throw new ConcurrentModificationException - while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 + while((index <= mask) && (table(index) == null || table(index).nn.value == None)) index+=1 } def hasNext = {advance(); index <= mask } def next() = 
{ advance() - val result = table(index) + val result = table(index).nn index += 1 nextResult(result) } @@ -259,7 +260,7 @@ class OpenHashMap[Key, Value](initialSize : Int) override def clone() = { val it = new OpenHashMap[Key, Value] - foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)) + foreachUndeletedEntry(entry => it.put(entry.nn.key, entry.nn.hash, entry.nn.value.get)) it } @@ -277,28 +278,28 @@ class OpenHashMap[Key, Value](initialSize : Int) val startModCount = modCount foreachUndeletedEntry(entry => { if (modCount != startModCount) throw new ConcurrentModificationException - f((entry.key, entry.value.get))} + f((entry.nn.key, entry.nn.value.get))} ) } override def foreachEntry[U](f : (Key, Value) => U): Unit = { val startModCount = modCount foreachUndeletedEntry(entry => { if (modCount != startModCount) throw new ConcurrentModificationException - f(entry.key, entry.value.get)} + f(entry.nn.key, entry.nn.value.get)} ) } private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = { - table.foreach(entry => if (entry != null && entry.value != None) f(entry)) + table.foreach(entry => if (entry != null && entry.nn.value != None) f(entry)) } override def mapValuesInPlace(f : (Key, Value) => Value): this.type = { - foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) + foreachUndeletedEntry(entry => entry.nn.value = Some(f(entry.nn.key, entry.nn.value.get))) this } override def filterInPlace(f : (Key, Value) => Boolean): this.type = { - foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) + foreachUndeletedEntry(entry => if (entry != null && !f(entry.nn.key, entry.nn.value.get)) deleteSlot(entry)) this } diff --git a/library/src/scala/collection/mutable/Queue.scala b/library/src/scala/collection/mutable/Queue.scala index cc3dad2e2495..9daf9328af02 100644 --- a/library/src/scala/collection/mutable/Queue.scala +++ b/library/src/scala/collection/mutable/Queue.scala @@ -27,7 +27,7 @@ import scala.collection.generic.DefaultSerializable * @define mayNotTerminateInf * @define willNotTerminateInf */ -class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) +class Queue[A] protected (array: Array[AnyRef | Null], start: Int, end: Int) extends ArrayDeque[A](array, start, end) with IndexedSeqOps[A, Queue, Queue[A]] with StrictOptimizedSeqOps[A, Queue, Queue[A]] @@ -37,7 +37,7 @@ class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) with DefaultSerializable { def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = - this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + this(ArrayDeque.alloc[AnyRef | Null](initialSize), start = 0, end = 0) override def iterableFactory: SeqFactory[Queue] = Queue @@ -116,7 +116,7 @@ class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) bf.result() } - override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] = + override protected def ofArray(array: Array[AnyRef | Null], end: Int): Queue[A] = new Queue(array, start = 0, end) } diff --git a/library/src/scala/collection/mutable/Stack.scala b/library/src/scala/collection/mutable/Stack.scala index 01aacc22c65e..cb0cfdfc20e5 100644 --- a/library/src/scala/collection/mutable/Stack.scala +++ b/library/src/scala/collection/mutable/Stack.scala @@ -33,7 +33,7 @@ import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, Stri * @define willNotTerminateInf */ @migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") -class 
Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) +class Stack[A] protected (array: Array[AnyRef | Null], start: Int, end: Int) extends ArrayDeque[A](array, start, end) with IndexedSeqOps[A, Stack, Stack[A]] with StrictOptimizedSeqOps[A, Stack, Stack[A]] @@ -43,7 +43,7 @@ class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) with DefaultSerializable { def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = - this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + this(ArrayDeque.alloc[AnyRef | Null](initialSize), start = 0, end = 0) override def iterableFactory: SeqFactory[Stack] = Stack @@ -120,7 +120,7 @@ class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) bf.result() } - override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] = + override protected def ofArray(array: Array[AnyRef | Null], end: Int): Stack[A] = new Stack(array, start = 0, end) } diff --git a/library/src/scala/collection/mutable/UnrolledBuffer.scala b/library/src/scala/collection/mutable/UnrolledBuffer.scala index 4aecac001505..26090397864d 100644 --- a/library/src/scala/collection/mutable/UnrolledBuffer.scala +++ b/library/src/scala/collection/mutable/UnrolledBuffer.scala @@ -128,26 +128,24 @@ sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) def iterator: Iterator[T] = new AbstractIterator[T] { var pos: Int = -1 - var node: Unrolled[T] = headptr + var node: Unrolled[T] | Null = headptr scan() private def scan(): Unit = { pos += 1 - while (pos >= node.size) { + while (node != null && pos >= node.size) { pos = 0 node = node.next - if (node eq null) return } } - def hasNext = node ne null + def hasNext = node != null def next() = if (hasNext) { - val r = node.array(pos) + val r = node.nn.array(pos) scan() r } else Iterator.empty.next() } - // this should be faster than the iterator override def foreach[U](f: T => U) = headptr.foreach(f) def result() = this @@ -261,12 +259,16 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] private[collection] val unrolledlength = 32 /** Unrolled buffer node. 
- */ - class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { + */ class Unrolled[T: ClassTag] private[collection] ( + var size: Int, + var array: Array[T], + var next: Unrolled[T] | Null, + val buff: UnrolledBuffer[T] | Null + ) { private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) - private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length) + private def nextlength = if (buff == null) unrolledlength else buff.nn.calcNextLength(array.length) // adds and returns itself or the new unrolled if full @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) { @@ -275,12 +277,12 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] this } else { next = new Unrolled[T](0, new Array[T](nextlength), null, buff) - next append elem + next.nn.append(elem) } def foreach[U](f: T => U): Unit = { - var unrolled = this + var unrolled: Unrolled[T] | Null = this var i = 0 - while (unrolled ne null) { + while (unrolled != null) { val chunkarr = unrolled.array val chunksz = unrolled.size while (i < chunksz) { @@ -293,9 +295,9 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] } } def mapInPlace(f: T => T): Unit = { - var unrolled = this + var unrolled: Unrolled[T] | Null = this var i = 0 - while (unrolled ne null) { + while (unrolled != null) { val chunkarr = unrolled.array val chunksz = unrolled.size while (i < chunksz) { @@ -346,7 +348,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] size -= 1 if (tryMergeWithNext()) buffer.lastPtr = this r - } else next.remove(idx - size, buffer) + } else next.nn.remove(idx - size, buffer) @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = { var i = 0 @@ -357,7 +359,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] } i += 1 } - if(next ne null) next.subtractOne(elem, buffer) else false + if(next ne null) next.nn.subtractOne(elem, buffer) else false } // shifts left elements after `leftb` (overwrites `leftb`) @@ -369,11 +371,11 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] } nullout(i, i + 1) } - protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) { + protected def tryMergeWithNext() = if (next != null && (size + next.nn.size) < (array.length * waterline / waterlineDenom)) { // copy the next array, then discard the next node - Array.copy(next.array, 0, array, size, next.size) - size = size + next.size - next = next.next + Array.copy(next.nn.array, 0, array, size, next.nn.size) + size = size + next.nn.size + next = next.nn.next if (next eq null) true else false // checks if last node was thrown out } else false @@ -381,7 +383,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] if (idx < size) { // divide this node at the appropriate position and insert all into head // update new next - val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) + val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff.nn) Array.copy(array, idx, newnextnode.array, 0, size - idx) newnextnode.size = size - idx newnextnode.next = next @@ -414,7 +416,7 @@ object UnrolledBuffer extends 
StrictOptimizedClassTagSeqFactory[UnrolledBuffer] } appended } - else next.insertAll(idx - size, t, buffer) + else next.nn.insertAll(idx - size, t, buffer) } private def nullout(from: Int, until: Int): Unit = { @@ -443,5 +445,5 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] // Todo -- revisit whether inheritance is the best way to achieve this functionality private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz - override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) + override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null.asInstanceOf[Unrolled[T] | Null], this) } diff --git a/library/src/scala/concurrent/BatchingExecutor.scala b/library/src/scala/concurrent/BatchingExecutor.scala index ac197c89f8c1..5577d01bdcbc 100644 --- a/library/src/scala/concurrent/BatchingExecutor.scala +++ b/library/src/scala/concurrent/BatchingExecutor.scala @@ -90,14 +90,14 @@ private[concurrent] object BatchingExecutorStatics { * */ private[concurrent] trait BatchingExecutor extends Executor { - private[this] final val _tasksLocal = new ThreadLocal[AnyRef]() + private[this] final val _tasksLocal = new ThreadLocal[AnyRef | Null]() /* * Batch implements a LIFO queue (stack) and is used as a trampolining Runnable. * In order to conserve allocations, the first element in the batch is stored "unboxed" in * the `first` field. Subsequent Runnables are stored in the array called `other`. */ - private[this] sealed abstract class AbstractBatch protected (protected final var first: Runnable, protected final var other: Array[Runnable], protected final var size: Int) { + private[this] sealed abstract class AbstractBatch protected (protected final var first: Runnable | Null, protected final var other: Array[Runnable], protected final var size: Int) { private[this] final def ensureCapacity(curSize: Int): Array[Runnable] = { val curOther = this.other @@ -128,7 +128,7 @@ private[concurrent] trait BatchingExecutor extends Executor { (this.size: @switch) match { case 0 => case 1 => - val next = this.first + val next = this.first.nn this.first = null this.size = 0 next.run() @@ -136,14 +136,14 @@ private[concurrent] trait BatchingExecutor extends Executor { case sz => val o = this.other val next = o(sz - 2) - o(sz - 2) = null + o(sz - 2) = null.asInstanceOf[Runnable] // Explicit cast to Runnable this.size = sz - 1 next.run() runN(n - 1) } } - private[this] final class AsyncBatch private(_first: Runnable, _other: Array[Runnable], _size: Int) extends AbstractBatch(_first, _other, _size) with Runnable with BlockContext with (BlockContext => Throwable) { + private[this] final class AsyncBatch private(_first: Runnable | Null, _other: Array[Runnable], _size: Int) extends AbstractBatch(_first, _other, _size) with Runnable with BlockContext with (BlockContext => Throwable | Null) { private[this] final var parentBlockContext: BlockContext = BatchingExecutorStatics.MissingParentBlockContext final def this(runnable: Runnable) = this(runnable, BatchingExecutorStatics.emptyBatchArray, 1) @@ -158,7 +158,7 @@ private[concurrent] trait BatchingExecutor extends Executor { } /* LOGIC FOR ASYNCHRONOUS BATCHES */ - override final def apply(prevBlockContext: BlockContext): Throwable = try { + override final def apply(prevBlockContext: BlockContext): Throwable | Null = try { parentBlockContext = prevBlockContext 
runN(BatchingExecutorStatics.runLimit) null @@ -174,12 +174,12 @@ private[concurrent] trait BatchingExecutor extends Executor { * Only attempt to resubmit when there are `Runnables` left to process. * Note that `cause` can be `null`. */ - private[this] final def resubmit(cause: Throwable): Throwable = + private[this] final def resubmit(cause: Throwable | Null): Throwable | Null = if (this.size > 0) { try { submitForExecution(this); cause } catch { case inner: Throwable => if (NonFatal(inner)) { - val e = new ExecutionException("Non-fatal error occurred and resubmission failed, see suppressed exception.", cause) + val e = new ExecutionException("Non-fatal error occurred and resubmission failed, see suppressed exception.", cause.nn) e.addSuppressed(inner) e } else inner @@ -234,7 +234,7 @@ private[concurrent] trait BatchingExecutor extends Executor { */ protected final def submitAsyncBatched(runnable: Runnable): Unit = { val b = _tasksLocal.get - if (b.isInstanceOf[AsyncBatch]) b.asInstanceOf[AsyncBatch].push(runnable) + if (b != null && b.isInstanceOf[AsyncBatch]) b.asInstanceOf[AsyncBatch].push(runnable) else submitForExecution(new AsyncBatch(runnable)) } @@ -246,7 +246,7 @@ private[concurrent] trait BatchingExecutor extends Executor { Objects.requireNonNull(runnable, "runnable is null") val tl = _tasksLocal val b = tl.get - if (b.isInstanceOf[SyncBatch]) b.asInstanceOf[SyncBatch].push(runnable) + if (b != null && b.isInstanceOf[SyncBatch]) b.asInstanceOf[SyncBatch].push(runnable) else { val i = if (b ne null) b.asInstanceOf[java.lang.Integer].intValue else 0 if (i < BatchingExecutorStatics.syncPreBatchDepth) { diff --git a/library/src/scala/concurrent/Channel.scala b/library/src/scala/concurrent/Channel.scala index a9ada60e3da0..e6569b5df594 100644 --- a/library/src/scala/concurrent/Channel.scala +++ b/library/src/scala/concurrent/Channel.scala @@ -21,7 +21,7 @@ package scala.concurrent class Channel[A] { private class LinkedList { var elem: A = _ - var next: LinkedList = _ + var next: LinkedList | Null = _ } private[this] var written = new LinkedList // FIFO queue, realized through private[this] var lastWritten = written // aliasing of a linked list @@ -35,7 +35,7 @@ class Channel[A] { def write(x: A): Unit = synchronized { lastWritten.elem = x lastWritten.next = new LinkedList - lastWritten = lastWritten.next + lastWritten = lastWritten.next.nn if (nreaders > 0) notify() } @@ -53,7 +53,7 @@ class Channel[A] { finally nreaders -= 1 } val x = written.elem - written = written.next + written = written.next.nn x } } diff --git a/library/src/scala/concurrent/ExecutionContext.scala b/library/src/scala/concurrent/ExecutionContext.scala index b132e2dee5b7..1df336a47989 100644 --- a/library/src/scala/concurrent/ExecutionContext.scala +++ b/library/src/scala/concurrent/ExecutionContext.scala @@ -197,7 +197,7 @@ object ExecutionContext { * * @return the global [[ExecutionContext]] */ - final lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor) + final lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor | Null) /** * WARNING: Only ever execute logic which will quickly return control to the caller. 
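The Channel change above applies the nullable-link pattern used throughout this patch for mutable linked structures: link fields become `Node | Null`, and traversal re-asserts non-nullness with `.nn`, since flow typing does not narrow fields. A minimal sketch under that assumption; `Node` and `Fifo` are hypothetical names, not part of the patch:

// Sketch only, not part of the patch: a sentinel-terminated FIFO with nullable links.
final class Node[A](var elem: A | Null, var next: Node[A] | Null)

final class Fifo[A] {
  private[this] var last: Node[A] = new Node(null, null)   // sentinel cell, fields unset
  private[this] var first: Node[A] = last

  def put(a: A): Unit = {
    last.elem = a
    last.next = new Node(null, null)
    last = last.next.nn          // field read: `.nn` needed, fields are not flow-typed
  }

  def takeOption(): Option[A] =
    if (first eq last) None      // only the sentinel is left: empty
    else { val h = first; first = h.next.nn; Some(h.elem.nn) }
}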
@@ -253,7 +253,7 @@ object ExecutionContext { * @param reporter a function for error reporting * @return the `ExecutionContext` using the given `ExecutorService` */ - def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService = + def fromExecutorService(e: ExecutorService | Null, reporter: Throwable => Unit): ExecutionContextExecutorService = impl.ExecutionContextImpl.fromExecutorService(e, reporter) /** Creates an `ExecutionContext` from the given `ExecutorService` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. @@ -269,7 +269,7 @@ object ExecutionContext { * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. * @return the `ExecutionContext` using the given `ExecutorService` */ - def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter) + def fromExecutorService(e: ExecutorService | Null): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter) /** Creates an `ExecutionContext` from the given `Executor`. * @@ -277,7 +277,7 @@ object ExecutionContext { * @param reporter a function for error reporting * @return the `ExecutionContext` using the given `Executor` */ - def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor = + def fromExecutor(e: Executor | Null, reporter: Throwable => Unit): ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(e, reporter) /** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. @@ -285,7 +285,7 @@ object ExecutionContext { * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. * @return the `ExecutionContext` using the given `Executor` */ - def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) + def fromExecutor(e: Executor | Null): ExecutionContextExecutor = fromExecutor(e, defaultReporter) /** The default reporter simply prints the stack trace of the `Throwable` to [[java.lang.System#err System.err]]. 
* diff --git a/library/src/scala/concurrent/impl/ExecutionContextImpl.scala b/library/src/scala/concurrent/impl/ExecutionContextImpl.scala index 262a12b1b4b9..320d1181fb34 100644 --- a/library/src/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/library/src/scala/concurrent/impl/ExecutionContextImpl.scala @@ -16,9 +16,9 @@ import java.util.concurrent.{ Semaphore, ForkJoinPool, ForkJoinWorkerThread, Cal import java.util.Collection import scala.concurrent.{ BlockContext, ExecutionContext, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService } -private[scala] class ExecutionContextImpl private[impl] (final val executor: Executor, final val reporter: Throwable => Unit) extends ExecutionContextExecutor { - require(executor ne null, "Executor must not be null") - override final def execute(runnable: Runnable): Unit = executor execute runnable +private[scala] class ExecutionContextImpl private[impl] (final val executor: Executor | Null, final val reporter: Throwable => Unit) extends ExecutionContextExecutor { + // require(executor ne null, "Executor must not be null") + override final def execute(runnable: Runnable): Unit = executor.nn execute runnable override final def reportFailure(t: Throwable): Unit = reporter(t) } @@ -28,7 +28,7 @@ private[concurrent] object ExecutionContextImpl { final val daemonic: Boolean, final val maxBlockers: Int, final val prefix: String, - final val uncaught: Thread.UncaughtExceptionHandler) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { + final val uncaught: Thread.UncaughtExceptionHandler | Null) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { require(prefix ne null, "DefaultThreadFactory.prefix must be non null") require(maxBlockers >= 0, "DefaultThreadFactory.maxBlockers must be greater-or-equal-to 0") @@ -53,7 +53,7 @@ private[concurrent] object ExecutionContextImpl { try { val b: ForkJoinPool.ManagedBlocker with (() => T) = new ForkJoinPool.ManagedBlocker with (() => T) { - private[this] final var result: T = null.asInstanceOf[T] + private[this] final var result: T = _ private[this] final var done: Boolean = false final override def block(): Boolean = { if (!done) { @@ -108,13 +108,13 @@ private[concurrent] object ExecutionContextImpl { } } - def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextExecutor = + def fromExecutor(e: Executor | Null, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextExecutor = e match { case null => createDefaultExecutorService(reporter) case some => new ExecutionContextImpl(some, reporter) } - def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): + def fromExecutorService(es: ExecutorService | Null, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextExecutorService = es match { case null => createDefaultExecutorService(reporter) case some => diff --git a/library/src/scala/concurrent/impl/FutureConvertersImpl.scala b/library/src/scala/concurrent/impl/FutureConvertersImpl.scala index a9eed4cbb055..854990325271 100644 --- a/library/src/scala/concurrent/impl/FutureConvertersImpl.scala +++ b/library/src/scala/concurrent/impl/FutureConvertersImpl.scala @@ -48,9 +48,11 @@ private[scala] object FutureConvertersImpl { override def thenCompose[U](fn: JFunction[_ >: T, _ <: CompletionStage[U]]): CompletableFuture[U] = thenComposeAsync(fn) - override def whenComplete(fn: BiConsumer[_ >: T, _ >: Throwable]): 
CompletableFuture[T] = whenCompleteAsync(fn) + override def whenComplete(fn: BiConsumer[_ >: T, _ >: Throwable | Null]): CompletableFuture[T] = whenCompleteAsync(fn) - override def handle[U](fn: BiFunction[_ >: T, Throwable, _ <: U]): CompletableFuture[U] = handleAsync(fn) + override def handle[U](fn: BiFunction[_ >: T, Throwable | Null, _ <: U] | Null): CompletableFuture[U] = + if (fn == null) handleAsync(null) + else handleAsync(fn) override def exceptionally(fn: JFunction[Throwable, _ <: T]): CompletableFuture[T] = { val cf = new CompletableFuture[T] @@ -91,11 +93,10 @@ private[scala] object FutureConvertersImpl { override def toString(): String = super[CompletableFuture].toString } - final class P[T](val wrapped: CompletionStage[T]) extends DefaultPromise[T] with BiFunction[T, Throwable, Unit] { - override def apply(v: T, e: Throwable): Unit = { + final class P[T](val wrapped: CompletionStage[T]) extends DefaultPromise[T] with BiFunction[T, Throwable | Null, Unit] { + override def apply(v: T, e: Throwable | Null): Unit = { if (e == null) success(v) else failure(e) } } } - diff --git a/library/src/scala/concurrent/impl/Promise.scala b/library/src/scala/concurrent/impl/Promise.scala index 89f1addb8aa8..2c8eb167df75 100644 --- a/library/src/scala/concurrent/impl/Promise.scala +++ b/library/src/scala/concurrent/impl/Promise.scala @@ -32,8 +32,8 @@ import java.io.{IOException, NotSerializableException, ObjectInputStream, Object */ private[impl] final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) { //@volatie not needed since we use acquire/release - /*@volatile*/ private[this] var _result: Try[T] = null - final def result: Try[T] = _result + /*@volatile*/ private[this] var _result: Try[T] | Null = null + final def result: Try[T] | Null = _result override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1 override protected def tryReleaseShared(ignore: Int): Boolean = { setState(1) @@ -236,7 +236,7 @@ private[concurrent] object Promise { else /*if (state.isInstanceOf[Callbacks[T]]) */ "Future()" } - private[this] final def tryAwait0(atMost: Duration): Try[T] = + private[this] final def tryAwait0(atMost: Duration): Try[T] | Null = if (atMost ne Duration.Undefined) { val v = value0 if (v ne null) v @@ -268,14 +268,14 @@ private[concurrent] object Promise { @throws(classOf[Exception]) final def result(atMost: Duration)(implicit permit: CanAwait): T = - tryAwait0(atMost).get // returns the value, or throws the contained exception + tryAwait0(atMost).nn.get // returns the value, or throws the contained exception override final def isCompleted: Boolean = value0 ne null override final def value: Option[Try[T]] = Option(value0) @tailrec // returns null if not completed - private final def value0: Try[T] = { + private final def value0: Try[T] | Null = { val state = get() if (state.isInstanceOf[Try[_]]) state.asInstanceOf[Try[T]] else if (state.isInstanceOf[Link[_]]) state.asInstanceOf[Link[T]].promise(this).value0 @@ -346,18 +346,18 @@ private[concurrent] object Promise { concatCallbacks(m.rest, new ManyCallbacks(m.first, right)) } - @tailrec private[this] final def removeCallback(cs: Callbacks[T], t: Transformation[_, _], result: Callbacks[T] = null): AnyRef = + @tailrec private[this] final def removeCallback(cs: Callbacks[T], t: Transformation[_, _], result: Callbacks[T] | Null = null): AnyRef = if (cs eq t) { if (result == null) Noop - else result + else result.nn } else if (cs.isInstanceOf[ManyCallbacks[_]]) { val m = 
cs.asInstanceOf[ManyCallbacks[T]] if (m.first eq t) { if (result == null) m.rest - else concatCallbacks(m.rest, result) + else concatCallbacks(m.rest, result.nn) } - else removeCallback(m.rest, t, if (result == null) m.first else new ManyCallbacks(m.first, result)) + else removeCallback(m.rest, t, if (result == null) m.first else new ManyCallbacks(m.first, result.nn)) } else cs // IMPORTANT: Noop should not be passed in here, `callbacks` cannot be null @@ -373,7 +373,7 @@ private[concurrent] object Promise { /** Link this promise to the root of another promise. */ - @tailrec private[concurrent] final def linkRootOf(target: DefaultPromise[T], link: Link[T]): Unit = + @tailrec private[concurrent] final def linkRootOf(target: DefaultPromise[T], link: Link[T] | Null): Unit = if (this ne target) { val state = get() if (state.isInstanceOf[Try[_]]) { @@ -433,7 +433,7 @@ private[concurrent] object Promise { override final def toString: String = "ManyCallbacks" } - private[this] final val Noop = new Transformation[Nothing, Nothing](Xform_noop, null, ExecutionContext.parasitic) + private[this] final val Noop = new Transformation[Nothing, Nothing](Xform_noop, null.asInstanceOf[Any => Any], ExecutionContext.parasitic) /** * A Transformation[F, T] receives an F (it is a Callback[F]) and applies a transformation function to that F, @@ -442,13 +442,13 @@ private[concurrent] object Promise { * function's type parameters are erased, and the _xform tag will be used to reify them. **/ final class Transformation[-F, T] private[this] ( - private[this] final var _fun: Any => Any, - private[this] final var _ec: ExecutionContext, - private[this] final var _arg: Try[F], + private[this] final var _fun: (Any => Any) | Null, + private[this] final var _ec: ExecutionContext | Null, + private[this] final var _arg: Try[F] | Null, private[this] final val _xform: Int ) extends DefaultPromise[T]() with Callbacks[F] with Runnable with Batchable { final def this(xform: Int, f: _ => _, ec: ExecutionContext) = - this(f.asInstanceOf[Any => Any], ec.prepare(): @nowarn("cat=deprecation"), null, xform) + this(f.asInstanceOf[Any => Any], ec.prepare(): @nowarn("cat=deprecation"), null.asInstanceOf[Try[F]], xform) final def benefitsFromBatching: Boolean = _xform != Xform_onComplete && _xform != Xform_foreach @@ -459,13 +459,13 @@ private[concurrent] object Promise { final def submitWithValue(resolved: Try[F]): this.type = { _arg = resolved val e = _ec - try e.execute(this) /* Safe publication of _arg, _fun, _ec */ + try e.nn.execute(this) /* Safe publication of _arg, _fun, _ec */ catch { case t: Throwable => _fun = null // allow to GC _arg = null // see above _ec = null // see above again - handleFailure(t, e) + handleFailure(t, e.nn) } this @@ -492,51 +492,51 @@ private[concurrent] object Promise { _arg = null // see above _ec = null // see above try { - val resolvedResult: Try[_] = + val resolvedResult: Try[_] | Null = (_xform: @switch) match { case Xform_noop => null case Xform_map => - if (v.isInstanceOf[Success[F]]) Success(fun(v.get)) else v // Faster than `resolve(v map fun)` + if (v.isInstanceOf[Success[F]]) Success(fun.nn(v.nn.get)) else v // Faster than `resolve(v map fun)` case Xform_flatMap => if (v.isInstanceOf[Success[F]]) { - val f = fun(v.get) + val f = fun.nn(v.nn.get) if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) null } else v case Xform_transform => - resolve(fun(v).asInstanceOf[Try[T]]) + 
resolve(fun.nn(v).asInstanceOf[Try[T]]) case Xform_transformWith => - val f = fun(v) + val f = fun.nn(v) if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) null case Xform_foreach => - v.foreach(fun) + v.nn.foreach(fun.nn) null case Xform_onComplete => - fun(v) + fun.nn(v) null case Xform_recover => - if (v.isInstanceOf[Failure[_]]) resolve(v.recover(fun.asInstanceOf[PartialFunction[Throwable, F]])) else v //recover F=:=T + if (v.isInstanceOf[Failure[_]]) resolve(v.nn.recover(fun.nn.asInstanceOf[PartialFunction[Throwable, F]])) else v //recover F=:=T case Xform_recoverWith => if (v.isInstanceOf[Failure[F]]) { - val f = fun.asInstanceOf[PartialFunction[Throwable, Future[T]]].applyOrElse(v.asInstanceOf[Failure[F]].exception, Future.recoverWithFailed) + val f = fun.nn.asInstanceOf[PartialFunction[Throwable, Future[T]]].applyOrElse(v.asInstanceOf[Failure[F]].exception, Future.recoverWithFailed) if (f ne Future.recoverWithFailedMarker) { if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) null } else v } else v case Xform_filter => - if (v.isInstanceOf[Failure[F]] || fun.asInstanceOf[F => Boolean](v.get)) v else Future.filterFailure + if (v.isInstanceOf[Failure[F]] || fun.nn.asInstanceOf[F => Boolean](v.nn.get)) v else Future.filterFailure case Xform_collect => - if (v.isInstanceOf[Success[F]]) Success(fun.asInstanceOf[PartialFunction[F, T]].applyOrElse(v.get, Future.collectFailed)) else v + if (v.isInstanceOf[Success[F]]) Success(fun.nn.asInstanceOf[PartialFunction[F, T]].applyOrElse(v.nn.get, Future.collectFailed)) else v case _ => Failure(new IllegalStateException("BUG: encountered transformation promise with illegal type: " + _xform)) // Safe not to `resolve` } if (resolvedResult ne null) tryComplete0(get(), resolvedResult.asInstanceOf[Try[T]]) // T is erased anyway so we won't have any use for it above } catch { - case t: Throwable => handleFailure(t, ec) + case t: Throwable => handleFailure(t, ec.nn) } } } diff --git a/library/src/scala/io/BufferedSource.scala b/library/src/scala/io/BufferedSource.scala index 2369b528f8f7..df43e2d14bb2 100644 --- a/library/src/scala/io/BufferedSource.scala +++ b/library/src/scala/io/BufferedSource.scala @@ -66,7 +66,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod class BufferedLineIterator extends AbstractIterator[String] with Iterator[String] { private[this] val lineReader = decachedReader - var nextLine: String = null + var nextLine: String | Null = null override def hasNext = { if (nextLine == null) diff --git a/library/src/scala/io/Codec.scala b/library/src/scala/io/Codec.scala index a6eeab50b299..4158855fa8df 100644 --- a/library/src/scala/io/Codec.scala +++ b/library/src/scala/io/Codec.scala @@ -38,10 +38,10 @@ class Codec(val charSet: Charset) { // these variables allow configuring the Codec object, and then // all decoders and encoders retrieved from it will use these settings. 
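The Codec fields changed below show the recurring field pattern in this patch: an optionally-configured field is declared as `T | Null`, and every guarded read re-asserts non-nullness with `.nn`, because flow typing narrows stable local values but not mutable fields. A minimal sketch of the pattern, assuming Scala 3 with `-Yexplicit-nulls`; `Settings` and `_replacement` are made-up names, not part of the patch:

// Sketch only, not part of the patch.
final class Settings {
  private[this] var _replacement: String | Null = null     // unset until configured

  def withReplacement(r: String): this.type = { _replacement = r; this }

  def render(base: String): String =
    if (_replacement ne null) base + " replaced by " + _replacement.nn  // field read, so `.nn`
    else base
}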
- private[this] var _onMalformedInput: Action = null - private[this] var _onUnmappableCharacter: Action = null - private[this] var _encodingReplacement: Array[Byte] = null - private[this] var _decodingReplacement: String = null + private[this] var _onMalformedInput: Action | Null = null + private[this] var _onUnmappableCharacter: Action | Null = null + private[this] var _encodingReplacement: Array[Byte] | Null = null + private[this] var _decodingReplacement: String | Null = null private[this] var _onCodingException: Handler = e => throw e /** The name of the Codec. */ @@ -57,16 +57,16 @@ class Codec(val charSet: Charset) { def name = charSet.name def encoder: CharsetEncoder = { val enc = charSet.newEncoder() - if (_onMalformedInput ne null) enc onMalformedInput _onMalformedInput - if (_onUnmappableCharacter ne null) enc onUnmappableCharacter _onUnmappableCharacter - if (_encodingReplacement ne null) enc replaceWith _encodingReplacement + if (_onMalformedInput ne null) enc onMalformedInput _onMalformedInput.nn + if (_onUnmappableCharacter ne null) enc onUnmappableCharacter _onUnmappableCharacter.nn + if (_encodingReplacement ne null) enc replaceWith _encodingReplacement.nn enc } def decoder: CharsetDecoder = { val dec = charSet.newDecoder() - if (_onMalformedInput ne null) dec onMalformedInput _onMalformedInput - if (_onUnmappableCharacter ne null) dec onUnmappableCharacter _onUnmappableCharacter - if (_decodingReplacement ne null) dec replaceWith _decodingReplacement + if (_onMalformedInput ne null) dec onMalformedInput _onMalformedInput.nn + if (_onUnmappableCharacter ne null) dec onUnmappableCharacter _onUnmappableCharacter.nn + if (_decodingReplacement ne null) dec replaceWith _decodingReplacement.nn dec } diff --git a/library/src/scala/io/Source.scala b/library/src/scala/io/Source.scala index 360c9fe0cf6d..4276233b96e4 100644 --- a/library/src/scala/io/Source.scala +++ b/library/src/scala/io/Source.scala @@ -157,13 +157,13 @@ object Source { def createBufferedSource( inputStream: InputStream, bufferSize: Int = DefaultBufSize, - reset: () => Source = null, - close: () => Unit = null + reset: (() => Source) | Null = null, + close: (() => Unit) | Null = null )(implicit codec: Codec): BufferedSource = { // workaround for default arguments being unable to refer to other parameters val resetFn = if (reset == null) () => createBufferedSource(inputStream, bufferSize, reset, close)(codec) else reset - new BufferedSource(inputStream, bufferSize)(codec) withReset resetFn withClose close + new BufferedSource(inputStream, bufferSize)(codec) withReset resetFn withClose (if (close == null) () => () else close.nn) } def fromInputStream(is: InputStream, enc: String): BufferedSource = @@ -343,8 +343,8 @@ abstract class Source extends Iterator[Char] with Closeable { report(pos, "warning! " + msg, out) } - private[this] var resetFunction: () => Source = null - private[this] var closeFunction: () => Unit = null + private[this] var resetFunction: (() => Source) | Null = null + private[this] var closeFunction: (() => Unit) | Null = null private[this] var positioner: Positioner = RelaxedPositioner def withReset(f: () => Source): this.type = { @@ -371,11 +371,11 @@ abstract class Source extends Iterator[Char] with Closeable { /** The close() method closes the underlying resource. */ def close(): Unit = { - if (closeFunction != null) closeFunction() + if (closeFunction != null) closeFunction.nn() } /** The reset() method creates a fresh copy of this Source. 
*/ def reset(): Source = - if (resetFunction != null) resetFunction() + if (resetFunction != null) resetFunction.nn() else throw new UnsupportedOperationException("Source's reset() method was not set.") } diff --git a/library/src/scala/io/StdIn.scala b/library/src/scala/io/StdIn.scala index a39f99b4d689..5725fc28c3e8 100644 --- a/library/src/scala/io/StdIn.scala +++ b/library/src/scala/io/StdIn.scala @@ -27,7 +27,7 @@ private[scala] trait StdIn { * * @return the string read from the terminal or null if the end of stream was reached. */ - def readLine(): String = in.readLine() + def readLine(): String | Null = in.readLine() /** Print and flush formatted text to the default output, and read a full line from the default input. * Returns `null` if the end of the input stream has been reached. @@ -36,7 +36,7 @@ private[scala] trait StdIn { * @param args the parameters used to instantiate the format, as in `printf`. * @return the string read from the default input */ - def readLine(text: String, args: Any*): String = { + def readLine(text: String, args: Any*): String | Null = { printf(text, args: _*) out.flush() readLine() diff --git a/library/src/scala/jdk/AnyAccumulator.scala b/library/src/scala/jdk/AnyAccumulator.scala index fa952105fcca..72ad8eb0f8ec 100644 --- a/library/src/scala/jdk/AnyAccumulator.scala +++ b/library/src/scala/jdk/AnyAccumulator.scala @@ -325,7 +325,7 @@ private[jdk] class AnyAccumulatorStepper[A](private[this] val acc: AnyAccumulato ans } - def trySplit(): AnyStepper[A] = + def trySplit(): AnyStepper[A] | Null = if (N <= 1) null else { val half = N >> 1 diff --git a/library/src/scala/jdk/DoubleAccumulator.scala b/library/src/scala/jdk/DoubleAccumulator.scala index dfdb2feba9ea..0f4aeb1f6ce7 100644 --- a/library/src/scala/jdk/DoubleAccumulator.scala +++ b/library/src/scala/jdk/DoubleAccumulator.scala @@ -337,7 +337,7 @@ object DoubleAccumulator extends collection.SpecificIterableFactory[Double, Doub override def newBuilder: DoubleAccumulator = new DoubleAccumulator class SerializationProxy[A](@transient private val acc: DoubleAccumulator) extends Serializable { - @transient private var result: DoubleAccumulator = _ + @transient private var result: DoubleAccumulator | Null = _ private def writeObject(out: ObjectOutputStream): Unit = { out.defaultWriteObject() @@ -359,7 +359,7 @@ object DoubleAccumulator extends collection.SpecificIterableFactory[Double, Doub result = res } - private def readResolve(): AnyRef = result + private def readResolve(): AnyRef = result.nn } } @@ -405,7 +405,7 @@ private[jdk] class DoubleAccumulatorStepper(private val acc: DoubleAccumulator) ans } - def trySplit(): DoubleStepper = + def trySplit(): DoubleStepper | Null = if (N <= 1) null else { val half = N >> 1 diff --git a/library/src/scala/jdk/IntAccumulator.scala b/library/src/scala/jdk/IntAccumulator.scala index 9b7a904b36e3..fae270370602 100644 --- a/library/src/scala/jdk/IntAccumulator.scala +++ b/library/src/scala/jdk/IntAccumulator.scala @@ -410,7 +410,7 @@ private[jdk] class IntAccumulatorStepper(private val acc: IntAccumulator) extend ans } - def trySplit(): IntStepper = + def trySplit(): IntStepper | Null = if (N <= 1) null else { val half = N >> 1 diff --git a/library/src/scala/jdk/LongAccumulator.scala b/library/src/scala/jdk/LongAccumulator.scala index 38b868ae1111..e675a9691776 100644 --- a/library/src/scala/jdk/LongAccumulator.scala +++ b/library/src/scala/jdk/LongAccumulator.scala @@ -405,7 +405,7 @@ private[jdk] class LongAccumulatorStepper(private val acc: LongAccumulator) exte 
ans } - def trySplit(): LongStepper = + def trySplit(): LongStepper | Null = if (N <= 1) null else { val half = N >> 1 diff --git a/library/src/scala/jdk/javaapi/FutureConverters.scala b/library/src/scala/jdk/javaapi/FutureConverters.scala index d28a8da8a92e..c688478865ad 100644 --- a/library/src/scala/jdk/javaapi/FutureConverters.scala +++ b/library/src/scala/jdk/javaapi/FutureConverters.scala @@ -72,7 +72,7 @@ object FutureConverters { case f: Future[T @unchecked] => f case _ => val p = new P[T](cs) - val completedCF = cs match { + val completedCF: CompletableFuture[T] | Null = cs match { case cf0: CompletableFuture[T @unchecked] => // drop `MinimalStage` (scala/bug#12918) val cf = cf0.toCompletableFuture @@ -80,7 +80,7 @@ object FutureConverters { case _ => null } if (completedCF != null) - p.tryComplete(Success(completedCF.join())) + p.tryComplete(Success(completedCF.nn.join())) else cs.handle(p) p.future diff --git a/library/src/scala/math/BigDecimal.scala b/library/src/scala/math/BigDecimal.scala index e70cdbab41e4..b33711a6b90f 100644 --- a/library/src/scala/math/BigDecimal.scala +++ b/library/src/scala/math/BigDecimal.scala @@ -304,7 +304,7 @@ object BigDecimal { implicit def double2bigDecimal(d: Double): BigDecimal = decimal(d) /** Implicit conversion from `java.math.BigDecimal` to `scala.BigDecimal`. */ - implicit def javaBigDecimal2bigDecimal(x: BigDec): BigDecimal = if (x == null) null else apply(x) + implicit def javaBigDecimal2bigDecimal(x: BigDec | Null): BigDecimal | Null = if (x == null) null else apply(x) } /** diff --git a/library/src/scala/math/BigInt.scala b/library/src/scala/math/BigInt.scala index 9a17ee02a51a..7b77ea73fb69 100644 --- a/library/src/scala/math/BigInt.scala +++ b/library/src/scala/math/BigInt.scala @@ -26,7 +26,7 @@ object BigInt { private[this] val minCached = -1024 private[this] val maxCached = 1024 - private[this] val cache = new Array[BigInt](maxCached - minCached + 1) + private[this] val cache = new Array[BigInt | Null](maxCached - minCached + 1) private[this] def getCached(i: Int): BigInt = { val offset = i - minCached @@ -123,7 +123,7 @@ object BigInt { /** Implicit conversion from `java.math.BigInteger` to `scala.BigInt`. */ - implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = if (x eq null) null else apply(x) + implicit def javaBigInteger2bigInt(x: BigInteger | Null): BigInt | Null = if (x eq null) null else apply(x) // this method is adapted from Google Guava's version at // https://github.com/google/guava/blob/master/guava/src/com/google/common/math/LongMath.java @@ -176,7 +176,7 @@ object BigInt { * * It wraps `java.math.BigInteger`, with optimization for small values that can be encoded in a `Long`. 
*/ -final class BigInt private (private var _bigInteger: BigInteger, private val _long: Long) +final class BigInt private (private var _bigInteger: BigInteger | Null, private val _long: Long) extends ScalaNumber with ScalaNumericConversions with Serializable @@ -288,15 +288,15 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo if (this.longEncoding) that.longEncoding && (this._long == that._long) else - !that.longEncoding && (this._bigInteger == that._bigInteger) + !that.longEncoding && (this._bigInteger == that._bigInteger.nn) /** Compares this BigInt with the specified BigInt */ def compare(that: BigInt): Int = if (this.longEncoding) { - if (that.longEncoding) java.lang.Long.compare(this._long, that._long) else -that._bigInteger.signum() + if (that.longEncoding) java.lang.Long.compare(this._long, that._long) else -that._bigInteger.nn.signum() } else { - if (that.longEncoding) _bigInteger.signum() else this._bigInteger.compareTo(that._bigInteger) + if (that.longEncoding) _bigInteger.nn.signum() else this._bigInteger.nn.compareTo(that._bigInteger.nn) } /** Addition of BigInts diff --git a/library/src/scala/ref/ReferenceQueue.scala b/library/src/scala/ref/ReferenceQueue.scala index 70743708c732..25a188e14f38 100644 --- a/library/src/scala/ref/ReferenceQueue.scala +++ b/library/src/scala/ref/ReferenceQueue.scala @@ -17,7 +17,7 @@ class ReferenceQueue[+T <: AnyRef] { private[ref] val underlying: java.lang.ref.ReferenceQueue[_ <: T] = new java.lang.ref.ReferenceQueue[T] override def toString: String = underlying.toString - protected def Wrapper(jref: java.lang.ref.Reference[_]): Option[Reference[T]] = + protected def Wrapper(jref: java.lang.ref.Reference[_] | Null): Option[Reference[T]] = jref match { case null => None case ref => Some(ref.asInstanceOf[ReferenceWithWrapper[T]].wrapper) diff --git a/library/src/scala/ref/SoftReference.scala b/library/src/scala/ref/SoftReference.scala index 859eef5e7fef..6f18339d1140 100644 --- a/library/src/scala/ref/SoftReference.scala +++ b/library/src/scala/ref/SoftReference.scala @@ -12,7 +12,7 @@ package scala.ref -class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] { +class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T] | Null) extends ReferenceWrapper[T] { def this(value : T) = this(value, null) val underlying: java.lang.ref.SoftReference[_ <: T] = @@ -31,5 +31,5 @@ object SoftReference { def unapply[T <: AnyRef](sr: SoftReference[T]): Option[T] = Option(sr.underlying.get) } -private class SoftReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: SoftReference[T]) - extends java.lang.ref.SoftReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] +private class SoftReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T] | Null, val wrapper: SoftReference[T]) + extends java.lang.ref.SoftReference[T](value, if (queue == null) null else queue.nn.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] diff --git a/library/src/scala/ref/WeakReference.scala b/library/src/scala/ref/WeakReference.scala index 5ca06063590b..e3a0d78d27e6 100644 --- a/library/src/scala/ref/WeakReference.scala +++ b/library/src/scala/ref/WeakReference.scala @@ -17,7 +17,7 @@ package scala.ref * The new functionality is (1) results are Option values, instead of using null. 
* (2) There is an extractor that maps the weak reference itself into an option. */ -class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] { +class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T] | Null) extends ReferenceWrapper[T] { def this(value: T) = this(value, null) val underlying: java.lang.ref.WeakReference[_ <: T] = new WeakReferenceWithWrapper[T](value, queue, this) @@ -33,5 +33,5 @@ object WeakReference { def unapply[T <: AnyRef](wr: WeakReference[T]): Option[T] = Option(wr.underlying.get) } -private class WeakReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: WeakReference[T]) - extends java.lang.ref.WeakReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] +private class WeakReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T] | Null, val wrapper: WeakReference[T]) + extends java.lang.ref.WeakReference[T](value, if (queue == null) null else queue.nn.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] diff --git a/library/src/scala/reflect/ClassManifestDeprecatedApis.scala b/library/src/scala/reflect/ClassManifestDeprecatedApis.scala index cc8d0a457c2a..e99e3e96070a 100644 --- a/library/src/scala/reflect/ClassManifestDeprecatedApis.scala +++ b/library/src/scala/reflect/ClassManifestDeprecatedApis.scala @@ -136,7 +136,7 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { protected def argString = if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]") - else if (runtimeClass.isArray) "["+ClassManifest.fromClass(runtimeClass.getComponentType)+"]" + else if (runtimeClass.isArray) "["+ClassManifest.fromClass(runtimeClass.getComponentType.nn)+"]" else "" } diff --git a/library/src/scala/reflect/NameTransformer.scala b/library/src/scala/reflect/NameTransformer.scala index 4980ed5bd6b4..34859428a34e 100644 --- a/library/src/scala/reflect/NameTransformer.scala +++ b/library/src/scala/reflect/NameTransformer.scala @@ -31,10 +31,10 @@ object NameTransformer { private[this] val nops = 128 private[this] val ncodes = 26 * 26 - private class OpCodes(val op: Char, val code: String, val next: OpCodes) + private class OpCodes(val op: Char, val code: String, val next: OpCodes | Null) - private[this] val op2code = new Array[String](nops) - private[this] val code2op = new Array[OpCodes](ncodes) + private[this] val op2code = new Array[String | Null](nops) + private[this] val code2op = new Array[OpCodes | Null](ncodes) private def enterOp(op: Char, code: String) = { op2code(op.toInt) = code val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a' @@ -67,7 +67,7 @@ object NameTransformer { * @return the string with all recognized opchars replaced with their encoding */ def encode(name: String): String = { - var buf: StringBuilder = null + var buf: StringBuilder | Null = null val len = name.length() var i = 0 while (i < len) { @@ -104,11 +104,11 @@ object NameTransformer { //System.out.println("decode: " + name);//DEBUG val name = if (name0.endsWith("")) name0.stripSuffix("") + "this" else name0 - var buf: StringBuilder = null + var buf: StringBuilder | Null = null val len = name.length() var i = 0 while (i < len) { - var ops: OpCodes = null + var ops: OpCodes | Null = null var unicode = false val c = name charAt i if (c == '$' && i + 2 < len) { diff --git a/library/src/scala/runtime/LambdaDeserializer.scala 
b/library/src/scala/runtime/LambdaDeserializer.scala index 76fc5d778bec..66940c5ba0a9 100644 --- a/library/src/scala/runtime/LambdaDeserializer.scala +++ b/library/src/scala/runtime/LambdaDeserializer.scala @@ -38,26 +38,27 @@ object LambdaDeserializer { * @param lookup The factory for method handles. Must have access to the implementation method, the * functional interface class, and `java.io.Serializable`. * @param cache A cache used to avoid spinning up a class for each deserialization of a given lambda. May be `null` + * @param targetMethodMap A map of method handles for the target methods * @param serialized The lambda to deserialize. Note that this is typically created by the `readResolve` * member of the anonymous class created by `LambdaMetaFactory`. * @return An instance of the functional interface */ - def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], - targetMethodMap: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { + def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle] | Null, + targetMethodMap: java.util.Map[String, MethodHandle] | Null, serialized: SerializedLambda): AnyRef = { val result = deserializeLambdaOrNull(lookup, cache, targetMethodMap, serialized) if (result == null) throw new IllegalArgumentException("Illegal lambda deserialization") else result } - def deserializeLambdaOrNull(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], - targetMethodMap: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { + def deserializeLambdaOrNull(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle] | Null, + targetMethodMap: java.util.Map[String, MethodHandle] | Null, serialized: SerializedLambda): AnyRef | Null = { assert(targetMethodMap != null) def slashDot(name: String) = name.replaceAll("/", ".") val loader = lookup.lookupClass().getClassLoader val implClass = loader.loadClass(slashDot(serialized.getImplClass)) val key = LambdaDeserialize.nameAndDescriptorKey(serialized.getImplMethodName, serialized.getImplMethodSignature) - def makeCallSite: CallSite = { + def makeCallSite: CallSite | Null = { import serialized._ def parseDescriptor(s: String) = MethodType.fromMethodDescriptorString(s, loader) @@ -89,8 +90,8 @@ object LambdaDeserializer { } // Lookup the implementation method - val implMethod: MethodHandle = if (targetMethodMap.containsKey(key)) { - targetMethodMap.get(key) + val implMethod: MethodHandle = if (targetMethodMap.nn.containsKey(key)) { + targetMethodMap.nn.get(key) } else { return null } @@ -111,13 +112,13 @@ object LambdaDeserializer { val callSite = makeCallSite if (callSite == null) return null callSite.getTarget - } else cache.synchronized{ - cache.get(key) match { + } else cache.nn.synchronized { + cache.nn.get(key) match { case null => val callSite = makeCallSite if (callSite == null) return null val temp = callSite.getTarget - cache.put(key, temp) + cache.nn.put(key, temp) temp case target => target } diff --git a/library/src/scala/runtime/MethodCache.scala b/library/src/scala/runtime/MethodCache.scala index 2aa41c9e352a..982f94f69107 100644 --- a/library/src/scala/runtime/MethodCache.scala +++ b/library/src/scala/runtime/MethodCache.scala @@ -32,13 +32,13 @@ private[scala] sealed abstract class MethodCache { * `null` is returned. If `null` is returned, find's caller should look- * up the right method using whichever means it prefers, and add it to * the cache for later use. 
*/ - def find(forReceiver: JClass[_]): JMethod + def find(forReceiver: JClass[_]): JMethod | Null def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache } private[scala] final class EmptyMethodCache extends MethodCache { - def find(forReceiver: JClass[_]): JMethod = null + def find(forReceiver: JClass[_]): JMethod | Null = null def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache = new PolyMethodCache(this, forReceiver, forMethod, 1) @@ -50,7 +50,7 @@ private[scala] final class MegaMethodCache( private[this] val forParameterTypes: Array[JClass[_]] ) extends MethodCache { - def find(forReceiver: JClass[_]): JMethod = + def find(forReceiver: JClass[_]): JMethod | Null = forReceiver.getMethod(forName, forParameterTypes:_*) def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache = this @@ -67,14 +67,14 @@ private[scala] final class PolyMethodCache( /** To achieve tail recursion this must be a separate method * from `find`, because the type of next is not `PolyMethodCache`. */ - @tailrec private def findInternal(forReceiver: JClass[_]): JMethod = + @tailrec private def findInternal(forReceiver: JClass[_]): JMethod | Null = if (forReceiver eq receiver) method else next match { case x: PolyMethodCache => x findInternal forReceiver - case _ => next find forReceiver + case _ => next.find(forReceiver) } - def find(forReceiver: JClass[_]): JMethod = findInternal(forReceiver) + def find(forReceiver: JClass[_]): JMethod | Null = findInternal(forReceiver) // TODO: come up with a more realistic number final private val MaxComplexity = 160 diff --git a/library/src/scala/runtime/ScalaRunTime.scala b/library/src/scala/runtime/ScalaRunTime.scala index 5c227b33c5ef..3218595e3987 100644 --- a/library/src/scala/runtime/ScalaRunTime.scala +++ b/library/src/scala/runtime/ScalaRunTime.scala @@ -27,7 +27,7 @@ import java.lang.reflect.{Method => JMethod} * outside the API and subject to change or removal without notice. */ object ScalaRunTime { - def isArray(x: Any, atLevel: Int = 1): Boolean = + def isArray(x: Any | Null, atLevel: Int = 1): Boolean = x != null && isArrayClass(x.getClass, atLevel) private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean = @@ -53,7 +53,7 @@ object ScalaRunTime { classTag[T].runtimeClass.asInstanceOf[jClass[T]] /** Retrieve generic array element */ - def array_apply(xs: AnyRef, idx: Int): Any = { + def array_apply(xs: AnyRef | Null, idx: Int): Any = { (xs: @unchecked) match { case x: Array[AnyRef] => x(idx).asInstanceOf[Any] case x: Array[Int] => x(idx).asInstanceOf[Any] @@ -69,7 +69,7 @@ object ScalaRunTime { } /** update generic array element */ - def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { + def array_update(xs: AnyRef | Null, idx: Int, value: Any): Unit = { (xs: @unchecked) match { case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef] case x: Array[Int] => x(idx) = value.asInstanceOf[Int] @@ -85,11 +85,11 @@ object ScalaRunTime { } /** Get generic array length */ - @inline def array_length(xs: AnyRef): Int = java.lang.reflect.Array.getLength(xs) + @inline def array_length(xs: AnyRef | Null): Int = java.lang.reflect.Array.getLength(xs) // TODO: bytecode Object.clone() will in fact work here and avoids // the type switch. See Array_clone comment in BCodeBodyBuilder. 
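Several of the signatures above, `MethodCache.find` and `StdIn.readLine` among them, now return `T | Null` and push the null check to the caller. A caller-side sketch, assuming Scala 3 with explicit nulls; `resolve` and its parameters are hypothetical, not part of the patch:

// Sketch only, not part of the patch.
def resolve[K, V <: AnyRef](lookup: K => V | Null, key: K, orElse: => V): V = {
  val found = lookup(key)        // typed V | Null
  if (found == null) orElse
  else found                     // stable local val: flow typing narrows `found` to V
}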
- def array_clone(xs: AnyRef): AnyRef = (xs: @unchecked) match { + def array_clone(xs: AnyRef | Null): AnyRef = (xs: @unchecked) match { case x: Array[AnyRef] => x.clone() case x: Array[Int] => x.clone() case x: Array[Double] => x.clone() @@ -106,7 +106,7 @@ object ScalaRunTime { * Needed to deal with vararg arguments of primitive types that are passed * to a generic Java vararg parameter T ... */ - def toObjectArray(src: AnyRef): Array[Object] = { + def toObjectArray(src: AnyRef | Null): Array[Object] = { def copy[@specialized T <: AnyVal](src: Array[T]): Array[Object] = { val length = src.length if (length == 0) Array.emptyObjectArray @@ -215,6 +215,7 @@ object ScalaRunTime { // When doing our own iteration is dangerous def useOwnToString(x: Any) = x match { + case null => false // Range/NumericRange have a custom toString to avoid walking a gazillion elements case _: Range | _: NumericRange[_] => true // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 @@ -285,7 +286,7 @@ object ScalaRunTime { // In cases where an empty array would appear, the compiler uses a direct reference to Nil instead. // Synthetic Java varargs forwarders (@annotation.varargs or varargs bridges when overriding) may pass // `null` to these methods; but returning `null` or `ArraySeq(null)` makes little difference in practice. - def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = ArraySeq.unsafeWrapArray(xs) + def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = ArraySeq.unsafeWrapArray(xs).nn def wrapRefArray[T <: AnyRef](xs: Array[T]): ArraySeq[T] = new ArraySeq.ofRef[T](xs) def wrapIntArray(xs: Array[Int]): ArraySeq[Int] = new ArraySeq.ofInt(xs) def wrapDoubleArray(xs: Array[Double]): ArraySeq[Double] = new ArraySeq.ofDouble(xs) diff --git a/library/src/scala/runtime/StructuralCallSite.scala b/library/src/scala/runtime/StructuralCallSite.scala index 8e245e6c99db..e864f8d7aa5f 100644 --- a/library/src/scala/runtime/StructuralCallSite.scala +++ b/library/src/scala/runtime/StructuralCallSite.scala @@ -30,7 +30,7 @@ final class StructuralCallSite private (callType: MethodType) { cache } - def find(receiver: Class[_]): Method = get.find(receiver) + def find(receiver: Class[_]): Method | Null = get.find(receiver) def add(receiver: Class[_], m: Method): Method = { cache = new SoftReference(get.add(receiver, m)) diff --git a/library/src/scala/sys/PropImpl.scala b/library/src/scala/sys/PropImpl.scala index 390c5c9c576d..8dc93f5af721 100644 --- a/library/src/scala/sys/PropImpl.scala +++ b/library/src/scala/sys/PropImpl.scala @@ -20,7 +20,7 @@ import scala.collection.mutable private[sys] class PropImpl[+T](val key: String, valueFn: String => T) extends Prop[T] { def value: T = if (isSet) valueFn(get) else zero def isSet = underlying contains key - def set(newValue: String): String = { + def set(newValue: String | Null): String | Null = { val old = if (isSet) get else null underlying(key) = newValue old @@ -32,7 +32,7 @@ private[sys] class PropImpl[+T](val key: String, valueFn: String => T) extends P old } def get: String = - if (isSet) underlying.getOrElse(key, "") + if (isSet) underlying.getOrElse(key, "").nn else "" def clear(): Unit = underlying -= key @@ -40,7 +40,7 @@ private[sys] class PropImpl[+T](val key: String, valueFn: String => T) extends P def or[T1 >: T](alt: => T1): T1 = if (isSet) value else alt /** The underlying property map, in our case always sys.props */ - protected def underlying: mutable.Map[String, String] = scala.sys.props + protected def underlying: mutable.Map[String, String
| Null] = scala.sys.props protected def zero: T = null.asInstanceOf[T] private def getString = if (isSet) "currently: " + get else "unset" override def toString = "%s (%s)".format(key, getString) diff --git a/library/src/scala/sys/SystemProperties.scala b/library/src/scala/sys/SystemProperties.scala index 1f848a73358e..78557cfd65a4 100644 --- a/library/src/scala/sys/SystemProperties.scala +++ b/library/src/scala/sys/SystemProperties.scala @@ -28,10 +28,10 @@ import scala.language.implicitConversions * @define coll mutable map */ class SystemProperties -extends mutable.AbstractMap[String, String] { +extends mutable.AbstractMap[String, String | Null] { - override def empty: mutable.Map[String, String] = mutable.Map[String, String]() - override def default(key: String): String = null + override def empty: mutable.Map[String, String | Null] = mutable.Map[String, String | Null]() + override def default(key: String): String | Null = null def iterator: Iterator[(String, String)] = wrapAccess { val ps = System.getProperties() @@ -40,7 +40,7 @@ extends mutable.AbstractMap[String, String] { override def isEmpty: Boolean = iterator.isEmpty def names: Iterator[String] = wrapAccess ( - System.getProperties().stringPropertyNames().asScala.iterator + System.getProperties().stringPropertyNames().asScala.nn.iterator ) getOrElse Iterator.empty def get(key: String): Option[String] = @@ -50,7 +50,7 @@ extends mutable.AbstractMap[String, String] { override def clear(): Unit = wrapAccess(System.getProperties().clear()) def subtractOne (key: String): this.type = { wrapAccess(System.clearProperty(key)) ; this } - def addOne (kv: (String, String)): this.type = { wrapAccess(System.setProperty(kv._1, kv._2)) ; this } + def addOne (kv: (String, String | Null)): this.type = { wrapAccess(System.setProperty(kv._1, kv._2)) ; this } @annotation.nowarn("cat=deprecation") // AccessControlException is deprecated on JDK 17 def wrapAccess[T](body: => T): Option[T] = diff --git a/library/src/scala/sys/package.scala b/library/src/scala/sys/package.scala index 122f19d12c3a..733e7cce07b3 100644 --- a/library/src/scala/sys/package.scala +++ b/library/src/scala/sys/package.scala @@ -64,7 +64,7 @@ package object sys { * * @return a Map containing the system environment variables. */ - def env: Map[String, String] = Map.from(System.getenv().asScala).withDefault { v => + def env: Map[String, String] = Map.from(System.getenv().nn.asScala).withDefault { v => val s = System.getenv(v) if (s == null) throw new NoSuchElementException(v) s @@ -92,6 +92,6 @@ package object sys { val tarray = new Array[Thread](num) val got = Thread.enumerate(tarray) - ArraySeq.unsafeWrapArray(tarray).take(got) + ArraySeq.unsafeWrapArray(tarray).nn.take(got) } } diff --git a/library/src/scala/sys/process/BasicIO.scala b/library/src/scala/sys/process/BasicIO.scala index a242fe312bbf..74bafe09026c 100644 --- a/library/src/scala/sys/process/BasicIO.scala +++ b/library/src/scala/sys/process/BasicIO.scala @@ -194,7 +194,7 @@ object BasicIO { /** Calls `processLine` with the result of `readLine` until the latter returns * `null` or the current thread is interrupted. 
*/ - def processLinesFully(processLine: String => Unit)(readLine: () => String): Unit = { + def processLinesFully(processLine: String => Unit)(readLine: () => String | Null): Unit = { def working = !Thread.currentThread.isInterrupted def halting = { Thread.currentThread.interrupt(); null } @tailrec diff --git a/library/src/scala/sys/process/ProcessBuilderImpl.scala b/library/src/scala/sys/process/ProcessBuilderImpl.scala index 186ad134a218..3c7b71d5993a 100644 --- a/library/src/scala/sys/process/ProcessBuilderImpl.scala +++ b/library/src/scala/sys/process/ProcessBuilderImpl.scala @@ -85,7 +85,7 @@ private[process] trait ProcessBuilderImpl { val process = p.start() // start the external process // spawn threads that process the input, output, and error streams using the functions defined in `io` - val inThread = + val inThread: Thread | Null = if (inherit || (writeInput eq BasicIO.connectNoOp)) null else Spawn("Simple-input", daemon = true)(writeInput(process.getOutputStream)) val outThread = Spawn("Simple-output", daemonizeThreads)(processOutput(process.getInputStream())) diff --git a/library/src/scala/sys/process/ProcessImpl.scala b/library/src/scala/sys/process/ProcessImpl.scala index 3ed0c5766412..336273428c57 100644 --- a/library/src/scala/sys/process/ProcessImpl.scala +++ b/library/src/scala/sys/process/ProcessImpl.scala @@ -260,7 +260,7 @@ private[process] trait ProcessImpl { * The implementation of `exitValue` interrupts `inputThread` * and then waits until all I/O threads die before returning. */ - private[process] class SimpleProcess(p: JProcess, inputThread: Thread, outputThreads: List[Thread]) extends Process { + private[process] class SimpleProcess(p: JProcess, inputThread: Thread | Null, outputThreads: List[Thread]) extends Process { override def isAlive() = p.isAlive() override def exitValue() = { try p.waitFor() // wait for the process to terminate @@ -277,7 +277,7 @@ private[process] trait ProcessImpl { finally interrupt() } // we interrupt the input thread to notify it that it can terminate - private[this] def interrupt(): Unit = if (inputThread != null) inputThread.interrupt() + private[this] def interrupt(): Unit = if (inputThread != null) inputThread.nn.interrupt() } private[process] final class ThreadProcess(thread: Thread, success: LinkedBlockingQueue[Boolean]) extends Process { override def isAlive() = thread.isAlive() diff --git a/library/src/scala/util/Properties.scala b/library/src/scala/util/Properties.scala index 24dee49ae951..0ae9ef261733 100644 --- a/library/src/scala/util/Properties.scala +++ b/library/src/scala/util/Properties.scala @@ -53,9 +53,9 @@ private[scala] trait PropertiesTrait { def propIsSet(name: String) = System.getProperty(name) != null def propIsSetTo(name: String, value: String) = propOrNull(name) == value - def propOrElse(name: String, alt: => String) = Option(System.getProperty(name)).getOrElse(alt) - def propOrEmpty(name: String) = propOrElse(name, "") - def propOrNull(name: String) = propOrElse(name, null) + def propOrElse(name: String, alt: => String | Null) = Option(System.getProperty(name)).getOrElse(alt) + def propOrEmpty(name: String) = propOrElse(name, "").asInstanceOf[String] + def propOrNull(name: String): String | Null = propOrElse(name, null) def propOrNone(name: String) = Option(propOrNull(name)) def propOrFalse(name: String) = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase) def setProp(name: String, value: String) = System.setProperty(name, value) diff --git a/library/src/scala/util/Random.scala 
b/library/src/scala/util/Random.scala index 84b44fcc91f5..a42cfc1a3b00 100644 --- a/library/src/scala/util/Random.scala +++ b/library/src/scala/util/Random.scala @@ -249,7 +249,7 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { chars charAt (self nextInt chars.length) } - LazyList continually nextAlphaNum + LazyList.continually(nextAlphaNum) } } diff --git a/library/src/scala/util/Sorting.scala b/library/src/scala/util/Sorting.scala index 7e2da2434f82..115ea885c626 100644 --- a/library/src/scala/util/Sorting.scala +++ b/library/src/scala/util/Sorting.scala @@ -177,7 +177,7 @@ object Sorting { } // Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0. - private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = { + private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] | Null = null): Unit = { if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord) else { val iK = (i0 + iN) >>> 1 // Bit shift equivalent to unsigned math, no overflow @@ -233,8 +233,8 @@ object Sorting { // TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible) // Maybe also rename all these methods to `sort`. - @inline private def sort[T](a: Array[T], from: Int, until: Int, ord: Ordering[T]): Unit = (a: @unchecked) match { - case _: Array[AnyRef] => + @inline private def sort[T](a: Array[T] | Null, from: Int, until: Int, ord: Ordering[T]): Unit = (a: @unchecked) match { + case a: Array[AnyRef] => // Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes) if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering") java.util.Arrays.sort(a, from, until, ord) diff --git a/library/src/scala/util/Using.scala b/library/src/scala/util/Using.scala index ebec5e7007ec..36b9fc0b2557 100644 --- a/library/src/scala/util/Using.scala +++ b/library/src/scala/util/Using.scala @@ -197,7 +197,7 @@ object Using { } private def manage[A](op: Manager => A): A = { - var toThrow: Throwable = null + var toThrow: Throwable | Null = null try { op(this) } catch { @@ -207,7 +207,7 @@ object Using { } finally { closed = true var rs = resources - resources = null // allow GC, in case something is holding a reference to `this` + resources = null.asInstanceOf // allow GC, in case something is holding a reference to `this` while (rs.nonEmpty) { val resource = rs.head rs = rs.tail @@ -291,7 +291,7 @@ object Using { def resource[R, A](resource: R)(body: R => A)(implicit releasable: Releasable[R]): A = { if (resource == null) throw new NullPointerException("null resource") - var toThrow: Throwable = null + var toThrow: Throwable | Null = null try { body(resource) } catch { diff --git a/library/src/scala/util/control/ControlThrowable.scala b/library/src/scala/util/control/ControlThrowable.scala index ea5ff549e121..c01e0fddc030 100644 --- a/library/src/scala/util/control/ControlThrowable.scala +++ b/library/src/scala/util/control/ControlThrowable.scala @@ -40,7 +40,7 @@ package scala.util.control * Instances of `ControlThrowable` should not normally have a cause. * Legacy subclasses may set a cause using `initCause`. 
*/ -abstract class ControlThrowable(message: String) extends Throwable( +abstract class ControlThrowable(message: String | Null) extends Throwable( message, /*cause*/ null, /*enableSuppression=*/ false, /*writableStackTrace*/ false) { def this() = this(message = null) diff --git a/library/src/scala/util/hashing/MurmurHash3.scala b/library/src/scala/util/hashing/MurmurHash3.scala index 1fa98e790445..8a1617b45a23 100644 --- a/library/src/scala/util/hashing/MurmurHash3.scala +++ b/library/src/scala/util/hashing/MurmurHash3.scala @@ -82,7 +82,7 @@ private[hashing] class MurmurHash3 { } /** See the [[MurmurHash3.caseClassHash(x:Product,caseClassName:String)]] overload */ - final def caseClassHash(x: Product, seed: Int, caseClassName: String): Int = { + final def caseClassHash(x: Product, seed: Int, caseClassName: String | Null): Int = { val arr = x.productArity val aye = (if (caseClassName != null) caseClassName else x.productPrefix).hashCode if (arr == 0) aye @@ -394,7 +394,7 @@ object MurmurHash3 extends MurmurHash3 { * val res2: Int = -668012062 * }}} */ - def caseClassHash(x: Product, caseClassName: String = null): Int = caseClassHash(x, productSeed, caseClassName) + def caseClassHash(x: Product, caseClassName: String | Null = null): Int = caseClassHash(x, productSeed, caseClassName) private[scala] def arraySeqHash[@specialized T](a: Array[T]): Int = arrayHash(a, seqSeed) private[scala] def tuple2Hash(x: Any, y: Any): Int = tuple2Hash(x.##, y.##, productSeed) diff --git a/library/src/scala/util/matching/Regex.scala b/library/src/scala/util/matching/Regex.scala index c19bc2a925b1..45baff2e2031 100644 --- a/library/src/scala/util/matching/Regex.scala +++ b/library/src/scala/util/matching/Regex.scala @@ -324,7 +324,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends def unapplySeq(c: Char): Option[List[Char]] = { val m = pattern matcher c.toString if (runMatcher(m)) { - if (m.groupCount > 0) Some((m group 1).toList) else Some(Nil) + if (m.groupCount > 0) Some(m.group(1).nn.toList) else Some(Nil) } else None } @@ -338,7 +338,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * and the result of that match is used. */ def unapplySeq(m: Match): Option[List[String]] = if (m.matched == null) None else if (m.matcher.pattern == this.pattern) Regex.extractGroupsFromMatch(m) - else unapplySeq(m.matched) + else unapplySeq(m.matched.nn) @@ -413,7 +413,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends */ def findFirstIn(source: CharSequence): Option[String] = { val m = pattern.matcher(source) - if (m.find) Some(m.group) else None + if (m.find) Option(m.group) else None } /** Return an optional first match of this `Regex` in the given character sequence, @@ -444,7 +444,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends */ def findPrefixOf(source: CharSequence): Option[String] = { val m = pattern.matcher(source) - if (m.lookingAt) Some(m.group) else None + if (m.lookingAt) Option(m.group) else None } /** Return an optional match of this `Regex` at the beginning of the @@ -649,45 +649,45 @@ object Regex { def end(i: Int): Int /** The matched string, or `null` if nothing was matched. */ - def matched: String = + def matched: String | Null = if (start >= 0) source.subSequence(start, end).toString else null /** The matched string in group `i`, * or `null` if nothing was matched.
*/ - def group(i: Int): String = + def group(i: Int): String | Null = if (start(i) >= 0) source.subSequence(start(i), end(i)).toString else null /** All capturing groups, i.e., not including group(0). */ - def subgroups: List[String] = (1 to groupCount).toList map group + def subgroups: List[String] = (1 to groupCount).toList map (i => group(i).asInstanceOf[String]) /** The char sequence before first character of match, * or `null` if nothing was matched. */ - def before: CharSequence = + def before: CharSequence | Null = if (start >= 0) source.subSequence(0, start) else null /** The char sequence before first character of match in group `i`, * or `null` if nothing was matched for that group. */ - def before(i: Int): CharSequence = + def before(i: Int): CharSequence | Null = if (start(i) >= 0) source.subSequence(0, start(i)) else null /** Returns char sequence after last character of match, * or `null` if nothing was matched. */ - def after: CharSequence = + def after: CharSequence | Null = if (end >= 0) source.subSequence(end, source.length) else null /** The char sequence after last character of match in group `i`, * or `null` if nothing was matched for that group. */ - def after(i: Int): CharSequence = + def after(i: Int): CharSequence | Null = if (end(i) >= 0) source.subSequence(end(i), source.length) else null @@ -707,18 +707,18 @@ object Regex { * @return The requested group * @throws IllegalArgumentException if the requested group name is not defined */ - def group(id: String): String = ( + def group(id: String): String | Null = ( if (groupNamesNowarn.isEmpty) - matcher group id + matcher.group(id) else nameToIndex.get(id) match { case Some(index) => group(index) - case None => matcher group id + case None => matcher.group(id) } ) /** The matched string; equivalent to `matched.toString`. */ - override def toString: String = matched + override def toString: String = matched.nn } /** Provides information about a successful match. */ @@ -767,7 +767,7 @@ object Regex { * */ object Match { - def unapply(m: Match): Some[String] = Some(m.matched) + def unapply(m: Match): Some[String] = Some(m.matched.nn) } /** An extractor object that yields the groups in the match. Using this extractor @@ -788,7 +788,7 @@ object Regex { } @inline private def extractGroupsFromMatch(m: Match): Option[List[String]] = - Some(List.tabulate(m.groupCount) { i => m.group(i + 1) }) + Some(List.tabulate(m.groupCount) { i => m.group(i + 1).asInstanceOf[String] }) /** A class to step through a sequence of regex matches. * @@ -837,7 +837,7 @@ object Regex { case 2 => nextSeen = 0 ; next() case 3 => throw new NoSuchElementException } - matcher.group + matcher.group.nn } /** Report emptiness. */ diff --git a/project.scala b/project.scala new file mode 100644 index 000000000000..e292e446f4ff --- /dev/null +++ b/project.scala @@ -0,0 +1 @@ +//> using option -Yexplicit-nulls \ No newline at end of file diff --git a/readme.md b/readme.md index c6ad8d0bc04b..d9acb23a7bb1 100644 --- a/readme.md +++ b/readme.md @@ -1,4 +1,12 @@ dotty ===== -The experimental compiler for a Scala dialect based on DOT \ No newline at end of file +The experimental compiler for a Scala dialect based on DOT + +``` +scala compile --server=false -O="--color:never" -O="--nowarn" . &> out.txt +``` + +``` +pipx run fix.py +``` \ No newline at end of file
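For reference, a minimal sketch of the explicit-nulls idioms the patch leans on: `T | Null` union types, the `.nn` assertion from `Predef`, and flow typing after a null test. It assumes a Scala 3 compiler with `-Yexplicit-nulls`, the same option that `project.scala` applies to the whole build; the property keys and the `@main` method name are made up for the example.

```scala
//> using option -Yexplicit-nulls

// Hypothetical demo file (e.g. explicit-nulls-demo.scala); not part of the patch.
@main def explicitNullsDemo(): Unit =
  // Under -Yexplicit-nulls, a Java API that may return null is treated as nullable.
  val maybeHome: String | Null = System.getProperty("user.home")

  // `.nn` asserts non-nullness and throws a NullPointerException if the value is null,
  // the same trade-off made by the `.nn` calls added throughout the patch.
  val javaVersion: String = System.getProperty("java.version").nn

  // Flow typing: after the null test, `maybeHome` is narrowed from `String | Null` to `String`.
  val home: String = if maybeHome == null then "<unset>" else maybeHome

  println(s"java.version=$javaVersion, home=$home")
```

Run on its own (for instance with `scala run`), the `//> using option` directive should enable the flag for just this file, which makes the snippet easy to try in isolation.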