diff --git a/tests/pos-special/stdlib/collection/concurrent/BasicNode.java b/tests/pos-special/stdlib/collection/concurrent/BasicNode.java new file mode 100644 index 000000000000..c6ec91e4fde8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/BasicNode.java @@ -0,0 +1,19 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.concurrent; + +public abstract class BasicNode { + + public abstract String string(int lev); + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java b/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java new file mode 100644 index 000000000000..ddffa365234e --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.concurrent; + +import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; + +abstract class CNodeBase<K, V> extends MainNode<K, V> { + + @SuppressWarnings("unchecked") + public static final AtomicIntegerFieldUpdater<CNodeBase<?, ?>> updater = + AtomicIntegerFieldUpdater.newUpdater((Class<CNodeBase<?, ?>>) (Class<?>) CNodeBase.class, "csize"); + + public volatile int csize = -1; + + public boolean CAS_SIZE(int oldval, int nval) { + return updater.compareAndSet(this, oldval, nval); + } + + public void WRITE_SIZE(int nval) { + updater.set(this, nval); + } + + public int READ_SIZE() { + return updater.get(this); + } + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/Gen.java b/tests/pos-special/stdlib/collection/concurrent/Gen.java new file mode 100644 index 000000000000..07af2983f32d --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/Gen.java @@ -0,0 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.concurrent; + +final class Gen {} diff --git a/tests/pos-special/stdlib/collection/concurrent/INodeBase.java b/tests/pos-special/stdlib/collection/concurrent/INodeBase.java new file mode 100644 index 000000000000..dfb99806594f --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/INodeBase.java @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.concurrent; + +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + +abstract class INodeBase<K, V> extends BasicNode { + + @SuppressWarnings("unchecked") + public static final AtomicReferenceFieldUpdater<INodeBase<?, ?>, MainNode<?, ?>> updater = + AtomicReferenceFieldUpdater.newUpdater((Class<INodeBase<?, ?>>) (Class<?>) INodeBase.class, (Class<MainNode<?, ?>>) (Class<?>) MainNode.class, "mainnode"); + + static final Object RESTART = new Object(); + + static final Object NO_SUCH_ELEMENT_SENTINEL = new Object(); + + public volatile MainNode<K, V> mainnode = null; + + public final Gen gen; + + public INodeBase(Gen generation) { + gen = generation; + } + + public BasicNode prev() { + return null; + } + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/MainNode.java b/tests/pos-special/stdlib/collection/concurrent/MainNode.java new file mode 100644 index 000000000000..f7f022974e9e --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/MainNode.java @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.concurrent; + +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + +abstract class MainNode<K, V> extends BasicNode { + + @SuppressWarnings("unchecked") + public static final AtomicReferenceFieldUpdater<MainNode<?, ?>, MainNode<?, ?>> updater = + AtomicReferenceFieldUpdater.newUpdater((Class<MainNode<?, ?>>) (Class<?>) MainNode.class, (Class<MainNode<?, ?>>) (Class<?>) MainNode.class, "prev"); + + public volatile MainNode<K, V> prev = null; + + public abstract int cachedSize(Object ct); + + // standard contract + public abstract int knownSize(); + + public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) { + return updater.compareAndSet(this, oldval, nval); + } + + public void WRITE_PREV(MainNode<K, V> nval) { + updater.set(this, nval); + } + + // do we need this? unclear in the javadocs... + // apparently not - volatile reads are supposed to be safe + // regardless of whether there are concurrent ARFU updates + @Deprecated @SuppressWarnings("unchecked") + public MainNode<K, V> READ_PREV() { + return (MainNode<K, V>) updater.get(this); + } + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala new file mode 100644 index 000000000000..0824ecc44519 --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala @@ -0,0 +1,1207 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package concurrent + +import java.util.concurrent.atomic._ +import scala.{unchecked => uc} +import scala.annotation.tailrec +import scala.collection.concurrent.TrieMap.RemovalPolicy +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.{List, Nil} +import scala.collection.mutable.GrowableBuilder +import scala.util.Try +import scala.util.hashing.Hashing +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + +private[collection] final class INode[sealed K, sealed V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { + import INodeBase._ + + WRITE(bn) + + def this(g: Gen, equiv: Equiv[K]) = this(null, g, equiv) + + def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval) + + def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n) + + def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct) + + def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = { + val m = /*READ*/mainnode + val prevval = /*READ*/m.prev + if (prevval eq null) m + else GCAS_Complete(m, ct) + } + + @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else { + // complete the GCAS + val prev = /*READ*/m.prev + val ctr = ct.readRoot(abort = true) + + prev match { + case null => + m + case fn: FailedNode[_, _] => // try to commit to previous value + if (CAS(m, fn.prev)) fn.prev + else GCAS_Complete(/*READ*/mainnode, ct) + case vn: MainNode[_, _] => + // Assume that you've read the root from the generation G. + // Assume that the snapshot algorithm is correct. + // ==> you can only reach nodes in generations <= G. + // ==> `gen` is <= G. + // We know that `ctr.gen` is >= G. + // ==> if `ctr.gen` = `gen` then they are both equal to G. 
+ // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G, + // or both + if ((ctr.gen eq gen) && ct.nonReadOnly) { + // try to commit + if (m.CAS_PREV(prev, null)) m + else GCAS_Complete(m, ct) + } else { + // try to abort + m.CAS_PREV(prev, new FailedNode(prev)) + GCAS_Complete(/*READ*/mainnode, ct) + } + } + } + + def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = { + n.WRITE_PREV(old) + if (CAS(old, n)) { + GCAS_Complete(n, ct) + /*READ*/n.prev eq null + } else false + } + + private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2) + + private def inode(cn: MainNode[K, V]) = { + val nin = new INode[K, V](gen, equiv) + nin.WRITE(cn) + nin + } + + def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = { + val nin = new INode[K, V](ngen, equiv) + val main = GCAS_READ(ct) + nin.WRITE(main) + nin + } + + /** Inserts a key value pair, overwriting the old pair if the keys match. + * + * @return true if successful, false otherwise + */ + @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = { + val m = GCAS_READ(ct) // use -Yinline! 
+ + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) + else false + } + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + GCAS(cn, nn, ct) + } + case basicNode => throw new MatchError(basicNode) + } + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) + GCAS(cn, ncnode, ct) + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + false + case ln: LNode[K, V] => // 3) an l-node + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + case mainNode => throw new MatchError(mainNode) + } + } + + + + /** Inserts a new key value pair, given that a specific condition is met. 
+ * + * @param cond KEY_PRESENT_OR_ABSENT - don't care if the key was there, insert or overwrite + * KEY_ABSENT - key wasn't there, insert only, do not overwrite + * KEY_PRESENT - key was there, overwrite only, do not insert + * other value `v` - only overwrite if the current value is this + * @param fullEquals whether to use reference or full equals when comparing `v` to the current value + * @param hc the hashcode of `k` + * + * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) + */ + @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, fullEquals: Boolean, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { + val m = GCAS_READ(ct) // use -Yinline! + + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, fullEquals, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, fullEquals, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] @uc => cond match { + case INode.KEY_PRESENT_OR_ABSENT => + if (sn.hc == hc && equal(sn.k, k, ct)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct)) Some(sn.v) else null + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_ABSENT => + if (sn.hc == hc && equal(sn.k, k, ct)) Some(sn.v) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + if (GCAS(cn, nn, ct)) 
None + else null + } + case INode.KEY_PRESENT => + if (sn.hc == hc && equal(sn.k, k, ct)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + case otherv => + if (sn.hc == hc && equal(sn.k, k, ct) && (if (fullEquals) sn.v == otherv else sn.v.asInstanceOf[AnyRef] eq otherv)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + } + case basicNode => throw new MatchError(basicNode) + } + } else cond match { + case INode.KEY_PRESENT_OR_ABSENT | INode.KEY_ABSENT => + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) + if (GCAS(cn, ncnode, ct)) None else null + case INode.KEY_PRESENT => None + case otherv => None + } + case sn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => // 3) an l-node + def insertln() = { + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + } + cond match { + case INode.KEY_PRESENT_OR_ABSENT => + val optv = ln.get(k) + if (insertln()) optv else null + case INode.KEY_ABSENT => + ln.get(k) match { + case None => if (insertln()) None else null + case optv => optv + } + case INode.KEY_PRESENT => + ln.get(k) match { + case Some(v0) => if (insertln()) Some(v0) else null + case None => None + } + case otherv => + ln.get(k) match { + case Some(v0) if (if (fullEquals) v0 == otherv else v0.asInstanceOf[AnyRef] eq otherv) => + if (insertln()) Some(otherv.asInstanceOf[V]) else null + case _ => None + } + } + case mainNode => throw new MatchError(mainNode) + } + } + + /** Looks up the value associated with the key. + * + * @param hc the hashcode of `k` + * + * @return NO_SUCH_ELEMENT_SENTINEL if no value has been found, RESTART if the operation wasn't successful, + * or any other value otherwise + */ + @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = { + val m = GCAS_READ(ct) // use -Yinline! 
+ + m match { + case cn: CNode[K, V] => // 1) a multinode + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + if ((bmp & flag) == 0) NO_SUCH_ELEMENT_SENTINEL // 1a) bitmap shows no binding + else { // 1b) bitmap contains a value - descend + val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + sub match { + case in: INode[K, V] @uc => + if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) + else RESTART + } + case sn: SNode[K, V] @uc => // 2) singleton node + if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] + else NO_SUCH_ELEMENT_SENTINEL + case basicNode => throw new MatchError(basicNode) + } + } + case tn: TNode[_, _] => // 3) non-live node + def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { + clean(parent, ct, lev - 5) + RESTART + } else { + if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef] + else NO_SUCH_ELEMENT_SENTINEL + } + cleanReadOnly(tn) + case ln: LNode[K, V] => // 5) an l-node + ln.get(k).asInstanceOf[Option[AnyRef]].getOrElse(NO_SUCH_ELEMENT_SENTINEL) + case mainNode => throw new MatchError(mainNode) + } + } + + /** Removes the key associated with the given value. + * + * @param hc the hashcode of `k` + * + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * + * @return null if not successful, an Option[V] indicating the previous value otherwise + */ + def rec_remove( + k: K, + v: V, + removalPolicy: Int, + hc: Int, + lev: Int, + parent: INode[K, V], + startgen: Gen, + ct: TrieMap[K, V]): Option[V] = { + + val m = GCAS_READ(ct) // use -Yinline! 
+ + m match { + case cn: CNode[K, V] => + val idx = (hc >>> lev) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) None + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + val res = sub match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_remove(k, v, removalPolicy, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removalPolicy, hc, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct) && RemovalPolicy.shouldRemove(removalPolicy)(sn.v, v)) { + val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) + if (GCAS(cn, ncn, ct)) Some(sn.v) else null + } else None + case basicNode => throw new MatchError(basicNode) + } + + if (res == None || (res eq null)) res + else { + @tailrec def cleanParent(nonlive: AnyRef): Unit = { + val pm = parent.GCAS_READ(ct) + pm match { + case cn: CNode[K, V] => + val idx = (hc >>> (lev - 5)) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + if (sub eq this) (nonlive: @uc) match { + case tn: TNode[K, V] @uc => + val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) + if (!parent.GCAS(cn, ncn, ct)) + if (ct.readRoot().gen == startgen) cleanParent(nonlive) + } + } + case _ => // parent is no longer a cnode, we're done + } + } + + if (parent ne null) { // never tomb at root + val n = GCAS_READ(ct) + if (n.isInstanceOf[TNode[_, _]]) + cleanParent(n) + } + + res + } + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => + if (removalPolicy == RemovalPolicy.Always) { + val optv = ln.get(k) + val nn = ln.removed(k, ct) + if (GCAS(ln, nn, ct)) optv else null + } else ln.get(k) match { + case optv @ Some(v0) if 
RemovalPolicy.shouldRemove(removalPolicy)(v, v0) => + val nn = ln.removed(k, ct) + if (GCAS(ln, nn, ct)) optv else null + case _ => None + } + case mainNode => throw new MatchError(mainNode) + } + } + + private def clean(nd: INode[K, V], ct: TrieMap[K, V], lev: Int): Unit = { + val m = nd.GCAS_READ(ct) + m match { + case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct) + case _ => + } + } + + def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null + + def cachedSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).cachedSize(ct) + + def knownSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).knownSize() + + /* this is a quiescent method! */ + def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { + case null => "<null>" + case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc) + case cn: CNode[_, _] => cn.string(lev) + case ln: LNode[_, _] => ln.string(lev) + case x => "<elem: %s>".format(x) + }) + +} + + +private[concurrent] object INode { + //////////////////////////////////////////////////////////////////////////////////////////////////// + // Arguments for `cond` argument in TrieMap#rec_insertif + //////////////////////////////////////////////////////////////////////////////////////////////////// + final val KEY_PRESENT = new AnyRef + final val KEY_ABSENT = new AnyRef + final val KEY_PRESENT_OR_ABSENT = new AnyRef + + def newRootNode[sealed K, sealed V](equiv: Equiv[K]) = { + val gen = new Gen + val cn = new CNode[K, V](0, new Array(0), gen) + new INode[K, V](cn, gen, equiv) + } +} + + +private[concurrent] final class FailedNode[sealed K, sealed V](p: MainNode[K, V]) extends MainNode[K, V] { + WRITE_PREV(p) + + def string(lev: Int) = throw new UnsupportedOperationException + + def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException + + def knownSize: Int = throw new UnsupportedOperationException + + override def toString = "FailedNode(%s)".format(p) +} + + +private[concurrent] trait KVNode[sealed K, sealed V] { + 
def kvPair: (K, V) +} + + +private[collection] final class SNode[sealed K, sealed V](final val k: K, final val v: V, final val hc: Int) + extends BasicNode with KVNode[K, V] { + def copy = new SNode(k, v, hc) + def copyTombed = new TNode(k, v, hc) + def copyUntombed = new SNode(k, v, hc) + def kvPair = (k, v) + def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) +} + +// Tomb Node, used to ensure proper ordering during removals +private[collection] final class TNode[sealed K, sealed V](final val k: K, final val v: V, final val hc: Int) + extends MainNode[K, V] with KVNode[K, V] { + def copy = new TNode(k, v, hc) + def copyTombed = new TNode(k, v, hc) + def copyUntombed = new SNode(k, v, hc) + def kvPair = (k, v) + def cachedSize(ct: AnyRef): Int = 1 + def knownSize: Int = 1 + def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) +} + +// List Node, leaf node that handles hash collisions +private[collection] final class LNode[sealed K, sealed V](val entries: List[(K, V)], equiv: Equiv[K]) + extends MainNode[K, V] { + + def this(k: K, v: V, equiv: Equiv[K]) = this((k -> v) :: Nil, equiv) + + def this(k1: K, v1: V, k2: K, v2: V, equiv: Equiv[K]) = + this(if (equiv.equiv(k1, k2)) (k2 -> v2) :: Nil else (k1 -> v1) :: (k2 -> v2) :: Nil, equiv) + + def inserted(k: K, v: V) = { + var k0: K = k + @tailrec + def remove(elems: List[(K, V)], acc: List[(K, V)]): List[(K, V)] = { + if (elems.isEmpty) acc + else if (equiv.equiv(elems.head._1, k)) { + k0 = elems.head._1 + acc ::: elems.tail + } else remove(elems.tail, elems.head :: acc) + } + val e = remove(entries, Nil) + new LNode((k0 -> v) :: e, equiv) + } + + def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = { + val updmap = entries.filterNot(entry => equiv.equiv(entry._1, k)) + if (updmap.sizeIs > 1) new LNode(updmap, equiv) + else { + val (k, v) = updmap.iterator.next() + new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses 
+ } + } + + def get(k: K): Option[V] = entries.find(entry => equiv.equiv(entry._1, k)).map(_._2) + + def cachedSize(ct: AnyRef): Int = entries.size + + def knownSize: Int = -1 // shouldn't ever be empty, and the size of a list is not known + + def string(lev: Int) = (" " * lev) + "LNode(%s)".format(entries.mkString(", ")) + +} + +// Ctrie Node, contains bitmap and array of references to branch nodes +private[collection] final class CNode[sealed K, sealed V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { + // this should only be called from within read-only snapshots + def cachedSize(ct: AnyRef): Int = { + val currsz = READ_SIZE() + if (currsz != -1) currsz + else { + val sz = computeSize(ct.asInstanceOf[TrieMap[K, V]]) + while (READ_SIZE() == -1) CAS_SIZE(-1, sz) + READ_SIZE() + } + } + + def knownSize: Int = READ_SIZE() // this should only ever return -1 if unknown + + // lends itself towards being parallelizable by choosing + // a random starting offset in the array + // => if there are concurrent size computations, they start + // at different positions, so they are more likely to + // to be independent + private def computeSize(ct: TrieMap[K, V]): Int = { + var i = 0 + var sz = 0 + val offset = + if (array.length > 0) + //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */ + java.util.concurrent.ThreadLocalRandom.current.nextInt(0, array.length) + else 0 + while (i < array.length) { + val pos = (i + offset) % array.length + array(pos) match { + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] @uc => sz += in.cachedSize(ct) + case basicNode => throw new MatchError(basicNode) + } + i += 1 + } + sz + } + + def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { + val len = array.length + val narr = new Array[BasicNode](len) + Array.copy(array, 0, narr, 0, len) + narr(pos) = nn + new CNode[K, V](bitmap, narr, gen) + } + + def removedAt(pos: Int, flag: Int, gen: Gen) = { + val 
arr = array + val len = arr.length + val narr = new Array[BasicNode](len - 1) + Array.copy(arr, 0, narr, 0, pos) + Array.copy(arr, pos + 1, narr, pos, len - pos - 1) + new CNode[K, V](bitmap ^ flag, narr, gen) + } + + def insertedAt(pos: Int, flag: Int, k: K, v: V, hc: Int, gen: Gen) = { + val len = array.length + val bmp = bitmap + val narr = new Array[BasicNode](len + 1) + Array.copy(array, 0, narr, 0, pos) + narr(pos) = new SNode(k, v, hc) + Array.copy(array, pos, narr, pos + 1, len - pos) + new CNode[K, V](bmp | flag, narr, gen) + } + + /** Returns a copy of this cnode such that all the i-nodes below it are copied + * to the specified generation `ngen`. + */ + def renewed(ngen: Gen, ct: TrieMap[K, V]) = { + var i = 0 + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len) + while (i < len) { + arr(i) match { + case in: INode[K, V] @uc => narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn + } + i += 1 + } + new CNode[K, V](bitmap, narr, ngen) + } + + private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match { + case tn: TNode[_, _] => tn.copyUntombed + case _ => inode + } + + def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { + case sn: SNode[K, V] @uc => sn.copyTombed + case _ => this + } else this + + // - if the branching factor is 1 for this CNode, and the child + // is a tombed SNode, returns its tombed version + // - otherwise, if there is at least one non-null node below, + // returns the version of this node with at least some null-inodes + // removed (those existing when the op began) + // - if there are only null-i-nodes below, returns null + def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = { + val bmp = bitmap + var i = 0 + val arr = array + val tmparray = new Array[BasicNode](arr.length) + while (i < arr.length) { // construct new bitmap + val sub = arr(i) + sub match { + case in: INode[K, V] @uc => + val inodemain = 
in.gcasRead(ct) + assert(inodemain ne null) + tmparray(i) = resurrect(in, inodemain) + case sn: SNode[K, V] @uc => + tmparray(i) = sn + case basicNode => throw new MatchError(basicNode) + } + i += 1 + } + + new CNode[K, V](bmp, tmparray, gen).toContracted(lev) + } + + def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) + + override def toString = { + def elems: Seq[String] = array.flatMap { + case sn: SNode[K, V] @uc => Iterable.single(sn.kvPair._2.toString) + case in: INode[K, V] @uc => Iterable.single(augmentString(in.toString).drop(14) + "(" + in.gen + ")") + case basicNode => throw new MatchError(basicNode) + } + f"CNode(sz: ${elems.size}%d; ${elems.sorted.mkString(", ")})" + } +} + +private[concurrent] object CNode { + + def dual[sealed K, sealed V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { + val xidx = (xhc >>> lev) & 0x1f + val yidx = (yhc >>> lev) & 0x1f + val bmp = (1 << xidx) | (1 << yidx) + if (xidx == yidx) { + val subinode = new INode[K, V](gen, equiv)//(TrieMap.inodeupdater) + subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen, equiv) + new CNode(bmp, Array(subinode), gen) + } else { + if (xidx < yidx) new CNode(bmp, Array(x, y), gen) + else new CNode(bmp, Array(y, x), gen) + } + } else { + new LNode(x.k, x.v, y.k, y.v, equiv) + } + +} + + +private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) { + @volatile var committed = false +} + + +/** A concurrent hash-trie or TrieMap is a concurrent thread-safe lock-free + * implementation of a hash array mapped trie. It is used to implement the + * concurrent map abstraction. It has particularly scalable concurrent insert + * and remove operations and is memory-efficient. It supports O(1), atomic, + * lock-free snapshots which are used to implement linearizable lock-free size, + * iterator and clear operations. 
The cost of evaluating the (lazy) snapshot is + * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. + * + * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] + */ +@SerialVersionUID(-5212455458703321708L) +final class TrieMap[sealed K, sealed V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) + extends scala.collection.mutable.AbstractMap[K, V] + with scala.collection.concurrent.Map[K, V] + with scala.collection.mutable.MapOps[K, V, TrieMap, TrieMap[K, V]] + with scala.collection.MapFactoryDefaults[K, V, TrieMap, mutable.Iterable] + with DefaultSerializable { + + private[this] var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf + private[this] var equalityobj = ef + @transient + private[this] var rootupdater = rtupd + def hashing = hashingobj + def equality = equalityobj + @volatile private var root = r + + def this(hashf: Hashing[K], ef: Equiv[K]) = this( + INode.newRootNode(ef), + AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"), + hashf, + ef + ) + + def this() = this(Hashing.default, Equiv.universal) + + override def mapFactory: MapFactory[TrieMap] = TrieMap + + /* internal methods */ + + private def writeObject(out: java.io.ObjectOutputStream): Unit = { + out.writeObject(hashingobj) + out.writeObject(equalityobj) + + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + out.writeObject(k) + out.writeObject(v) + } + out.writeObject(TrieMapSerializationEnd) + } + + private def readObject(in: java.io.ObjectInputStream): Unit = { + root = INode.newRootNode(equality) + rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root") + + hashingobj = in.readObject().asInstanceOf[Hashing[K]] + equalityobj = in.readObject().asInstanceOf[Equiv[K]] + + var obj: AnyRef = in.readObject() + + while (obj 
!= TrieMapSerializationEnd) { + obj = in.readObject() + if (obj != TrieMapSerializationEnd) { + val k = obj.asInstanceOf[K] + val v = in.readObject().asInstanceOf[V] + update(k, v) + } + } + } + + private def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) + + private[collection] def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort) + + private[concurrent] def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { + val r = /*READ*/root + r match { + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => RDCSS_Complete(abort) + case x => throw new MatchError(x) + } + } + + @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { + val v = /*READ*/root + v match { + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => + val RDCSS_Descriptor(ov, exp, nv) = desc + if (abort) { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } else { + val oldmain = ov.gcasRead(this) + if (oldmain eq exp) { + if (CAS_ROOT(desc, nv)) { + desc.committed = true + nv + } else RDCSS_Complete(abort) + } else { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } + } + case x => throw new MatchError(x) + } + } + + private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = { + val desc = RDCSS_Descriptor(ov, expectedmain, nv) + if (CAS_ROOT(ov, desc)) { + RDCSS_Complete(abort = false) + /*READ*/desc.committed + } else false + } + + @tailrec private def inserthc(k: K, hc: Int, v: V): Unit = { + val r = RDCSS_READ_ROOT() + if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) + } + + @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef, fullEquals: Boolean): Option[V] = { + val r = RDCSS_READ_ROOT() + + val ret = r.rec_insertif(k, v, hc, cond, fullEquals, 0, null, r.gen, this) + if (ret eq null) insertifhc(k, hc, v, cond, fullEquals) + else ret + } + + /** Finds the value associated with this key + * + * 
@param k the key to look up + * @param hc the hashcode of `k` + * + * @return the value: V associated with `k`, if it exists. Otherwise, INodeBase.NO_SUCH_ELEMENT_SENTINEL + */ + @tailrec private def lookuphc(k: K, hc: Int): AnyRef = { + val r = RDCSS_READ_ROOT() + val res = r.rec_lookup(k, hc, 0, null, r.gen, this) + if (res eq INodeBase.RESTART) lookuphc(k, hc) + else res + } + + /** Removes a key-value pair from the map + * + * @param k the key to remove + * @param v the value compare with the value found associated with the key + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * @return an Option[V] indicating the previous value + */ + @tailrec private def removehc(k: K, v: V, removalPolicy: Int, hc: Int): Option[V] = { + val r = RDCSS_READ_ROOT() + val res = r.rec_remove(k, v, removalPolicy, hc, 0, null, r.gen, this) + if (res ne null) res + else removehc(k, v, removalPolicy, hc) + } + + + def string = RDCSS_READ_ROOT().string(0) + + /* public methods */ + + def isReadOnly = rootupdater eq null + + def nonReadOnly = rootupdater ne null + + /** Returns a snapshot of this TrieMap. + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * in the snapshot or this TrieMap are accessed, they are rewritten. + * This means that the work of rebuilding both the snapshot and this + * TrieMap is distributed across all the threads doing updates or accesses + * subsequent to the snapshot creation. + */ + @tailrec def snapshot(): TrieMap[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.gcasRead(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality) + else snapshot() + } + + /** Returns a read-only snapshot of this TrieMap. + * This operation is lock-free and linearizable. 
+ * + * The snapshot is lazily updated - the first time some branch + * of this TrieMap are accessed, it is rewritten. The work of creating + * the snapshot is thus distributed across subsequent updates + * and accesses on this TrieMap by all threads. + * Note that the snapshot itself is never rewritten unlike when calling + * the `snapshot` method, but the obtained snapshot cannot be modified. + * + * This method is used by other methods such as `size` and `iterator`. + */ + @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.gcasRead(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality) + else readOnlySnapshot() + } + + @tailrec override def clear(): Unit = { + val r = RDCSS_READ_ROOT() + if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V](equality))) clear() + } + + def computeHash(k: K) = hashingobj.hash(k) + + @deprecated("Use getOrElse(k, null) instead.", "2.13.0") + def lookup(k: K): V = { + val hc = computeHash(k) + val lookupRes = lookuphc(k, hc) + val res = if (lookupRes == INodeBase.NO_SUCH_ELEMENT_SENTINEL) null else lookupRes + res.asInstanceOf[V] + } + + override def apply(k: K): V = { + val hc = computeHash(k) + val res = lookuphc(k, hc) + if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) throw new NoSuchElementException + else res.asInstanceOf[V] + } + + def get(k: K): Option[V] = { + val hc = computeHash(k) + val res = lookuphc(k, hc) + if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) None else Some(res).asInstanceOf[Option[V]] + } + + override def put(key: K, value: V): Option[V] = { + val hc = computeHash(key) + insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT, fullEquals = false /* unused */) + } + + override def update(k: K, v: V): Unit = { + val hc = computeHash(k) + inserthc(k, hc, v) + } + + def addOne(kv: (K, V)) = { + update(kv._1, kv._2) + this + } + + override def remove(k: K): Option[V] = { + val hc = computeHash(k) + 
removehc(k = k, v = null.asInstanceOf[V], RemovalPolicy.Always, hc = hc) + } + + def subtractOne(k: K) = { + remove(k) + this + } + + def putIfAbsent(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) + } + + // TODO once computeIfAbsent is added to concurrent.Map, + // move the comment there and tweak the 'at most once' part + /** If the specified key is not already in the map, computes its value using + * the given thunk `op` and enters it into the map. + * + * If the specified mapping function throws an exception, + * that exception is rethrown. + * + * Note: This method will invoke op at most once. + * However, `op` may be invoked without the result being added to the map if + * a concurrent process is also trying to add a value corresponding to the + * same key `k`. + * + * @param k the key to modify + * @param op the expression that computes the value + * @return the newly added value + */ + override def getOrElseUpdate(k: K, op: => V): V = { + val hc = computeHash(k) + lookuphc(k, hc) match { + case INodeBase.NO_SUCH_ELEMENT_SENTINEL => + val v = op + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) match { + case Some(oldValue) => oldValue + case None => v + } + case oldValue => oldValue.asInstanceOf[V] + } + } + + def remove(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, RemovalPolicy.FullEquals, hc).nonEmpty + } + + override private[collection] def removeRefEq(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, RemovalPolicy.ReferenceEq, hc).nonEmpty + } + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef], fullEquals = true).nonEmpty + } + + override private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newValue, oldValue.asInstanceOf[AnyRef], fullEquals = 
false).nonEmpty + } + + def replace(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_PRESENT, fullEquals = false /* unused */) + } + + def iterator: Iterator[(K, V)] = { + if (nonReadOnly) readOnlySnapshot().iterator + else new TrieMapIterator(0, this) + } + + //////////////////////////////////////////////////////////////////////////// + // + // scala/bug#10177 These methods need overrides as the inherited implementations + // call `.iterator` more than once, which doesn't guarantee a coherent + // view of the data if there is a concurrent writer + // Note that the we don't need overrides for keysIterator or valuesIterator + // TrieMapTest validates the behaviour. + override def values: Iterable[V] = { + if (nonReadOnly) readOnlySnapshot().values + else super.values + } + override def keySet: Set[K] = { + if (nonReadOnly) readOnlySnapshot().keySet + else super.keySet + } + + override def view: MapView[K, V] = if (nonReadOnly) readOnlySnapshot().view else super.view + + @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") + override def filterKeys(p: K => Boolean): collection.MapView[K, V]^{p} = view.filterKeys(p) + + @deprecated("Use .view.mapValues(f). 
A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") + override def mapValues[W](f: V => W): collection.MapView[K, W]^{f} = view.mapValues(f) + // END extra overrides + /////////////////////////////////////////////////////////////////// + + override def size: Int = + if (nonReadOnly) readOnlySnapshot().size + else RDCSS_READ_ROOT().cachedSize(this) + override def knownSize: Int = + if (nonReadOnly) -1 + else RDCSS_READ_ROOT().knownSize(this) + override def isEmpty: Boolean = + (if (nonReadOnly) readOnlySnapshot() else this).sizeIs == 0 // sizeIs checks knownSize + override protected[this] def className = "TrieMap" + + override def lastOption: Option[(K, V)] = if (isEmpty) None else Try(last).toOption +} + + +@SerialVersionUID(3L) +object TrieMap extends MapFactory[TrieMap] { + + def empty[sealed K, sealed V]: TrieMap[K, V] = new TrieMap[K, V] + + def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): TrieMap[K, V] = new TrieMap[K, V]() ++= it + + def newBuilder[sealed K, sealed V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) + + @transient + val inodeupdater: AtomicReferenceFieldUpdater[INodeBase[_, _], MainNode[_, _]] = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") + + class MangledHashing[K] extends Hashing[K] { + def hash(k: K): Int = scala.util.hashing.byteswap32(k.##) + } + + private[concurrent] object RemovalPolicy { + final val Always = 0 + final val FullEquals = 1 + final val ReferenceEq = 2 + + def shouldRemove[V](removalPolicy: Int)(a: V, b: V): Boolean = + removalPolicy match { + case Always => true + case FullEquals => a == b + case ReferenceEq => a.asInstanceOf[AnyRef] eq b.asInstanceOf[AnyRef] + } + } +} + +// non-final as an extension point for parallel collections +private[collection] class TrieMapIterator[sealed K, sealed V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) 
extends AbstractIterator[(K, V)] { + private val stack = new Array[Array[BasicNode]](7) + private val stackpos = new Array[Int](7) + private var depth = -1 + private var subiter: Iterator[(K, V)] = null + private var current: KVNode[K, V] = null + + if (mustInit) initialize() + + def hasNext = (current ne null) || (subiter ne null) + + def next() = if (hasNext) { + var r: (K, V) = null + if (subiter ne null) { + r = subiter.next() + checkSubiter() + } else { + r = current.kvPair + advance() + } + r + } else Iterator.empty.next() + + private def readin(in: INode[K, V]) = in.gcasRead(ct) match { + case cn: CNode[K, V] => + depth += 1 + stack(depth) = cn.array + stackpos(depth) = -1 + advance() + case tn: TNode[K, V] => + current = tn + case ln: LNode[K, V] => + subiter = ln.entries.iterator + checkSubiter() + case null => + current = null + case mainNode => throw new MatchError(mainNode) + } + + private def checkSubiter() = if (!subiter.hasNext) { + subiter = null + advance() + } + + private def initialize(): Unit = { + assert(ct.isReadOnly) + + val r = ct.RDCSS_READ_ROOT() + readin(r) + } + + @tailrec + final def advance(): Unit = if (depth >= 0) { + val npos = stackpos(depth) + 1 + if (npos < stack(depth).length) { + stackpos(depth) = npos + stack(depth)(npos) match { + case sn: SNode[K, V] @uc => current = sn + case in: INode[K, V] @uc => readin(in) + case basicNode => throw new MatchError(basicNode) + } + } else { + depth -= 1 + advance() + } + } else current = null + + protected def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): TrieMapIterator[K, V] = new TrieMapIterator[K, V](_lev, _ct, _mustInit) + + protected def dupTo(it: TrieMapIterator[K, V]): Unit = { + it.level = this.level + it.ct = this.ct + it.depth = this.depth + it.current = this.current + + // these need a deep copy + Array.copy(this.stack, 0, it.stack, 0, 7) + Array.copy(this.stackpos, 0, it.stackpos, 0, 7) + + // this one needs to be evaluated + if (this.subiter == null) 
it.subiter = null + else { + val lst = this.subiter.to(immutable.List) + this.subiter = lst.iterator + it.subiter = lst.iterator + } + } + + /** Returns a sequence of iterators over subsets of this iterator. + * It's used to ease the implementation of splitters for a parallel version of the TrieMap. + */ + protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) { + // the case where an LNode is being iterated + val it = newIterator(level + 1, ct, _mustInit = false) + it.depth = -1 + it.subiter = this.subiter + it.current = null + this.subiter = null + advance() + this.level += 1 + Seq(it, this) + } else if (depth == -1) { + this.level += 1 + Seq(this) + } else { + var d = 0 + while (d <= depth) { + val rem = stack(d).length - 1 - stackpos(d) + if (rem > 0) { + val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) + stack(d) = arr1 + stackpos(d) = -1 + val it = newIterator(level + 1, ct, _mustInit = false) + val xss: Array[Array[BasicNode]] = it.stack.asInstanceOf + // !!! cc split into separate xss and asInstanceOf needed because cc gets confused with + // two-dimensinal invariant arrays + xss(0) = arr2 + it.stackpos(0) = -1 + it.depth = 0 + it.advance() // <-- fix it + this.level += 1 + return Seq(this, it) + } + d += 1 + } + this.level += 1 + Seq(this) + } + +} + +/** Only used for ctrie serialization. */ +@SerialVersionUID(3L) +private[concurrent] case object TrieMapSerializationEnd diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala new file mode 100644 index 000000000000..bfae792c5107 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala @@ -0,0 +1,261 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.{unchecked => uc} +import language.experimental.captureChecking + +/** Defines converter methods from Scala to Java collections. + * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. + */ +trait AsJavaConverters { + import JavaCollectionWrappers._ + + /** + * Converts a Scala `Iterator` to a Java `Iterator`. + * + * The returned Java `Iterator` is backed by the provided Scala `Iterator` and any side-effects of + * using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterator` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Iterator` will be returned. + * + * @param i The Scala `Iterator` to be converted. + * @return A Java `Iterator` view of the argument. + */ + def asJava[A](i: Iterator[A]): ju.Iterator[A] = i match { + case null => null + case wrapper: JIteratorWrapper[A @uc] => wrapper.underlying + case _ => new IteratorWrapper(i) + } + + /** + * Converts a Scala `Iterator` to a Java `Enumeration`. + * + * The returned Java `Enumeration` is backed by the provided Scala `Iterator` and any side-effects + * of using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterator` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Enumeration` will be returned. + * + * @param i The Scala `Iterator` to be converted. + * @return A Java `Enumeration` view of the argument. 
+ */ + def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match { + case null => null + case wrapper: JEnumerationWrapper[A @uc] => wrapper.underlying + case _ => new IteratorWrapper(i) + } + + /** + * Converts a Scala `Iterable` to a Java `Iterable`. + * + * The returned Java `Iterable` is backed by the provided Scala `Iterable` and any side-effects of + * using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterable` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Iterable` will be returned. + * + * @param i The Scala `Iterable` to be converted. + * @return A Java `Iterable` view of the argument. + */ + def asJava[A](i: Iterable[A]): jl.Iterable[A] = i match { + case null => null + case wrapper: JIterableWrapper[A @uc] => wrapper.underlying + case _ => new IterableWrapper(i) + } + + /** + * Converts a Scala `Iterable` to an immutable Java `Collection`. + * + * If the Scala `Iterable` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Collection` will be returned. + * + * @param i The Scala `Iterable` to be converted. + * @return A Java `Collection` view of the argument. + */ + def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match { + case null => null + case wrapper: JCollectionWrapper[A @uc] => wrapper.underlying + case _ => new IterableWrapper(i) + } + + /** + * Converts a Scala mutable `Buffer` to a Java List. + * + * The returned Java List is backed by the provided Scala `Buffer` and any side-effects of using + * it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Buffer` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `List` will be returned. + * + * @param b The Scala `Buffer` to be converted. + * @return A Java `List` view of the argument. 
+ */ + def asJava[A](b: mutable.Buffer[A]): ju.List[A] = b match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new MutableBufferWrapper(b) + } + + /** + * Converts a Scala mutable `Seq` to a Java `List`. + * + * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Seq` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `List` will be returned. + * + * @param s The Scala `Seq` to be converted. + * @return A Java `List` view of the argument. + */ + def asJava[A](s: mutable.Seq[A]): ju.List[A] = s match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new MutableSeqWrapper(s) + } + + /** + * Converts a Scala `Seq` to a Java `List`. + * + * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Seq` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `List` will be returned. + * + * @param s The Scala `Seq` to be converted. + * @return A Java `List` view of the argument. + */ + def asJava[A](s: Seq[A]): ju.List[A] = s match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new SeqWrapper(s) + } + + /** + * Converts a Scala mutable `Set` to a Java `Set`. + * + * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Set` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Set` will be returned. + * + * @param s The Scala mutable `Set` to be converted. 
+ * @return A Java `Set` view of the argument. + */ + def asJava[A](s: mutable.Set[A]): ju.Set[A] = s match { + case null => null + case wrapper: JSetWrapper[A @uc] => wrapper.underlying + case _ => new MutableSetWrapper(s) + } + + /** + * Converts a Scala `Set` to a Java `Set`. + * + * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Set` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Set` will be returned. + * + * @param s The Scala `Set` to be converted. + * @return A Java `Set` view of the argument. + */ + def asJava[A](s: Set[A]): ju.Set[A] = s match { + case null => null + case wrapper: JSetWrapper[A @uc] => wrapper.underlying + case _ => new SetWrapper(s) + } + + /** + * Converts a Scala mutable `Map` to a Java `Map`. + * + * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Map` will be returned. + * + * @param m The Scala mutable `Map` to be converted. + * @return A Java `Map` view of the argument. + */ + def asJava[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = m match { + case null => null + case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new MutableMapWrapper(m) + } + + /** + * Converts a Scala mutable `Map` to a Java `Dictionary`. + * + * The returned Java `Dictionary` is backed by the provided Scala `Dictionary` and any + * side-effects of using it via the Java interface will be visible via the Scala interface and + * vice versa. 
+ * + * If the Scala `Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Dictionary` will be returned. + * + * @param m The Scala `Map` to be converted. + * @return A Java `Dictionary` view of the argument. + */ + def asJavaDictionary[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = m match { + case null => null + case wrapper: JDictionaryWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new DictionaryWrapper(m) + } + + /** + * Converts a Scala `Map` to a Java `Map`. + * + * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Map` will be returned. + * + * @param m The Scala `Map` to be converted. + * @return A Java `Map` view of the argument. + */ + def asJava[K, V](m: Map[K, V]): ju.Map[K, V] = m match { + case null => null + case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new MapWrapper(m) + } + + /** + * Converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * + * The returned Java `ConcurrentMap` is backed by the provided Scala `concurrent.Map` and any + * side-effects of using it via the Java interface will be visible via the Scala interface and + * vice versa. + * + * If the Scala `concurrent.Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `ConcurrentMap` will be returned. + * + * @param m The Scala `concurrent.Map` to be converted. + * @return A Java `ConcurrentMap` view of the argument. 
+ */ + def asJava[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = m match { + case null => null + case wrapper: JConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new ConcurrentMapWrapper(m) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala new file mode 100644 index 000000000000..14268f7aa165 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala @@ -0,0 +1,109 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} +import language.experimental.captureChecking + +/** Defines `asJava` extension methods, available through [[scala.jdk.CollectionConverters]]. */ +trait AsJavaExtensions { + import scala.jdk.javaapi.{CollectionConverters => conv} + + implicit class IteratorHasAsJava[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Iterator`, see + * [[AsJavaConverters.asJava[A](i:Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Iterator[A] = conv.asJava(i) + + /** Converts a Scala `Iterator` to a Java `Enumeration`, see + * [[AsJavaConverters.asJavaEnumeration `scala.jdk.javaapi.CollectionConverters.asJavaEnumeration`]]. + */ + def asJavaEnumeration: ju.Enumeration[A] = conv.asJavaEnumeration(i) + } + + implicit class IterableHasAsJava[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Iterable`, see + * [[AsJavaConverters.asJava[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. 
+ */ + def asJava: jl.Iterable[A] = conv.asJava(i) + + /** Converts a Scala `Iterator` to a Java `Collection`, see + * [[AsJavaConverters.asJavaCollection `scala.jdk.javaapi.CollectionConverters.asJavaCollection`]]. + */ + def asJavaCollection: ju.Collection[A] = conv.asJavaCollection(i) + } + + implicit class BufferHasAsJava[A](b: mutable.Buffer[A]) { + /** Converts a Scala `Buffer` to a Java `List`, see + * [[AsJavaConverters.asJava[A](b:scala\.collection\.mutable\.Buffer[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(b) + } + + implicit class MutableSeqHasAsJava[A](s: mutable.Seq[A]) { + /** Converts a Scala `Seq` to a Java `List`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(s) + } + + implicit class SeqHasAsJava[A](s: Seq[A]) { + /** Converts a Scala `Seq` to a Java `List`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(s) + } + + implicit class MutableSetHasAsJava[A](s: mutable.Set[A]) { + /** Converts a Scala `mutable.Set` to a Java `Set`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Set[A] = conv.asJava(s) + } + + implicit class SetHasAsJava[A](s: Set[A]) { + /** Converts a Scala `Set` to a Java `Set`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Set[A] = conv.asJava(s) + } + + implicit class MutableMapHasAsJava[K, V](m: mutable.Map[K, V]) { + /** Converts a Scala `mutable.Map` to a Java `Map`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.mutable\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. 
+ */ + def asJava: ju.Map[K, V] = conv.asJava(m) + + /** Converts a Scala `mutable.Map` to a Java `Map`, see + * [[AsJavaConverters.asJavaDictionary `scala.jdk.javaapi.CollectionConverters.asJavaDictionary`]]. + */ + def asJavaDictionary: ju.Dictionary[K, V] = conv.asJavaDictionary(m) + } + + implicit class MapHasAsJava[K, V](m: Map[K, V]) { + /** Converts a Scala `Map` to a Java `Map`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Map[K, V] = conv.asJava(m) + } + + implicit class ConcurrentMapHasAsJava[K, V](m: concurrent.Map[K, V]) { + /** Converts a Scala `concurrent.Map` to a Java `ConcurrentMap`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.concurrent\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: juc.ConcurrentMap[K, V] = conv.asJava(m) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala new file mode 100644 index 000000000000..6cc02b13bb06 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala @@ -0,0 +1,208 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.{unchecked => uc} +import language.experimental.captureChecking + +/** Defines converter methods from Java to Scala collections. + * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. 
trait AsScalaConverters {
  import JavaCollectionWrappers._

  /** Converts a Java `Iterator` to a Scala `Iterator`.
   *
   * The result is backed by the argument: side effects through either interface are
   * visible through the other. If the Java `Iterator` was itself obtained via `asJava`,
   * the original Scala `Iterator` is unwrapped and returned.
   *
   * @param i the Java `Iterator` to be converted
   * @return  a Scala `Iterator` view of the argument
   */
  def asScala[A](i: ju.Iterator[A]): Iterator[A] = i match {
    case null                            => null
    case wrapper: IteratorWrapper[A @uc] => wrapper.underlying
    case _                               => new JIteratorWrapper(i)
  }

  /** Converts a Java `Enumeration` to a Scala `Iterator`.
   *
   * The result is backed by the argument. If the Java `Enumeration` was itself obtained
   * via `asJavaEnumeration`, the original Scala `Iterator` is unwrapped and returned.
   *
   * @param e the Java `Enumeration` to be converted
   * @return  a Scala `Iterator` view of the argument
   */
  def asScala[A](e: ju.Enumeration[A]): Iterator[A] = e match {
    case null                            => null
    case wrapper: IteratorWrapper[A @uc] => wrapper.underlying
    case _                               => new JEnumerationWrapper(e)
  }

  /** Converts a Java `Iterable` to a Scala `Iterable`.
   *
   * The result is backed by the argument. If the Java `Iterable` was itself obtained
   * via `asJava`, the original Scala `Iterable` is unwrapped and returned.
   *
   * @param i the Java `Iterable` to be converted
   * @return  a Scala `Iterable` view of the argument
   */
  def asScala[A](i: jl.Iterable[A]): Iterable[A] = i match {
    case null                            => null
    case wrapper: IterableWrapper[A @uc] => wrapper.underlying
    case _                               => new JIterableWrapper(i)
  }

  /** Converts a Java `Collection` to a Scala `Iterable`.
   *
   * If the Java `Collection` was itself obtained via `asJavaCollection`, the original
   * Scala `Iterable` is unwrapped and returned.
   *
   * @param c the Java `Collection` to be converted
   * @return  a Scala `Iterable` view of the argument
   */
  def asScala[A](c: ju.Collection[A]): Iterable[A] = c match {
    case null                            => null
    case wrapper: IterableWrapper[A @uc] => wrapper.underlying
    case _                               => new JCollectionWrapper(c)
  }

  /** Converts a Java `List` to a Scala mutable `Buffer`.
   *
   * The result is backed by the argument. If the Java `List` was itself obtained via
   * `asJava`, the original Scala `Buffer` is unwrapped and returned.
   *
   * @param l the Java `List` to be converted
   * @return  a Scala mutable `Buffer` view of the argument
   */
  def asScala[A](l: ju.List[A]): mutable.Buffer[A] = l match {
    case null                                 => null
    case wrapper: MutableBufferWrapper[A @uc] => wrapper.underlying
    case _                                    => new JListWrapper(l)
  }

  /** Converts a Java `Set` to a Scala mutable `Set`.
   *
   * The result is backed by the argument. If the Java `Set` was itself obtained via
   * `asJava`, the original Scala `Set` is unwrapped and returned.
   *
   * @param s the Java `Set` to be converted
   * @return  a Scala mutable `Set` view of the argument
   */
  def asScala[A](s: ju.Set[A]): mutable.Set[A] = s match {
    case null                              => null
    case wrapper: MutableSetWrapper[A @uc] => wrapper.underlying
    case _                                 => new JSetWrapper(s)
  }

  /** Converts a Java `Map` to a Scala mutable `Map`.
   *
   * The result is backed by the argument. If the Java `Map` was itself obtained via
   * `asJava`, the original Scala `Map` is unwrapped and returned.
   *
   * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`),
   * it is your responsibility to wrap all non-atomic operations with
   * `underlying.synchronized`. This includes `get`, as `java.util.Map`'s API does not
   * allow for an atomic `get` when `null` values may be present.
   *
   * @param m the Java `Map` to be converted
   * @return  a Scala mutable `Map` view of the argument
   */
  def asScala[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = m match {
    case null                                      => null
    case wrapper: MutableMapWrapper[K @uc, V @uc]  => wrapper.underlying
    case _                                         => new JMapWrapper(m)
  }

  /** Converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`.
   *
   * The result is backed by the argument. If the Java `ConcurrentMap` was itself
   * obtained via `asJava`, the original Scala `ConcurrentMap` is unwrapped and returned.
   *
   * @param m the Java `ConcurrentMap` to be converted
   * @return  a Scala mutable `ConcurrentMap` view of the argument
   */
  def asScala[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = m match {
    case null                                         => null
    case wrapper: ConcurrentMapWrapper[K @uc, V @uc]  => wrapper.underlyingConcurrentMap
    case _                                            => new JConcurrentMapWrapper(m)
  }

  /** Converts a Java `Dictionary` to a Scala mutable `Map`.
   *
   * The result is backed by the argument. If the Java `Dictionary` was itself obtained
   * via `asJavaDictionary`, the original Scala `Map` is unwrapped and returned.
   *
   * @param d the Java `Dictionary` to be converted
   * @return  a Scala mutable `Map` view of the argument
   */
  def asScala[K, V](d: ju.Dictionary[K, V]): mutable.Map[K, V] = d match {
    case null                                     => null
    case wrapper: DictionaryWrapper[K @uc, V @uc] => wrapper.underlying
    case _                                        => new JDictionaryWrapper(d)
  }

  /** Converts a Java `Properties` to a Scala mutable `Map[String, String]`.
   *
   * The result is backed by the argument: side effects through either interface are
   * visible through the other.
   *
   * @param p the Java `Properties` to be converted
   * @return  a Scala mutable `Map[String, String]` view of the argument
   */
  def asScala(p: ju.Properties): mutable.Map[String, String] = p match {
    case null => null
    case _    => new JPropertiesWrapper(p)
  }
}
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala
package collection
package convert

import java.util.{concurrent => juc}
import java.{lang => jl, util => ju}
import language.experimental.captureChecking

/** Defines `asScala` extension methods, available through [[scala.jdk.CollectionConverters]]. */
trait AsScalaExtensions {
  import scala.jdk.javaapi.{CollectionConverters => conv}

  /** `asScala` extension for a Java `Iterator`, delegating to
   * [[AsScalaConverters.asScala[A](i:java\.util\.Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class IteratorHasAsScala[A](i: ju.Iterator[A]) {
    /** Converts a Java `Iterator` to a Scala `Iterator`. */
    def asScala: Iterator[A] = conv.asScala(i)
  }

  /** `asScala` extension for a Java `Enumeration`, delegating to
   * [[AsScalaConverters.asScala[A](e:java\.util\.Enumeration[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class EnumerationHasAsScala[A](e: ju.Enumeration[A]) {
    /** Converts a Java `Enumeration` to a Scala `Iterator`. */
    def asScala: Iterator[A] = conv.asScala(e)
  }

  /** `asScala` extension for a Java `Iterable`, delegating to
   * [[AsScalaConverters.asScala[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class IterableHasAsScala[A](i: jl.Iterable[A]) {
    /** Converts a Java `Iterable` to a Scala `Iterable`. */
    def asScala: Iterable[A] = conv.asScala(i)
  }

  /** `asScala` extension for a Java `Collection`, delegating to
   * [[AsScalaConverters.asScala[A](c:java\.util\.Collection[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class CollectionHasAsScala[A](c: ju.Collection[A]) {
    /** Converts a Java `Collection` to a Scala `Iterable`. */
    def asScala: Iterable[A] = conv.asScala(c)
  }

  /** `asScala` extension for a Java `List`, delegating to
   * [[AsScalaConverters.asScala[A](l:java\.util\.List[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class ListHasAsScala[A](l: ju.List[A]) {
    /** Converts a Java `List` to a Scala mutable `Buffer`. */
    def asScala: mutable.Buffer[A] = conv.asScala(l)
  }

  /** `asScala` extension for a Java `Set`, delegating to
   * [[AsScalaConverters.asScala[A](s:java\.util\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class SetHasAsScala[A](s: ju.Set[A]) {
    /** Converts a Java `Set` to a Scala mutable `Set`. */
    def asScala: mutable.Set[A] = conv.asScala(s)
  }

  /** `asScala` extension for a Java `Map`, delegating to
   * [[AsScalaConverters.asScala[A,B](m:java\.util\.Map[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class MapHasAsScala[K, V](m: ju.Map[K, V]) {
    /** Converts a Java `Map` to a Scala mutable `Map`. */
    def asScala: mutable.Map[K, V] = conv.asScala(m)
  }

  /** `asScala` extension for a Java `ConcurrentMap`, delegating to
   * [[AsScalaConverters.asScala[A,B](m:java\.util\.concurrent\.ConcurrentMap[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class ConcurrentMapHasAsScala[K, V](m: juc.ConcurrentMap[K, V]) {
    /** Converts a Java `ConcurrentMap` to a Scala `concurrent.Map`. */
    def asScala: concurrent.Map[K, V] = conv.asScala(m)
  }

  /** `asScala` extension for a Java `Dictionary`, delegating to
   * [[AsScalaConverters.asScala[A,B](d:java\.util\.Dictionary[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class DictionaryHasAsScala[K, V](d: ju.Dictionary[K, V]) {
    /** Converts a Java `Dictionary` to a Scala mutable `Map`. */
    def asScala: mutable.Map[K, V] = conv.asScala(d)
  }

  /** `asScala` extension for Java `Properties`, delegating to
   * [[AsScalaConverters.asScala(p:java\.util\.Properties)* `scala.jdk.javaapi.CollectionConverters.asScala`]].
   */
  implicit class PropertiesHasAsScala(i: ju.Properties) {
    /** Converts a Java `Properties` to a Scala mutable `Map[String, String]`. */
    def asScala: mutable.Map[String, String] = conv.asScala(i)
  }
}
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala
package collection
package convert

import java.util.{concurrent => juc}
import java.{lang => jl, util => ju}

import scala.collection.JavaConverters._
import scala.language.implicitConversions
import language.experimental.captureChecking

/** Defines implicit converter methods from Java to Scala collections. */
@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
trait ToScalaImplicits {
  /** Implicitly converts a Java `Iterator` to a Scala `Iterator`.
   * @see [[JavaConverters.asScalaIterator]]
   */
  implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it)

  /** Implicitly converts a Java `Enumeration` to a Scala `Iterator`.
   * @see [[JavaConverters.enumerationAsScalaIterator]]
   */
  implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i)

  /** Implicitly converts a Java `Iterable` to a Scala `Iterable`.
   * @see [[JavaConverters.iterableAsScalaIterable]]
   */
  implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i)

  /** Implicitly converts a Java `Collection` to an Scala `Iterable`.
   * @see [[JavaConverters.collectionAsScalaIterable]]
   */
  implicit def `collection AsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i)

  /** Implicitly converts a Java `List` to a Scala mutable `Buffer`.
   * @see [[JavaConverters.asScalaBuffer]]
   */
  implicit def `list asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l)

  /** Implicitly converts a Java `Set` to a Scala mutable `Set`.
   * @see [[JavaConverters.asScalaSet]]
   */
  implicit def `set asScala`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s)

  /** Implicitly converts a Java `Map` to a Scala mutable `Map`.
   * @see [[JavaConverters.mapAsScalaMap]]
   */
  implicit def `map AsScala`[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = mapAsScalaMap(m)

  /** Implicitly converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`.
   * @see [[JavaConverters.mapAsScalaConcurrentMap]]
   */
  implicit def `map AsScalaConcurrentMap`[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = mapAsScalaConcurrentMap(m)

  /** Implicitly converts a Java `Dictionary` to a Scala mutable `Map`.
   * @see [[JavaConverters.dictionaryAsScalaMap]]
   */
  implicit def `dictionary AsScalaMap`[K, V](p: ju.Dictionary[K, V]): mutable.Map[K, V] = dictionaryAsScalaMap(p)

  /** Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`.
   * @see [[JavaConverters.propertiesAsScalaMap]]
   */
  implicit def `properties AsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
}

/** Defines implicit conversions from Scala to Java collections. */
@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
trait ToJavaImplicits {
  /** Implicitly converts a Scala `Iterator` to a Java `Iterator`.
   * @see [[JavaConverters.asJavaIterator]]
   */
  implicit def `iterator asJava`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it)

  /** Implicitly converts a Scala `Iterator` to a Java `Enumeration`.
   * @see [[JavaConverters.asJavaEnumeration]]
   */
  implicit def `enumeration asJava`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it)

  /** Implicitly converts a Scala `Iterable` to a Java `Iterable`.
   * @see [[JavaConverters.asJavaIterable]]
   */
  implicit def `iterable asJava`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i)

  /** Implicitly converts a Scala `Iterable` to an immutable Java `Collection`.
   * @see [[JavaConverters.asJavaCollection]]
   */
  implicit def `collection asJava`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it)

  /** Implicitly converts a Scala mutable `Buffer` to a Java `List`.
   * @see [[JavaConverters.bufferAsJavaList]]
   */
  implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b)

  /** Implicitly converts a Scala mutable `Seq` to a Java `List`.
   * @see [[JavaConverters.mutableSeqAsJavaList]]
   */
  implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq)

  /** Implicitly converts a Scala `Seq` to a Java `List`.
   * @see [[JavaConverters.seqAsJavaList]]
   */
  implicit def `seq AsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq)

  /** Implicitly converts a Scala mutable `Set` to a Java `Set`.
   * @see [[JavaConverters.mutableSetAsJavaSet]]
   */
  implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s)

  /** Implicitly converts a Scala `Set` to a Java `Set`.
   * @see [[JavaConverters.setAsJavaSet]]
   */
  implicit def `set AsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s)

  /** Implicitly converts a Scala mutable `Map` to a Java `Map`.
   * @see [[JavaConverters.mutableMapAsJavaMap]]
   */
  implicit def `mutableMap AsJavaMap`[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = mutableMapAsJavaMap(m)

  /** Implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
   * @see [[JavaConverters.asJavaDictionary]]
   */
  implicit def `dictionary asJava`[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = asJavaDictionary(m)

  /** Implicitly converts a Scala `Map` to a Java `Map`.
   * @see [[JavaConverters.mapAsJavaMap]]
   */
  implicit def `map AsJavaMap`[K, V](m: Map[K, V]): ju.Map[K, V] = mapAsJavaMap(m)

  /** Implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`.
   * @see [[JavaConverters.mapAsJavaConcurrentMap]]
   */
  implicit def `map AsJavaConcurrentMap`[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = mapAsJavaConcurrentMap(m)
}

/**
 * Convenience for miscellaneous implicit conversions from Scala to Java collections API.
 *
 * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead.
 * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]].
 */
@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
object ImplicitConversionsToJava extends ToJavaImplicits

/**
 * Convenience for miscellaneous implicit conversions from Java to Scala collections API.
 *
 * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead.
 * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]].
 */
@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
object ImplicitConversionsToScala extends ToScalaImplicits

/**
 * Convenience for miscellaneous implicit conversions between Java and Scala collections API.
 *
 * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead.
 * Implicit conversions may cause unexpected issues. Example:
 *
 * {{{
 *   import collection.convert.ImplicitConversions._
 *   case class StringBox(s: String)
 *   val m = Map(StringBox("one") -> "uno")
 *   m.get("one")
 * }}}
 *
 * The above example returns `null` instead of producing a type error at compile-time. The map is
 * implicitly converted to a `java.util.Map` which provides a method `get(x: AnyRef)`.
 */
@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
object ImplicitConversions extends ToScalaImplicits with ToJavaImplicits
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala
package collection
package convert

import java.util.{concurrent => juc}
import java.util.{NavigableMap}
import java.{lang => jl, util => ju}

import scala.jdk.CollectionConverters._
import scala.util.Try
import scala.util.chaining._
import scala.util.control.ControlThrowable
import language.experimental.captureChecking
import annotation.unchecked.uncheckedCaptures

/** Wrappers for exposing Scala collections as Java collections and vice-versa */
@SerialVersionUID(3L)
// not private[convert] because `WeakHashMap` uses JMapWrapper
private[collection] object JavaCollectionWrappers extends Serializable {

  /** Wraps a Scala `Iterator` as both a Java `Iterator` and a Java `Enumeration`. */
  @SerialVersionUID(3L)
  class IteratorWrapper[A](val underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] with Serializable {
    def hasNext = underlying.hasNext
    def next() = underlying.next()
    def hasMoreElements = underlying.hasNext
    def nextElement() = underlying.next()
    // The wrapped Scala Iterator has no removal protocol, so `remove` is unsupported.
    override def remove() = throw new UnsupportedOperationException
  }

  /** Wraps a Java `Iterator` as a Scala `Iterator`. */
  @SerialVersionUID(3L)
  class JIteratorWrapper[A](val underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] with Serializable {
    def hasNext = underlying.hasNext
    def next() = underlying.next
  }

  /** Wraps a Java `Enumeration` as a Scala `Iterator`. */
  @SerialVersionUID(3L)
  class JEnumerationWrapper[A](val underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] with Serializable {
    def hasNext = underlying.hasMoreElements
    def next() = underlying.nextElement
  }

  /** Common Java-collection plumbing for wrappers around a Scala `Iterable`. */
  trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
    val underlying: Iterable[A]
    def size = underlying.size
    override def iterator = new IteratorWrapper(underlying.iterator)
    override def isEmpty = underlying.isEmpty
  }

  /** Wraps a Scala `Iterable` as a Java `Collection`. */
  @SerialVersionUID(3L)
  class IterableWrapper[A](val underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] with Serializable {
    import scala.runtime.Statics._
    // Equality is by the wrapped Scala collection, not by Java Collection semantics.
    override def equals(other: Any): Boolean =
      other match {
        case other: IterableWrapper[_] => underlying.equals(other.underlying)
        case _ => false
      }
    override def hashCode = finalizeHash(mix(mix(0xcafebabe, "IterableWrapper".hashCode), anyHash(underlying)), 1)
  }

  /** Wraps a Java `Iterable` as a Scala `Iterable`. */
  @SerialVersionUID(3L)
  class JIterableWrapper[A](val underlying: jl.Iterable[A])
    extends AbstractIterable[A]
      with StrictOptimizedIterableOps[A, Iterable, Iterable[A]]
      with Serializable {
    def iterator = underlying.iterator.asScala
    override def iterableFactory = mutable.ArrayBuffer
    // jl.Iterable has no isEmpty; probe a fresh iterator instead.
    override def isEmpty: Boolean = !underlying.iterator().hasNext
  }

  /** Wraps a Java `Collection` as a Scala `Iterable`. */
  @SerialVersionUID(3L)
  class JCollectionWrapper[A](val underlying: ju.Collection[A])
    extends AbstractIterable[A]
      with StrictOptimizedIterableOps[A, Iterable, Iterable[A]]
      with Serializable {
    def iterator = underlying.iterator.asScala
    override def size = underlying.size
    override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize
    override def isEmpty = underlying.isEmpty
    override def iterableFactory = mutable.ArrayBuffer
  }

  /** Wraps a Scala `Seq` as an immutable Java `List`. */
  @SerialVersionUID(3L)
  class SeqWrapper[A](val underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable {
    def get(i: Int) = underlying(i)
  }

  /** Wraps a Scala mutable `Seq` as a Java `List` supporting in-place `set`. */
  @SerialVersionUID(3L)
  class MutableSeqWrapper[A](val underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable {
    def get(i: Int) = underlying(i)
    override def set(i: Int, elem: A) = {
      val p = underlying(i)
      underlying(i) = elem
      p
    }
  }

  /** Wraps a Scala mutable `Buffer` as a Java `List` supporting add/remove. */
  @SerialVersionUID(3L)
  class MutableBufferWrapper[A](val underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable {
    def get(i: Int) = underlying(i)
    override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p }
    override def add(elem: A) = { underlying += elem; true }
    override def remove(i: Int) = underlying remove i
  }

  /** Wraps a Java `List` as a Scala mutable `Buffer`. */
  @SerialVersionUID(3L)
  class JListWrapper[A](val underlying: ju.List[A])
    extends mutable.AbstractBuffer[A]
      with SeqOps[A, mutable.Buffer, mutable.Buffer[A]]
      with StrictOptimizedSeqOps[A, mutable.Buffer, mutable.Buffer[A]]
      with IterableFactoryDefaults[A, mutable.Buffer]
      with Serializable {
    def length = underlying.size
    override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize
    override def isEmpty = underlying.isEmpty
    override def iterator: Iterator[A] = underlying.iterator.asScala
    def apply(i: Int) = underlying.get(i)
    def update(i: Int, elem: A) = underlying.set(i, elem)
    // subList(0, 0).add inserts at the head of the backing list.
    def prepend(elem: A) = { underlying.subList(0, 0) add elem; this }
    def addOne(elem: A): this.type = { underlying add elem; this }
    def insert(idx: Int, elem: A): Unit = underlying.subList(0, idx).add(elem)
    def insertAll(i: Int, elems: IterableOnce[A]^) = {
      val ins = underlying.subList(0, i)
      elems.iterator.foreach(ins.add(_))
    }
    def remove(i: Int) = underlying.remove(i)
    def clear() = underlying.clear()
    // Note: Clone cannot just call underlying.clone because in Java, only specific collections
    // expose clone methods. Generically, they're protected.
    override def clone(): JListWrapper[A] = new JListWrapper(new ju.ArrayList[A](underlying))
    def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = {
      remove(from, replaced)
      insertAll(from, patch)
      this
    }
    // subList gives a view, so clearing it removes the range from the backing list.
    def remove(from: Int, n: Int): Unit = underlying.subList(from, from+n).clear()
    override def iterableFactory = mutable.ArrayBuffer
    override def subtractOne(elem: A): this.type = { underlying.remove(elem.asInstanceOf[AnyRef]); this }
  }

  /** Wraps a Scala `Set` as an immutable Java `Set`. */
  @SerialVersionUID(3L)
  class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self =>
    // Note various overrides to avoid performance gotchas.
    override def contains(o: Object): Boolean = {
      try { underlying.contains(o.asInstanceOf[A]) }
      catch { case cce: ClassCastException => false }
    }
    override def isEmpty = underlying.isEmpty
    def size = underlying.size
    def iterator = new ju.Iterator[A] {
      val ui = underlying.iterator
      // Tracks the last element returned so remove() can delete it; cleared after removal.
      var prev: Option[A] = None
      def hasNext = ui.hasNext
      def next = { val e = ui.next(); prev = Some(e); e }
      override def remove() = prev match {
        case Some(e) =>
          underlying match {
            case ms: mutable.Set[a] =>
              ms remove e
              prev = None
            case _ =>
              throw new UnsupportedOperationException("remove")
          }
        case _ =>
          throw new IllegalStateException("next must be called at least once before remove")
      }
    }
  }

  /** Wraps a Scala mutable `Set` as a Java `Set` supporting add/remove/clear. */
  @SerialVersionUID(3L)
  class MutableSetWrapper[A](val underlying: mutable.Set[A]) extends SetWrapper[A](underlying) with Serializable {
    override def add(elem: A) = {
      // java.util.Set.add must report whether the set changed; detect via size delta.
      val sz = underlying.size
      underlying += elem
      sz < underlying.size
    }
    override def remove(elem: AnyRef) =
      try underlying.remove(elem.asInstanceOf[A])
      catch { case ex: ClassCastException => false }
    override def clear() = underlying.clear()
  }

  /** Wraps a Java `Set` as a Scala mutable `Set`. */
  @SerialVersionUID(3L)
  class JSetWrapper[A](val underlying: ju.Set[A])
    extends mutable.AbstractSet[A]
      with mutable.SetOps[A, mutable.Set, mutable.Set[A]]
      with StrictOptimizedSetOps[A, mutable.Set, mutable.Set[A]]
      with Serializable {

    override def size: Int = underlying.size
    override def isEmpty: Boolean = underlying.isEmpty
    override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize
    def iterator: Iterator[A] = underlying.iterator.asScala

    def contains(elem: A): Boolean = underlying.contains(elem)

    def addOne(elem: A): this.type = { underlying add elem; this }
    def subtractOne(elem: A): this.type = { underlying remove elem; this }

    override def remove(elem: A): Boolean = underlying remove elem

    override def clear(): Unit = {
      underlying.clear()
    }

    override def empty: mutable.Set[A] = new JSetWrapper(new ju.HashSet[A])

    // Note: Clone cannot just call underlying.clone because in Java, only specific collections
    // expose clone methods. Generically, they're protected.
    override def clone(): mutable.Set[A] = new JSetWrapper[A](new ju.LinkedHashSet[A](underlying))

    override def iterableFactory: IterableFactory[mutable.Set] = mutable.HashSet

    override def filterInPlace(p: A => Boolean): this.type = {
      if (underlying.size() > 0) underlying.removeIf(!p(_))
      this
    }
  }

  /** Wraps a Scala `Map` as an immutable Java `Map`. */
  @SerialVersionUID(3L)
  class MapWrapper[K, V](underlying: Map[K, V]) extends ju.AbstractMap[K, V] with Serializable { self =>
    override def size = underlying.size

    // Java Map.get returns null for an absent key; a wrongly-typed key also maps to null.
    override def get(key: AnyRef): V = try {
      underlying get key.asInstanceOf[K] match {
        case None => null.asInstanceOf[V]
        case Some(v) => v
      }
    } catch {
      case ex: ClassCastException => null.asInstanceOf[V]
    }

    override def entrySet: ju.Set[ju.Map.Entry[K, V]] = new ju.AbstractSet[ju.Map.Entry[K, V]] {
      def size = self.size

      def iterator = new ju.Iterator[ju.Map.Entry[K, V]] {
        val ui = underlying.iterator
        // Last key returned, so remove() knows what to delete.
        var prev : Option[K] = None

        def hasNext = ui.hasNext

        def next() = {
          val (k, v) = ui.next()
          prev = Some(k)
          new ju.Map.Entry[K, V] {
            def getKey = k
            def getValue = v
            def setValue(v1 : V) = self.put(k, v1)

            // It's important that this implementation conform to the contract
            // specified in the javadocs of java.util.Map.Entry.hashCode
            //
            // See https://github.com/scala/bug/issues/10663
            override def hashCode = {
              (if (k == null) 0 else k.hashCode()) ^
              (if (v == null) 0 else v.hashCode())
            }

            override def equals(other: Any) = other match {
              case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
              case _ => false
            }
          }
        }

        override def remove(): Unit = {
          prev match {
            case Some(k) =>
              underlying match {
                case mm: mutable.Map[a, _] =>
                  mm -= k
                  prev = None
                case _ =>
                  throw new UnsupportedOperationException("remove")
              }
            case _ =>
              throw new IllegalStateException("next must be called at least once before remove")
          }
        }
      }
    }

    override def containsKey(key: AnyRef): Boolean = try {
      // Note: Subclass of collection.Map with specific key type may redirect generic
      // contains to specific contains, which will throw a ClassCastException if the
      // wrong type is passed. This is why we need a type cast to A inside a try/catch.
      underlying.contains(key.asInstanceOf[K])
    } catch {
      case ex: ClassCastException => false
    }
  }

  /** Wraps a Scala mutable `Map` as a Java `Map` supporting put/remove/clear. */
  @SerialVersionUID(3L)
  class MutableMapWrapper[K, V](val underlying: mutable.Map[K, V]) extends MapWrapper[K, V](underlying) {
    override def put(k: K, v: V) = underlying.put(k, v) match {
      case Some(v1) => v1
      case None => null.asInstanceOf[V]
    }

    override def remove(k: AnyRef): V = try {
      underlying remove k.asInstanceOf[K] match {
        case None => null.asInstanceOf[V]
        case Some(v) => v
      }
    } catch {
      case ex: ClassCastException => null.asInstanceOf[V]
    }

    override def clear() = underlying.clear()
  }

  /** Base class joining mutable.AbstractMap with the shared Java-map-backed logic. */
  @SerialVersionUID(3L)
  abstract class AbstractJMapWrapper[K, V]
    extends mutable.AbstractMap[K, V]
      with JMapWrapperLike[K, V, mutable.Map, mutable.Map[K, V]] with Serializable

  /** Shared implementation for Scala maps backed by a `java.util.Map`. */
  trait JMapWrapperLike[K, V, +CC[X, Y] <: mutable.MapOps[X, Y, CC, _], +C <: mutable.MapOps[K, V, CC, C]]
    extends mutable.MapOps[K, V, CC, C]
      with StrictOptimizedMapOps[K, V, CC, C]
      with StrictOptimizedIterableOps[(K, V), mutable.Iterable, C] {

    def underlying: ju.Map[K, V]

    override def size = underlying.size

    // support Some(null) if currently bound to null
    def get(k: K) = {
      val v = underlying.get(k)
      if (v != null)
        Some(v)
      else if (underlying.containsKey(k))
        Some(null.asInstanceOf[V])
      else
        None
    }

    override def getOrElseUpdate(key: K, op: => V): V =
      underlying.computeIfAbsent(key, _ => op) match {
        case null => update(key, null.asInstanceOf[V]); null.asInstanceOf[V]
        case v => v
      }

    def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this }
    def subtractOne(key: K): this.type = { underlying remove key; this }

    // support Some(null) if currently bound to null
    override def put(k: K, v: V): Option[V] =
      if (v == null) {
        val present = underlying.containsKey(k)
        val result = underlying.put(k, v)
        if (present) Some(result) else None
      } else {
        var result: Option[V @uncheckedCaptures] = None
        // `compute` gives us the previous binding atomically; tap keeps the new value.
        def recompute(k0: K, v0: V): V = v.tap(_ =>
          if (v0 != null) result = Some(v0)
          else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V])
        )
        underlying.compute(k, recompute)
        result
      }

    override def update(k: K, v: V): Unit = underlying.put(k, v)

    override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = {
      // java.util.Map.compute treats a null result as removal, so Some(null) is signalled
      // out-of-band via the PutNull control throwable.
      def remap(k: K, v: V): V =
        remappingFunction(Option(v)) match {
          case Some(null) => throw PutNull
          case Some(x) => x
          case None => null.asInstanceOf[V]
        }
      try Option(underlying.compute(key, remap))
      catch {
        case PutNull => update(key, null.asInstanceOf[V]); Some(null.asInstanceOf[V])
      }
    }

    // support Some(null) if currently bound to null
    override def remove(k: K): Option[V] = {
      var result: Option[V @uncheckedCaptures] = None
      def recompute(k0: K, v0: V): V = {
        if (v0 != null) result = Some(v0)
        else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V])
        null.asInstanceOf[V]
      }
      underlying.compute(k, recompute)
      result
    }

    def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] {
      val ui = underlying.entrySet.iterator
      def hasNext = ui.hasNext
      def next() = { val e = ui.next(); (e.getKey, e.getValue) }
    }

    override def foreachEntry[U](f: (K, V) => U): Unit = {
      val i = underlying.entrySet().iterator()
      while (i.hasNext) {
        val entry = i.next()
        f(entry.getKey, entry.getValue)
      }
    }

    override def clear() = underlying.clear()

  }

  /** Wraps a Java map as a Scala one. If the map is to support concurrent access,
   * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized
   * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility
   * to wrap all non-atomic operations with `underlying.synchronized`.
   * This includes `get`, as `java.util.Map`'s API does not allow for an
   * atomic `get` when `null` values may be present.
   */
  @SerialVersionUID(3L)
  class JMapWrapper[K, V](val underlying : ju.Map[K, V])
    extends AbstractJMapWrapper[K, V] with Serializable {

    override def isEmpty: Boolean = underlying.isEmpty
    override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize
    override def empty = new JMapWrapper(new ju.HashMap[K, V])
  }

  /** Wraps a Scala `concurrent.Map` as a Java `ConcurrentMap`. */
  @SerialVersionUID(3L)
  class ConcurrentMapWrapper[K, V](underlying: concurrent.Map[K, V]) extends MutableMapWrapper[K, V](underlying) with juc.ConcurrentMap[K, V] {

    def underlyingConcurrentMap: concurrent.Map[K, V] = underlying

    override def putIfAbsent(k: K, v: V) = underlying.putIfAbsent(k, v) match {
      case Some(v) => v
      case None => null.asInstanceOf[V]
    }

    override def remove(k: AnyRef, v: AnyRef) = try {
      underlying.remove(k.asInstanceOf[K], v.asInstanceOf[V])
    } catch {
      case ex: ClassCastException =>
        false
    }

    override def replace(k: K, v: V): V = underlying.replace(k, v) match {
      case Some(v) => v
      case None => null.asInstanceOf[V]
    }

    override def replace(k: K, oldval: V, newval: V) = underlying.replace(k, oldval, newval)
  }

  /** Wraps a concurrent Java map as a Scala one. Single-element concurrent
   * access is supported; multi-element operations such as maps and filters
   * are not guaranteed to be atomic.
   */
  @SerialVersionUID(3L)
  class JConcurrentMapWrapper[K, V](val underlying: juc.ConcurrentMap[K, V])
    extends AbstractJMapWrapper[K, V]
      with concurrent.Map[K, V] {

    // ConcurrentMap disallows null values, so a plain Option(...) is safe here.
    override def get(k: K) = Option(underlying get k)

    override def getOrElseUpdate(key: K, op: => V): V =
      underlying.computeIfAbsent(key, _ => op) match {
        case null => super/*[concurrent.Map]*/.getOrElseUpdate(key, op)
        case v => v
      }

    override def isEmpty: Boolean = underlying.isEmpty
    override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize
    override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[K, V])

    def putIfAbsent(k: K, v: V): Option[V] = Option(underlying.putIfAbsent(k, v))

    def remove(k: K, v: V): Boolean = underlying.remove(k, v)

    def replace(k: K, v: V): Option[V] = Option(underlying.replace(k, v))

    def replace(k: K, oldvalue: V, newvalue: V): Boolean = underlying.replace(k, oldvalue, newvalue)

    override def lastOption: Option[(K, V)] =
      underlying match {
        case nav: NavigableMap[K @unchecked, V @unchecked] => Option(nav.lastEntry).map(e => (e.getKey, e.getValue))
        case _ if isEmpty => None
        case _ => Try(last).toOption
      }

    override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = {
      def remap(k: K, v: V): V =
        remappingFunction(Option(v)) match {
          case Some(null) => throw PutNull // see scala/scala#10129
          case Some(x) => x
          case None => null.asInstanceOf[V]
        }
      try Option(underlying.compute(key, remap))
      catch {
        case PutNull => super/*[concurrent.Map]*/.updateWith(key)(remappingFunction)
      }
    }
  }

  /** Wraps a Scala mutable `Map` as a Java `Dictionary`. */
  @SerialVersionUID(3L)
  class DictionaryWrapper[K, V](val underlying: mutable.Map[K, V]) extends ju.Dictionary[K, V] with Serializable {
    def size: Int = underlying.size
    def isEmpty: Boolean = underlying.isEmpty
    def keys: ju.Enumeration[K] = underlying.keysIterator.asJavaEnumeration
    def elements: ju.Enumeration[V] = underlying.valuesIterator.asJavaEnumeration
    def get(key: AnyRef) = try {
      underlying get key.asInstanceOf[K] match {
        case None => null.asInstanceOf[V]
        case Some(v) => v
      }
    } catch {
      case ex: ClassCastException => null.asInstanceOf[V]
    }
    def put(key: K, value: V): V = underlying.put(key, value) match {
      case Some(v) => v
      case None => null.asInstanceOf[V]
    }
    override def remove(key: AnyRef) = try {
      underlying remove key.asInstanceOf[K] match {
        case None => null.asInstanceOf[V]
        case Some(v) => v
      }
    } catch {
      case ex: ClassCastException => null.asInstanceOf[V]
    }
  }

  /** Wraps a Java `Dictionary` as a Scala mutable `Map`. */
  @SerialVersionUID(3L)
  class JDictionaryWrapper[K, V](val underlying: ju.Dictionary[K, V]) extends mutable.AbstractMap[K, V] with Serializable {
    override def size: Int = underlying.size
    override def isEmpty: Boolean = underlying.isEmpty
    override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize

    def get(k: K) = Option(underlying get k)

    def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this }
    def subtractOne(key: K): this.type = { underlying remove key; this }

    override def put(k: K, v: V): Option[V] = Option(underlying.put(k, v))

    override def update(k: K, v: V): Unit = { underlying.put(k, v) }

    override def remove(k: K): Option[V] = Option(underlying remove k)
    def iterator = underlying.keys.asScala map (k => (k, underlying get k))

    // ju.Dictionary has no clear(); remove keys one at a time.
    override def clear() = iterator.foreach(entry => underlying.remove(entry._1))

    override def mapFactory = mutable.HashMap
  }

  /** Wraps a Java `Properties` as a Scala mutable `Map[String, String]`. */
  @SerialVersionUID(3L)
  class JPropertiesWrapper(underlying: ju.Properties)
    extends mutable.AbstractMap[String, String]
      with mutable.MapOps[String, String, mutable.Map, mutable.Map[String, String]]
      with StrictOptimizedMapOps[String, String, mutable.Map, mutable.Map[String, String]]
      with StrictOptimizedIterableOps[(String, String), mutable.Iterable, mutable.Map[String, String]]
      with Serializable {

    override def size = underlying.size
    override def isEmpty: Boolean = underlying.isEmpty
    override def knownSize: Int = size
    def get(k: String) = {
      val v = underlying get k
      // Properties is a Hashtable[AnyRef, AnyRef]; values must be cast to String.
      if (v != null) Some(v.asInstanceOf[String]) else None
    }

    def addOne(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
    def subtractOne(key: String): this.type = { underlying remove key; this }

    override def put(k: String, v: String): Option[String] = {
      val r = underlying.put(k, v)
      if (r != null) Some(r.asInstanceOf[String]) else None
    }

    override def update(k: String, v: String): Unit = { underlying.put(k, v) }

    override def remove(k: String): Option[String] = {
      val r = underlying remove k
      if (r != null) Some(r.asInstanceOf[String]) else None
    }

    def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] {
      val ui = underlying.entrySet.iterator
      def hasNext = ui.hasNext
      def next() = {
        val e = ui.next()
        (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String])
      }
    }

    override def clear() = underlying.clear()

    override def empty = new JPropertiesWrapper(new ju.Properties)

    def getProperty(key: String) = underlying.getProperty(key)

    def getProperty(key: String, defaultValue: String) =
      underlying.getProperty(key, defaultValue)

    def setProperty(key: String, value: String) =
      underlying.setProperty(key, value)

    override def mapFactory = mutable.HashMap
  }

  /** Thrown when certain Map operations attempt to put a null value. */
  private val PutNull = new ControlThrowable {}
}
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert + +import java.util.Spliterator +import java.util.stream._ +import java.{lang => jl} + +import scala.annotation.implicitNotFound +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.convert.StreamExtensions.{AccumulatorFactoryInfo, StreamShape, StreamUnboxer} +import scala.jdk.CollectionConverters._ +import scala.jdk._ +import language.experimental.captureChecking + +/** Defines extension methods to create Java Streams for Scala collections, available through + * [[scala.jdk.javaapi.StreamConverters]]. + */ +trait StreamExtensions { + // collections + + implicit class IterableHasSeqStream[A](cc: IterableOnce[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = + s.fromStepper(cc.stepper, par = false) + } + + // Not `CC[X] <: IterableOnce[X]`, but `C` with an extra constraint, to support non-parametric classes like IntAccumulator + implicit class IterableNonGenericHasParStream[A, C <: IterableOnce[_]](c: C)(implicit ev: C <:< IterableOnce[A]) { + private type IterableOnceWithEfficientStepper = IterableOnce[A] { + def stepper[S <: Stepper[_]](implicit shape : StepperShape[A, S]) : S with EfficientSplit + } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[A, S, St], + st: StepperShape[A, St], + @implicitNotFound("`parStream` can only be called on collections where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: C <:< IterableOnceWithEfficientStepper): S = + s.fromStepper(ev(c).stepper, par = true) + } + + // maps + + implicit class MapHasSeqKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[K, S, St], st: StepperShape[K, St]): S = + s.fromStepper(cc.keyStepper, par = false) + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[V, S, St], st: StepperShape[V, St]): S = + s.fromStepper(cc.valueStepper, par = false) + + // The asJavaSeqStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. 
+ */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[(K, V), S, St], st: StepperShape[(K, V), St]): S = + s.fromStepper(cc.stepper, par = false) + } + + + implicit class MapHasParKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + private type MapOpsWithEfficientKeyStepper = collection.MapOps[K, V, collection.Map, _] { def keyStepper[S <: Stepper[_]](implicit shape : StepperShape[K, S]) : S with EfficientSplit } + private type MapOpsWithEfficientValueStepper = collection.MapOps[K, V, collection.Map, _] { def valueStepper[S <: Stepper[_]](implicit shape : StepperShape[V, S]) : S with EfficientSplit } + private type MapOpsWithEfficientStepper = collection.MapOps[K, V, collection.Map, _] { def stepper[S <: Stepper[_]](implicit shape : StepperShape[(K, V), S]) : S with EfficientSplit } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[K, S, St], + st: StepperShape[K, St], + @implicitNotFound("parKeyStream can only be called on maps where `keyStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientKeyStepper): S = + s.fromStepper(cc.keyStepper, par = true) + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaParValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[V, S, St], + st: StepperShape[V, St], + @implicitNotFound("parValueStream can only be called on maps where `valueStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientValueStepper): S = + s.fromStepper(cc.valueStepper, par = true) + + // The asJavaParStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[(K, V), S, St], + st: StepperShape[(K, V), St], + @implicitNotFound("parStream can only be called on maps where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientStepper): S = + s.fromStepper(cc.stepper, par = true) + } + + // steppers + + implicit class StepperHasSeqStream[A](stepper: Stepper[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] => st.seqUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = false) + } + } + + implicit class StepperHasParStream[A](stepper: Stepper[A] with EfficientSplit) { + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] with EfficientSplit => st.parUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = true) + } + } + + // arrays + // uses the JDK array spliterators (`DoubleArraySpliterator`). users can also call + // `array.stepper.seqStream`, which then uses the Scala steppers (`DoubleArrayStepper`). the + // steppers are also available on byte/short/char/float arrays (`WidenedByteArrayStepper`), + // JDK spliterators only for double/int/long/reference. + + implicit class DoubleArrayHasSeqParStream(a: Array[Double]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = asJavaSeqStream.parallel + } + + implicit class IntArrayHasSeqParStream(a: Array[Int]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = asJavaSeqStream.parallel + } + + implicit class LongArrayHasSeqParStream(a: Array[Long]) { + /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaSeqStream: LongStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaParStream: LongStream = asJavaSeqStream.parallel + } + + implicit class AnyArrayHasSeqParStream[A <: AnyRef](a: Array[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this array. 
*/ + def asJavaSeqStream: Stream[A] = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this array. */ + def asJavaParStream: Stream[A] = asJavaSeqStream.parallel + } + + implicit class ByteArrayHasSeqParStream(a: Array[Byte]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class ShortArrayHasSeqParStream(a: Array[Short]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class CharArrayHasSeqParStream(a: Array[Char]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class FloatArrayHasSeqParStream(a: Array[Float]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = a.stepper.asJavaParStream + } + + + + // strings + + implicit class StringHasSeqParStream(s: String) { + /** + * A sequential stream on the characters of a string, same as [[asJavaSeqCharStream]]. See also + * [[asJavaSeqCodePointStream]]. 
+ */ + def asJavaSeqStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ false) + /** + * A parallel stream on the characters of a string, same as [[asJavaParCharStream]]. See also + * [[asJavaParCodePointStream]]. + */ + def asJavaParStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ true) + + /** A sequential stream on the characters of a string. See also [[asJavaSeqCodePointStream]]. */ + def asJavaSeqCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ false) + /** A parallel stream on the characters of a string. See also [[asJavaParCodePointStream]]. */ + def asJavaParCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ true) + + /** A sequential stream on the code points of a string. See also [[asJavaSeqCharStream]]. */ + def asJavaSeqCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ false) + /** A parallel stream on the code points of a string. See also [[asJavaParCharStream]]. */ + def asJavaParCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ true) + } + + // toScala for streams + + implicit class StreamHasToScala[A](stream: Stream[A]) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts streams of boxed integers, longs or + * doubles are converted to the primitive accumulators ([[scala.jdk.IntAccumulator]], etc.). + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. 
Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[A, C1])(implicit info: AccumulatorFactoryInfo[A, C1]): C1 = { + + def anyAcc = stream.collect(AnyAccumulator.supplier[A], AnyAccumulator.adder[A], AnyAccumulator.merger[A]) + if (info.companion == AnyAccumulator) anyAcc.asInstanceOf[C1] + else if (info.companion == IntAccumulator) stream.asInstanceOf[Stream[Int]].collect(IntAccumulator.supplier, IntAccumulator.boxedAdder, IntAccumulator.merger).asInstanceOf[C1] + else if (info.companion == LongAccumulator) stream.asInstanceOf[Stream[Long]].collect(LongAccumulator.supplier, LongAccumulator.boxedAdder, LongAccumulator.merger).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) stream.asInstanceOf[Stream[Double]].collect(DoubleAccumulator.supplier, DoubleAccumulator.boxedAdder, DoubleAccumulator.merger).asInstanceOf[C1] + else if (stream.isParallel) anyAcc.to(factory) + else factory.fromSpecific(stream.iterator.asScala) + } + + /** Convert a generic Java Stream wrapping a primitive type to a corresponding primitive + * Stream. + */ + def asJavaPrimitiveStream[S](implicit unboxer: StreamUnboxer[A, S]): S = unboxer(stream) + } + + implicit class IntStreamHasToScala(stream: IntStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `IntStream` to a primitive + * [[scala.jdk.IntAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. 
The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[Int, C1])(implicit info: AccumulatorFactoryInfo[Int, C1]): C1 = { + def intAcc = stream.collect(IntAccumulator.supplier, IntAccumulator.adder, IntAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Int], AnyAccumulator.unboxedIntAdder, AnyAccumulator.merger[Int]).asInstanceOf[C1] + else if (info.companion == IntAccumulator) intAcc.asInstanceOf[C1] + else if (stream.isParallel) intAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala) + } + } + + implicit class LongStreamHasToScala(stream: LongStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `LongStream` to a primitive + * [[scala.jdk.LongAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
+ */ + def toScala[C1](factory: collection.Factory[Long, C1])(implicit info: AccumulatorFactoryInfo[Long, C1]): C1 = { + def longAcc = stream.collect(LongAccumulator.supplier, LongAccumulator.adder, LongAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Long], AnyAccumulator.unboxedLongAdder, AnyAccumulator.merger[Long]).asInstanceOf[C1] + else if (info.companion == LongAccumulator) longAcc.asInstanceOf[C1] + else if (stream.isParallel) longAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala) + } + } + + implicit class DoubleStreamHasToScala(stream: DoubleStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `DoubleStream` to a primitive + * [[scala.jdk.DoubleAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
+ */ + def toScala[C1](factory: collection.Factory[Double, C1])(implicit info: AccumulatorFactoryInfo[Double, C1]): C1 = { + def doubleAcc = stream.collect(DoubleAccumulator.supplier, DoubleAccumulator.adder, DoubleAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Double], AnyAccumulator.unboxedDoubleAdder, AnyAccumulator.merger[Double]).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) doubleAcc.asInstanceOf[C1] + else if (stream.isParallel) doubleAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala) + } + } +} + +object StreamExtensions { + /** An implicit StreamShape instance connects element types with the corresponding specialized + * Stream and Stepper types. This is used in `asJavaStream` extension methods to create + * generic or primitive streams according to the element type. + */ + sealed trait StreamShape[T, S <: BaseStream[_, _], St <: Stepper[_]] { + final def fromStepper(st: St, par: Boolean): S = mkStream(st, par) + protected def mkStream(st: St, par: Boolean): S + } + + object StreamShape extends StreamShapeLowPriority1 { + // primitive + implicit val intStreamShape : StreamShape[Int , IntStream , IntStepper] = mkIntStreamShape[Int] + implicit val longStreamShape : StreamShape[Long , LongStream , LongStepper] = mkLongStreamShape[Long] + implicit val doubleStreamShape: StreamShape[Double, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Double] + + // widening + implicit val byteStreamShape : StreamShape[Byte , IntStream , IntStepper] = mkIntStreamShape[Byte] + implicit val shortStreamShape: StreamShape[Short, IntStream , IntStepper] = mkIntStreamShape[Short] + implicit val charStreamShape : StreamShape[Char , IntStream , IntStepper] = mkIntStreamShape[Char] + implicit val floatStreamShape: StreamShape[Float, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Float] + + // boxed java primitives + + implicit val 
jIntegerStreamShape : StreamShape[jl.Integer , IntStream , IntStepper ] = mkIntStreamShape[jl.Integer] + implicit val jLongStreamShape : StreamShape[jl.Long , LongStream , LongStepper ] = mkLongStreamShape[jl.Long] + implicit val jDoubleStreamShape : StreamShape[jl.Double , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Double] + implicit val jByteStreamShape : StreamShape[jl.Byte , IntStream , IntStepper ] = mkIntStreamShape[jl.Byte] + implicit val jShortStreamShape : StreamShape[jl.Short , IntStream , IntStepper ] = mkIntStreamShape[jl.Short] + implicit val jCharacterStreamShape : StreamShape[jl.Character, IntStream , IntStepper ] = mkIntStreamShape[jl.Character] + implicit val jFloatStreamShape : StreamShape[jl.Float , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Float] + + private def mkIntStreamShape[T]: StreamShape[T, IntStream, IntStepper] = new StreamShape[T, IntStream, IntStepper] { + protected def mkStream(st: IntStepper, par: Boolean): IntStream = StreamSupport.intStream(st.spliterator, par) + } + + private def mkLongStreamShape[T]: StreamShape[T, LongStream, LongStepper] = new StreamShape[T, LongStream, LongStepper] { + protected def mkStream(st: LongStepper, par: Boolean): LongStream = StreamSupport.longStream(st.spliterator, par) + } + + private def mkDoubleStreamShape[T]: StreamShape[T, DoubleStream, DoubleStepper] = new StreamShape[T, DoubleStream, DoubleStepper] { + protected def mkStream(st: DoubleStepper, par: Boolean): DoubleStream = StreamSupport.doubleStream(st.spliterator, par) + } + } + + trait StreamShapeLowPriority1 { + // reference + implicit def anyStreamShape[T]: StreamShape[T, Stream[T], Stepper[T]] = anyStreamShapePrototype.asInstanceOf[StreamShape[T, Stream[T], Stepper[T]]] + + private[this] val anyStreamShapePrototype: StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] = new StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] { + def mkStream(s: Stepper[AnyRef], par: Boolean): Stream[AnyRef] = 
StreamSupport.stream(s.spliterator.asInstanceOf[Spliterator[AnyRef]], par) + } + } + + /** Connects a stream element type `A` to the corresponding, potentially specialized, Stream type. + * Used in the `stream.asJavaPrimitiveStream` extension method. + */ + sealed trait StreamUnboxer[A, S] { + def apply(s: Stream[A]): S + } + object StreamUnboxer { + implicit val intStreamUnboxer: StreamUnboxer[Int, IntStream] = new StreamUnboxer[Int, IntStream] { + def apply(s: Stream[Int]): IntStream = s.mapToInt(x => x) + } + implicit val javaIntegerStreamUnboxer: StreamUnboxer[jl.Integer, IntStream] = intStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Integer, IntStream]] + + implicit val longStreamUnboxer: StreamUnboxer[Long, LongStream] = new StreamUnboxer[Long, LongStream] { + def apply(s: Stream[Long]): LongStream = s.mapToLong(x => x) + } + implicit val javaLongStreamUnboxer: StreamUnboxer[jl.Long, LongStream] = longStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Long, LongStream]] + + implicit val doubleStreamUnboxer: StreamUnboxer[Double, DoubleStream] = new StreamUnboxer[Double, DoubleStream] { + def apply(s: Stream[Double]): DoubleStream = s.mapToDouble(x => x) + } + implicit val javaDoubleStreamUnboxer: StreamUnboxer[jl.Double, DoubleStream] = doubleStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Double, DoubleStream]] + } + + + + /** An implicit `AccumulatorFactoryInfo` connects primitive element types to the corresponding + * specialized [[scala.jdk.Accumulator]] factory. This is used in the `stream.toScala` extension methods + * to ensure collecting a primitive stream into a primitive accumulator does not box. + * + * When converting to a collection other than `Accumulator`, the generic + * `noAccumulatorFactoryInfo` is passed. 
+ */ + trait AccumulatorFactoryInfo[A, C] { + val companion: AnyRef + } + trait LowPriorityAccumulatorFactoryInfo { + implicit def noAccumulatorFactoryInfo[A, C]: AccumulatorFactoryInfo[A, C] = noAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, C]] + private val noAccumulatorFactoryInfoPrototype: AccumulatorFactoryInfo[AnyRef, AnyRef] = new AccumulatorFactoryInfo[AnyRef, AnyRef] { + val companion: AnyRef = null + } + } + object AccumulatorFactoryInfo extends LowPriorityAccumulatorFactoryInfo { + implicit def anyAccumulatorFactoryInfo[A]: AccumulatorFactoryInfo[A, AnyAccumulator[A]] = anyAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, AnyAccumulator[A]]] + + private object anyAccumulatorFactoryInfoPrototype extends AccumulatorFactoryInfo[AnyRef, AnyAccumulator[AnyRef]] { + val companion: AnyRef = AnyAccumulator + } + + implicit val intAccumulatorFactoryInfo: AccumulatorFactoryInfo[Int, IntAccumulator] = new AccumulatorFactoryInfo[Int, IntAccumulator] { + val companion: AnyRef = IntAccumulator + } + + implicit val longAccumulatorFactoryInfo: AccumulatorFactoryInfo[Long, LongAccumulator] = new AccumulatorFactoryInfo[Long, LongAccumulator] { + val companion: AnyRef = LongAccumulator + } + + implicit val doubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[Double, DoubleAccumulator] = new AccumulatorFactoryInfo[Double, DoubleAccumulator] { + val companion: AnyRef = DoubleAccumulator + } + + implicit val jIntegerAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Integer, IntAccumulator] = intAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Integer, IntAccumulator]] + implicit val jLongAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Long, IntAccumulator] = longAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Long, IntAccumulator]] + implicit val jDoubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Double, IntAccumulator] = doubleAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Double, 
IntAccumulator]]
  }
}

/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.collection.convert
package impl

import scala.collection._
// import language.experimental.captureChecking // TODO enable

/** Stepper over an array of references; splits by halving the remaining index range. */
private[collection] class ObjectArrayStepper[A <: Object](underlying: Array[A], _i0: Int, _iN: Int)
  extends IndexedStepperBase[AnyStepper[A], ObjectArrayStepper[A]](_i0, _iN)
    with AnyStepper[A] {
  def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): ObjectArrayStepper[A] = new ObjectArrayStepper[A](underlying, i0, half)
}

/** Stepper over a boolean array; elements are boxed, as there is no primitive Boolean stepper. */
private[collection] class BoxedBooleanArrayStepper(underlying: Array[Boolean], _i0: Int, _iN: Int)
  extends IndexedStepperBase[AnyStepper[Boolean], BoxedBooleanArrayStepper](_i0, _iN)
    with AnyStepper[Boolean] {
  def nextStep(): Boolean = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): BoxedBooleanArrayStepper = new BoxedBooleanArrayStepper(underlying, i0, half)
}

/** Stepper over a byte array, widening each element to Int. */
private[collection] class WidenedByteArrayStepper(underlying: Array[Byte], _i0: Int, _iN: Int)
  extends IndexedStepperBase[IntStepper, WidenedByteArrayStepper](_i0, _iN)
    with IntStepper {
  def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): WidenedByteArrayStepper = new WidenedByteArrayStepper(underlying, i0, half)
}

/** Stepper over a char array, widening each element to Int. */
private[collection] class WidenedCharArrayStepper(underlying: Array[Char], _i0: Int, _iN: Int)
  extends IndexedStepperBase[IntStepper, WidenedCharArrayStepper](_i0, _iN)
    with IntStepper {
  def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): WidenedCharArrayStepper = new WidenedCharArrayStepper(underlying, i0, half)
}

/** Stepper over a short array, widening each element to Int. */
private[collection] class WidenedShortArrayStepper(underlying: Array[Short], _i0: Int, _iN: Int)
  extends IndexedStepperBase[IntStepper, WidenedShortArrayStepper](_i0, _iN)
    with IntStepper {
  def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): WidenedShortArrayStepper = new WidenedShortArrayStepper(underlying, i0, half)
}

/** Stepper over a float array, widening each element to Double. */
private[collection] class WidenedFloatArrayStepper(underlying: Array[Float], _i0: Int, _iN: Int)
  extends IndexedStepperBase[DoubleStepper, WidenedFloatArrayStepper](_i0, _iN)
    with DoubleStepper {
  def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): WidenedFloatArrayStepper = new WidenedFloatArrayStepper(underlying, i0, half)
}

/** Stepper over a double array. */
private[collection] class DoubleArrayStepper(underlying: Array[Double], _i0: Int, _iN: Int)
  extends IndexedStepperBase[DoubleStepper, DoubleArrayStepper](_i0, _iN)
    with DoubleStepper {
  def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): DoubleArrayStepper = new DoubleArrayStepper(underlying, i0, half)
}

/** Stepper over an int array. */
private[collection] class IntArrayStepper(underlying: Array[Int], _i0: Int, _iN: Int)
  extends IndexedStepperBase[IntStepper, IntArrayStepper](_i0, _iN)
    with IntStepper {
  def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): IntArrayStepper = new IntArrayStepper(underlying, i0, half)
}

/** Stepper over a long array. */
private[collection] class LongArrayStepper(underlying: Array[Long], _i0: Int, _iN: Int)
  extends IndexedStepperBase[LongStepper, LongArrayStepper](_i0, _iN)
    with LongStepper {
  def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE()
  protected def semiclone(half: Int): LongArrayStepper = new LongArrayStepper(underlying, i0, half)
}

/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.collection.convert
package impl

import java.util.Spliterator

import annotation.tailrec
import scala.collection.Stepper.EfficientSplit
import scala.collection._
// import language.experimental.captureChecking // TODO enable


private[collection] object BinaryTreeStepper {
  val emptyStack = new Array[AnyRef](0)
}


/** A generic stepper that can traverse ordered binary trees.
 *  The tree is assumed to have all the stuff on the left first, then the root, then everything on the right.
 *
 *  Splits occur at the root of whatever has not yet been traversed (the substepper steps up to but
 *  does not include the root).
 *
 *  The stepper maintains an internal stack, not relying on the tree traversal to be reversible. Trees with
 *  nodes that maintain a parent pointer may be traversed slightly faster without a stack, but splitting is
 *  more awkward.
 *
 *  Algorithmically, this class implements a simple state machine that unrolls the left-leaning links in
 *  a binary tree onto a stack. At all times, the machine should be in one of these states:
 *  1. Empty: `myCurrent` is `null` and `index` is `-1`. `stack` should also be `Array.empty` then.
 *  2. Ready: `myCurrent` is not `null` and contains the next `A` to be extracted
 *  3. Pending: `myCurrent` is `null` and `stack(index)` contains the next node to visit
 *
 *  Subclasses should allow this class to do all the work of maintaining state; `next` should simply
 *  reduce `maxLength` by one, and consume `myCurrent` and set it to `null` if `hasNext` is true.
 */
private[collection] abstract class BinaryTreeStepperBase[A, T >: Null <: AnyRef, Sub >: Null, Semi <: Sub with BinaryTreeStepperBase[A, T, _, _]](
  protected var maxLength: Int, protected var myCurrent: T, protected var stack: Array[AnyRef], protected var index: Int,
  protected val left: T => T, protected val right: T => T
)
extends EfficientSplit {
  /** Unrolls a subtree onto the stack starting from a particular node, returning
   *  the last node found. This final node is _not_ placed on the stack, and
   *  may have things to its right.
   */
  @tailrec protected final def unroll(from: T): T = {
    val l = left(from)
    if (l eq null) from
    else {
      // Grow the stack geometrically when it is full.
      if (index + 1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length * 2)
      index += 1
      stack(index) = from
      unroll(l)
    }
  }

  /** Takes a subtree whose left side, if any, has already been visited, and unrolls
   *  the right side of the tree onto the stack, thereby detaching that node of
   *  the subtree from the stack entirely (so it is ready to use). It returns
   *  the node that is being detached. Note that the node must _not_ already be
   *  on the stack.
+ */ + protected final def detach(node: T): node.type = { + val r = right(node) + if (r ne null) { + val last = unroll(r) + if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2) + index += 1 + stack(index) = last + } + node + } + + /** Given an empty state and the root of a new tree, initialize the tree properly + * to be in an (appropriate) ready state. Will do all sorts of wrong stuff if the + * tree is not already empty. + * + * Right now overwrites everything so could allow reuse, but isn't used for it. + */ + private[impl] final def initialize(root: T, size: Int): Unit = + if (root eq null) { + maxLength = 0 + myCurrent = null + stack = BinaryTreeStepper.emptyStack + index = -1 + } + else { + maxLength = size + index = -1 + myCurrent = detach(unroll(root)) + } + + protected def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): Semi + + def characteristics: Int = Spliterator.ORDERED + + def estimateSize: Long = if (hasStep) maxLength else 0 + + def hasStep: Boolean = (myCurrent ne null) || (maxLength > 0 && { + if (index < 0) { maxLength = 0; stack = BinaryTreeStepper.emptyStack; false } + else { + val ans = stack(index).asInstanceOf[T] + index -= 1 + myCurrent = detach(ans) + true + } + }) + + /** Splits the tree at the root by giving everything unrolled on the stack to a new stepper, + * detaching the root, and leaving the right-hand side of the root unrolled. + * + * If the tree is empty or only has one element left, it returns `null` instead of splitting. 
+ */ + def trySplit(): Sub = + if (!hasStep || index < 0) null + else { + val root = stack(0).asInstanceOf[T] + val leftStack = + if (index > 0) java.util.Arrays.copyOfRange(stack, 1, index+1) + else BinaryTreeStepper.emptyStack + val leftIndex = index - 1 + val leftCurrent = myCurrent + var leftMax = maxLength + index = -1 + detach(root) + myCurrent = root + leftMax -= 2+index + maxLength -= 2+leftIndex + semiclone(leftMax, leftCurrent, leftStack, leftIndex) + } +} + + +private[collection] final class AnyBinaryTreeStepper[A, T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => A +) +extends BinaryTreeStepperBase[A, T, AnyStepper[A], AnyBinaryTreeStepper[A, T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): AnyBinaryTreeStepper[A, T] = + new AnyBinaryTreeStepper[A, T](maxL, myC, stk, ix, left, right, extract) +} +private[collection] object AnyBinaryTreeStepper { + def from[A, T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => A): AnyBinaryTreeStepper[A, T] = { + val ans = new AnyBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + +private[collection] final class DoubleBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Double +) +extends BinaryTreeStepperBase[Double, T, DoubleStepper, DoubleBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + 
ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): DoubleBinaryTreeStepper[T] = + new DoubleBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object DoubleBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Double): DoubleBinaryTreeStepper[T] = { + val ans = new DoubleBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + +private[collection] final class IntBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Int +) +extends BinaryTreeStepperBase[Int, T, IntStepper, IntBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): IntBinaryTreeStepper[T] = + new IntBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object IntBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Int): IntBinaryTreeStepper[T] = { + val ans = new IntBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + + +private[collection] final class LongBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Long +) +extends BinaryTreeStepperBase[Long, T, LongStepper, LongBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = 
extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): LongBinaryTreeStepper[T] = + new LongBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object LongBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Long): LongBinaryTreeStepper[T] = { + val ans = new LongBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + diff --git a/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala new file mode 100644 index 000000000000..16801089c39f --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala @@ -0,0 +1,119 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{BitSetOps, IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable + + +private[collection] final class BitSetStepper( + private var underlying: BitSetOps[_], + private var cache0: Long, private var cache1: Long, + _i0: Int, _iN: Int, + private var cacheIndex: Int +) +extends InOrderStepperBase[IntStepper, BitSetStepper](_i0, _iN) +with IntStepper { + import BitSetOps.{WordLength, LogWL} + + // When `found` is set, `i0` is an element that exists + protected var found: Boolean = false + + @annotation.tailrec + protected def findNext(): Boolean = + if (i0 >= iN) false + else { + val ix = i0 >> LogWL + if (ix == cacheIndex || ix == cacheIndex+1) { + val i = scanLong(if (ix == cacheIndex) cache0 else cache1, i0 & (WordLength - 1)) + if (i >= 0) { + i0 = (i0 & ~(WordLength - 1)) | i + found = (i0 < iN) + found + } + else { + i0 = (i0 & ~(WordLength - 1)) + WordLength + findNext() + } + } + else if (underlying eq null) { + i0 = iN + found = false + found + } + else { + cacheIndex = ix + cache0 = underlying.word(cacheIndex) + cache1 = if ((iN - 1) >> LogWL == ix) -1L else underlying.word(cacheIndex+1) + findNext() + } + } + + def semiclone(half: Int): BitSetStepper = + if (underlying == null) { + val ans = new BitSetStepper(null, cache0, cache1, i0, half, cacheIndex) + ans.found = found + i0 = half + found = false + ans + } + else { + // Set up new stepper + val ixNewN = (half - 1) >> LogWL + val ans = + new BitSetStepper(if (ixNewN <= cacheIndex + 1) null else underlying, cache0, cache1, i0, half, cacheIndex) + if (found) ans.found = true + + // Advance old stepper to breakpoint + val ixOld0 = half >> LogWL + if (ixOld0 > cacheIndex + 1) { + cache0 = underlying.word(ixOld0) + cache1 = if (((iN - 1) >> LogWL) == ixOld0) -1L else underlying.word(ixOld0+1) + cacheIndex = ixOld0 + i0 = half + found = false + } + + // 
Return new stepper + ans + } + + @annotation.tailrec + private[this] def scanLong(bits: Long, from: Int): Int = + if (from >= WordLength) -1 + else if ((bits & (1L << from)) != 0) from + else scanLong(bits, from + 1) + + def nextStep(): Int = + if (found || findNext()) { + found = false + val ans = i0 + i0 += 1 + ans + } + else Stepper.throwNSEE() +} + +private[collection] object BitSetStepper { + def from(bs: scala.collection.BitSetOps[_]): IntStepper with EfficientSplit = + new BitSetStepper( + if (bs.nwords <= 2) null else bs, + if (bs.nwords <= 0) -1L else bs.word(0), + if (bs.nwords <= 1) -1L else bs.word(1), + 0, + bs.nwords * BitSetOps.WordLength, + 0 + ) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala new file mode 100644 index 000000000000..12fb471ea768 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala @@ -0,0 +1,246 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.immutable.Node +// import language.experimental.captureChecking // TODO enable + +/** A stepper that is a slightly elaborated version of the ChampBaseIterator; + * the main difference is that it knows when it should stop instead of running + * to the end of all trees. + */ +private[collection] abstract class ChampStepperBase[ + A, T <: Node[T], Sub >: Null, Semi <: Sub with ChampStepperBase[A, T, _, _] +](protected var maxSize: Int) +extends EfficientSplit { + import Node.MaxDepth + + // Much of this code is identical to ChampBaseIterator. 
If you change that, look here too! + + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private var currentStackLevel: Int = -1 + private var nodeCursorsAndLengths: Array[Int] = _ + private var nodes: Array[T] = _ + + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + def initRoot(rootNode: T): Unit = { + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
+ */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + false + } + + def characteristics: Int = 0 + + def estimateSize: Long = if (hasStep) maxSize else 0L + + def semiclone(): Semi + + final def hasStep: Boolean = maxSize > 0 && { + val ans = (currentValueCursor < currentValueLength) || searchNextValueNode() + if (!ans) maxSize = 0 + ans + } + + final def trySplit(): Sub = + if (!hasStep) null + else { + var fork = 0 + while (fork <= currentStackLevel && nodeCursorsAndLengths(2*fork) >= nodeCursorsAndLengths(2*fork + 1)) fork += 1 + if (fork > currentStackLevel && currentValueCursor > currentValueLength -2) null + else { + val semi = semiclone() + semi.maxSize = maxSize + semi.currentValueCursor = currentValueCursor + semi.currentValueNode = currentValueNode + if (fork > currentStackLevel) { + // Just need to finish the current node + semi.currentStackLevel = -1 + val i = (currentValueCursor + currentValueLength) >>> 1 + semi.currentValueLength = i + currentValueCursor = i + } + else { + // Need (at least some of) the full stack, so make an identical copy + semi.nodeCursorsAndLengths = java.util.Arrays.copyOf(nodeCursorsAndLengths, nodeCursorsAndLengths.length) + semi.nodes = java.util.Arrays.copyOf(nodes.asInstanceOf[Array[Node[T]]], nodes.length).asInstanceOf[Array[T]] + semi.currentStackLevel = currentStackLevel + semi.currentValueLength = currentValueLength + + // Split the top level of the stack where there's still something to split + 
// Could make this more efficient by duplicating code from searchNextValueNode + // instead of setting up for it to run normally. But splits tend to be rare, + // so it's not critically important. + // + // Note that this split can be kind of uneven; if we knew how many child nodes there + // were we could do better. + val i = (nodeCursorsAndLengths(2*fork) + nodeCursorsAndLengths(2*fork + 1)) >>> 1 + semi.nodeCursorsAndLengths(2*fork + 1) = i + var j = currentStackLevel + while (j > fork) { + nodeCursorsAndLengths(2*j) = nodeCursorsAndLengths(2*j + 1) + j -= 1 + } + nodeCursorsAndLengths(2*fork) = i + searchNextValueNode() + } + semi + } + } +} + + +private[collection] final class AnyChampStepper[A, T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => A) +extends ChampStepperBase[A, T, AnyStepper[A], AnyChampStepper[A, T]](_maxSize) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): AnyChampStepper[A, T] = new AnyChampStepper[A, T](0, extract) +} +private[collection] object AnyChampStepper { + def from[A, T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => A): AnyChampStepper[A, T] = { + val ans = new AnyChampStepper[A, T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class DoubleChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Double) +extends ChampStepperBase[Double, T, DoubleStepper, DoubleChampStepper[T]](_maxSize) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): DoubleChampStepper[T] = new DoubleChampStepper[T](0, extract) +} +private[collection] object DoubleChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, 
root: T, extract: (T, Int) => Double): DoubleChampStepper[T] = { + val ans = new DoubleChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class IntChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Int) +extends ChampStepperBase[Int, T, IntStepper, IntChampStepper[T]](_maxSize) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): IntChampStepper[T] = new IntChampStepper[T](0, extract) +} +private[collection] object IntChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Int): IntChampStepper[T] = { + val ans = new IntChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class LongChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Long) +extends ChampStepperBase[Long, T, LongStepper, LongChampStepper[T]](_maxSize) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): LongChampStepper[T] = new LongChampStepper[T](0, extract) +} +private[collection] object LongChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Long): LongChampStepper[T] = { + val ans = new LongChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala new file mode 100644 index 000000000000..7140c7d673d0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala @@ -0,0 +1,54 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL 
and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit +// import language.experimental.captureChecking // TODO enable + +/** Abstracts all the generic operations of stepping over a collection + * that has an indexable ordering but may have gaps. + * + * For collections that are guaranteed to not have gaps, use `IndexedStepperBase` instead. + */ +private[convert] abstract class InOrderStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) +extends EfficientSplit { + /** Set `true` if the element at `i0` is known to be there. `false` if either not known or is a gap. + */ + protected def found: Boolean + + /** Advance `i0` over any gaps, updating internal state so `found` is correct at the new position. + * Returns the new value of `found`. + */ + protected def findNext(): Boolean + + protected def semiclone(half: Int): Semi + + final def hasStep: Boolean = found || findNext() + + def characteristics: Int = Spliterator.ORDERED + + def estimateSize: Long = iN - i0 + + def trySplit(): Sub = { + if (iN-1 > i0) { + val half = (i0 + iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala new file mode 100644 index 000000000000..1e2983fde50d --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala @@ -0,0 +1,45 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ +// import language.experimental.captureChecking // TODO enable + +private[collection] class AnyIndexedSeqStepper[A](underlying: collection.IndexedSeqOps[A, AnyConstr, _], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], AnyIndexedSeqStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): AnyIndexedSeqStepper[A] = new AnyIndexedSeqStepper[A](underlying, i0, half) +} + +private[collection] class DoubleIndexedSeqStepper[CC <: collection.IndexedSeqOps[Double, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleIndexedSeqStepper[CC]](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleIndexedSeqStepper[CC] = new DoubleIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class IntIndexedSeqStepper[CC <: collection.IndexedSeqOps[Int, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntIndexedSeqStepper[CC]](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntIndexedSeqStepper[CC] = new IntIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class LongIndexedSeqStepper[CC <: collection.IndexedSeqOps[Long, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongIndexedSeqStepper[CC]](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def 
semiclone(half: Int): LongIndexedSeqStepper[CC] = new LongIndexedSeqStepper[CC](underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala new file mode 100644 index 000000000000..cae3809ab077 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit +// import language.experimental.captureChecking // TODO enable + +/** Abstracts all the generic operations of stepping over an indexable collection */ +private[convert] abstract class IndexedStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) + extends EfficientSplit { + protected def semiclone(half: Int): Semi + + def hasStep: Boolean = i0 < iN + + def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED + + def estimateSize: Long = iN - i0 + + def trySplit(): Sub = { + if (iN-1 > i0) { + val half = (i0+iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala new file mode 100644 index 000000000000..393e988959eb --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala @@ -0,0 +1,130 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper} +import scala.jdk.{AnyAccumulator, DoubleAccumulator, IntAccumulator, LongAccumulator} +// import language.experimental.captureChecking // TODO enable + +private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) + extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying) + with AnyStepper[A] { + protected def semiclone(): AnyIteratorStepper[A] = new AnyIteratorStepper(null) + + def nextStep(): A = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): AnyStepper[A] = if (proxied ne null) proxied.trySplit() else { + val acc = new AnyAccumulator[A] + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class DoubleIteratorStepper(_underlying: Iterator[Double]) + extends IteratorStepperBase[Double, DoubleStepper, DoubleIteratorStepper](_underlying) + with DoubleStepper { + protected def semiclone(): DoubleIteratorStepper = new DoubleIteratorStepper(null) + + def nextStep(): Double = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): DoubleStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new DoubleAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += 
underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class IntIteratorStepper(_underlying: Iterator[Int]) + extends IteratorStepperBase[Int, IntStepper, IntIteratorStepper](_underlying) + with IntStepper { + protected def semiclone(): IntIteratorStepper = new IntIteratorStepper(null) + + def nextStep(): Int = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): IntStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new IntAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class LongIteratorStepper(_underlying: Iterator[Long]) + extends IteratorStepperBase[Long, LongStepper, LongIteratorStepper](_underlying) + with LongStepper { + protected def semiclone(): LongIteratorStepper = new LongIteratorStepper(null) + + def nextStep(): Long = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): LongStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new LongAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + 
} +} + +/** Common functionality for Steppers that step through an Iterator, caching the results as needed when a split is requested. */ +private[convert] abstract class IteratorStepperBase[A, SP >: Null <: Stepper[A], Semi <: SP](final protected val underlying: Iterator[A]) { + final protected var nextChunkSize = 16 + final protected var proxied: SP = null + protected def semiclone(): Semi // Must initialize with null iterator! + def characteristics: Int = if (proxied ne null) Spliterator.ORDERED | Spliterator.SIZED | Spliterator.SUBSIZED else Spliterator.ORDERED + def estimateSize: Long = if (proxied ne null) proxied.estimateSize else Long.MaxValue + def hasStep: Boolean = if (proxied ne null) proxied.hasStep else underlying.hasNext +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala new file mode 100644 index 000000000000..7c122f901839 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */
+
+package scala.collection.convert
+package impl
+
+import scala.collection.{AnyStepper, IntStepper, LongStepper, Stepper}
+import scala.collection.immutable.NumericRange
+// import language.experimental.captureChecking // TODO enable
+
+private[collection] class AnyNumericRangeStepper[A](underlying: NumericRange[A], _i0: Int, _iN: Int)
+extends IndexedStepperBase[AnyStepper[A], AnyNumericRangeStepper[A]](_i0, _iN)
+with AnyStepper[A] {
+  def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() // index-based step; element lookup delegates to NumericRange.apply
+  def semiclone(half: Int) = new AnyNumericRangeStepper[A](underlying, i0, half) // clone covers [i0, half); base class advances this stepper past it
+}
+
+private[collection] class IntNumericRangeStepper(underlying: NumericRange[Int], _i0: Int, _iN: Int)
+extends IndexedStepperBase[IntStepper, IntNumericRangeStepper](_i0, _iN)
+with IntStepper {
+  def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() // index-based step; element lookup delegates to NumericRange.apply
+  def semiclone(half: Int) = new IntNumericRangeStepper(underlying, i0, half) // clone covers [i0, half); base class advances this stepper past it
+}
+
+private[collection] class LongNumericRangeStepper(underlying: NumericRange[Long], _i0: Int, _iN: Int)
+extends IndexedStepperBase[LongStepper, LongNumericRangeStepper](_i0, _iN)
+with LongStepper {
+  def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() // index-based step; element lookup delegates to NumericRange.apply
+  def semiclone(half: Int) = new LongNumericRangeStepper(underlying, i0, half) // clone covers [i0, half); base class advances this stepper past it
+}
diff --git a/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala
new file mode 100644
index 000000000000..50ab623a014e
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala
@@ -0,0 +1,41 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.convert
+package impl
+
+import scala.collection.{IntStepper, Stepper}
+// import language.experimental.captureChecking // TODO enable
+
+/** Implements Stepper on an integer Range. You don't actually need the Range to do this,
+ * so only the relevant parts are included. Because the arguments are protected, they are
+ * not error-checked; `Range` is required to provide valid arguments.
+ */
+private[collection] final class RangeStepper(protected var myNext: Int, myStep: Int, _i0: Int, _iN: Int)
+extends IndexedStepperBase[IntStepper, RangeStepper](_i0, _iN)
+with IntStepper {
+  def nextStep(): Int =
+    if (hasStep) {
+      val ans = myNext // current range value; i0 only counts how many steps were taken
+      myNext += myStep
+      i0 += 1
+      ans
+    }
+    else Stepper.throwNSEE()
+  protected def semiclone(half: Int): RangeStepper = new RangeStepper(myNext, myStep, i0, half)
+  override def trySplit(): IntStepper = {
+    val old_i0 = i0
+    val ans = super.trySplit()
+    myNext += (i0 - old_i0) * myStep // super.trySplit() advanced i0; move myNext forward by the same number of steps
+    ans
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala
new file mode 100644
index 000000000000..fe127b857c45
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala
@@ -0,0 +1,59 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.convert
+package impl
+
+import java.lang.Character.{charCount, isLowSurrogate}
+import java.util.Spliterator
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.{IntStepper, Stepper}
+// import language.experimental.captureChecking // TODO enable
+
+/** Implements `Stepper` on a `String` where you step through chars packed into `Int`.
+ */
+private[collection] final class CharStringStepper(underlying: String, _i0: Int, _iN: Int)
+extends IndexedStepperBase[IntStepper, CharStringStepper](_i0, _iN)
+with IntStepper {
+  def nextStep(): Int =
+    if (hasStep) { val j = i0; i0 += 1; underlying.charAt(j) } // one UTF-16 char per step, widened to Int
+    else Stepper.throwNSEE()
+
+  def semiclone(half: Int): CharStringStepper = new CharStringStepper(underlying, i0, half)
+}
+
+/** Implements `Stepper` on a `String` where you step through code points.
+ */
+private[collection] final class CodePointStringStepper(underlying: String, private var i0: Int, private var iN: Int)
+extends IntStepper with EfficientSplit {
+  def characteristics: Int = Spliterator.IMMUTABLE | Spliterator.NONNULL | Spliterator.ORDERED
+  def estimateSize: Long = iN - i0 // upper bound: counts chars, not code points
+  def hasStep: Boolean = i0 < iN
+  def nextStep(): Int = {
+    if (hasStep) {
+      val cp = underlying.codePointAt(i0)
+      i0 += charCount(cp) // surrogate pairs advance by 2 chars
+      cp
+    }
+    else Stepper.throwNSEE()
+  }
+  def trySplit(): CodePointStringStepper =
+    if (iN - 3 > i0) {
+      var half = (i0 + iN) >>> 1
+      if (isLowSurrogate(underlying.charAt(half))) half -= 1 // never split in the middle of a surrogate pair
+      val ans = new CodePointStringStepper(underlying, i0, half)
+      i0 = half
+      ans
+    }
+    else null
+}
diff --git a/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala
new file mode 100644
index 000000000000..6329d83bc2a0
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala
@@ -0,0 +1,139 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.convert
+package impl
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection._
+// import language.experimental.captureChecking // TODO enable
+
+private[collection] abstract class TableStepperBase[A, I >: Null <: AnyRef, Sub >: Null, Semi <: Sub with TableStepperBase[A, I, _, _]](
+  protected var maxLength: Int, protected val table: Array[I], protected var i0: Int, protected val iN: Int
+)
+extends EfficientSplit {
+  // Always holds table(i0); if `null` it is time to switch to the next element
+  protected var myCurrent: I = if (i0 < iN) table(i0) else null
+
+  // Only call this when `myCurrent` is null (meaning we need to advance)
+  @annotation.tailrec
+  protected final def findNextCurrent(): Boolean =
+    if (i0 < iN) {
+      i0 += 1
+      if (i0 >= iN) false
+      else {
+        myCurrent = table(i0)
+        if (myCurrent eq null) findNextCurrent() // skip empty slots
+        else true
+      }
+    }
+    else false
+
+  protected def semiclone(half: Int): Semi
+
+  def characteristics: Int = 0
+
+  def estimateSize: Long = if (!hasStep) { maxLength = 0; 0 } else maxLength // maxLength is an upper bound, not an exact count
+
+  def hasStep: Boolean = (myCurrent ne null) || findNextCurrent()
+
+  def trySplit(): Sub = {
+    if (iN-1 > i0 && maxLength > 0) {
+      val half = (i0 + iN) >>> 1
+      val ans = semiclone(half) // clone takes [i0, half); this stepper keeps [half, iN)
+      ans.myCurrent = myCurrent
+      myCurrent = table(half)
+      var inLeft = if (ans.myCurrent ne null) 1 else 0
+      var inRight = if (myCurrent ne null) 1 else 0
+      if (iN - i0 < 32) { // small span: tighten the bounds by counting the leading runs of occupied slots
+        var i = i0+1
+        while (i < half && (table(i) ne null)) { i += 1; inLeft += 1 }
+        i = half+1
+        while (i < iN && (table(i) ne null)) { i += 1; inRight += 1 }
+      }
+      maxLength -= inLeft
+      ans.maxLength -= inRight
+      i0 = half
+      ans
+    }
+    else null
+  }
+}
+
+
+private[collection] final class AnyTableStepper[A, I >: Null <: AnyRef](
+  _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => A, _i0: Int, _iN: Int
+)
+extends TableStepperBase[A, I, AnyStepper[A], AnyTableStepper[A, I]](_maxLength, _table, _i0, _iN)
+with AnyStepper[A] {
+  def nextStep(): A =
+    if (hasStep) {
+      val ans = extract(myCurrent)
+      myCurrent = iterate(myCurrent) // follow the bucket's chain; null means move to the next slot
+      ans
+    }
+    else Stepper.throwNSEE()
+
+  def semiclone(half: Int): AnyTableStepper[A, I] = new AnyTableStepper[A, I](maxLength, table, iterate, extract, i0, half)
+}
+
+
+private[collection] final class DoubleTableStepper[I >: Null <: AnyRef](
+  _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Double, _i0: Int, _iN: Int
+)
+extends TableStepperBase[Double, I, DoubleStepper, DoubleTableStepper[I]](_maxLength, _table, _i0, _iN)
+with DoubleStepper {
+  def nextStep(): Double =
+    if (hasStep) {
+      val ans = extract(myCurrent)
+      myCurrent = iterate(myCurrent) // follow the bucket's chain; null means move to the next slot
+      ans
+    }
+    else Stepper.throwNSEE()
+
+  def semiclone(half: Int): DoubleTableStepper[I] = new DoubleTableStepper[I](maxLength, table, iterate, extract, i0, half)
+}
+
+
+private[collection] final class IntTableStepper[I >: Null <: AnyRef](
+  _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Int, _i0: Int, _iN: Int
+)
+extends TableStepperBase[Int, I, IntStepper, IntTableStepper[I]](_maxLength, _table, _i0, _iN)
+with IntStepper {
+  def nextStep(): Int =
+    if (hasStep) {
+      val ans = extract(myCurrent)
+      myCurrent = iterate(myCurrent) // follow the bucket's chain; null means move to the next slot
+      ans
+    }
+    else Stepper.throwNSEE()
+
+  def semiclone(half: Int): IntTableStepper[I] = new IntTableStepper[I](maxLength, table, iterate, extract, i0, half)
+}
+
+
+private[collection] final class LongTableStepper[I >: Null <: AnyRef](
+  _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Long, _i0: Int, _iN: Int
+)
+extends TableStepperBase[Long, I, LongStepper, LongTableStepper[I]](_maxLength, _table, _i0, _iN)
+with LongStepper {
+  def nextStep(): Long =
+    if (hasStep) {
+      val ans = extract(myCurrent)
+      myCurrent = iterate(myCurrent) // follow the bucket's chain; null means move to the next slot
+      ans
+    }
+    else Stepper.throwNSEE()
+
+  def semiclone(half: Int): LongTableStepper[I] = new LongTableStepper[I](maxLength, table, iterate, extract, i0, half)
+}
+
diff --git
a/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala
new file mode 100644
index 000000000000..504e0dac63ea
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala
@@ -0,0 +1,132 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.convert
+package impl
+
+import scala.collection._
+// import language.experimental.captureChecking // TODO enable
+
+private[convert] abstract class VectorStepperBase[Sub >: Null, Semi <: Sub](
+  _i0: Int,
+  _iN: Int,
+  protected val displayN: Int,
+  protected val trunk: Array[AnyRef]
+)
+extends IndexedStepperBase[Sub, Semi](_i0, _iN) {
+  protected var index: Int = 32 // Force an advanceData on the first element
+  protected var leaves: Array[AnyRef] = null
+  protected var index1: Int = 32 // Force advanceData to defer to initTo on the first element
+  protected var twigs: Array[AnyRef] = null
+
+  protected final def advanceData(iX: Int): Unit = {
+    index1 += 1
+    if (index1 >= 32) initTo(iX) // exhausted this twig's 32 leaf arrays: re-seek from the top
+    else {
+      leaves = twigs(index1).asInstanceOf[Array[AnyRef]]
+      index = 0
+    }
+  }
+  protected final def initTo(iX: Int): Unit = displayN match {
+    case 0 => // depth 0: trunk is the leaf array itself
+      leaves = trunk
+      index = iX
+    case 1 => // depth 1: trunk holds the leaf arrays directly
+      twigs = trunk
+      index1 = iX >>> 5
+      leaves = twigs(index1).asInstanceOf[Array[AnyRef]]
+      index = iX & 0x1F
+    case _ => // deeper: descend 5 bits per level until the twig (second-to-last) level
+      var n = displayN
+      var dataN = trunk
+      while (n > 2) {
+        dataN = dataN((iX >> (5*n)) & 0x1F).asInstanceOf[Array[AnyRef]]
+        n -= 1
+      }
+      twigs = dataN((iX >>> 10) & 0x1F).asInstanceOf[Array[AnyRef]]
+      index1 = (iX >> 5) & 0x1F
+      leaves = twigs(index1).asInstanceOf[Array[AnyRef]]
+      index = iX & 0x1F
+  }
+}
+
+private[collection] class AnyVectorStepper[A](_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef])
+extends VectorStepperBase[AnyStepper[A], AnyVectorStepper[A]](_i0, _iN, _displayN, _trunk)
+with AnyStepper[A] {
+  def nextStep(): A = if (hasStep) {
+    index += 1
+    if (index >= 32) advanceData(i0) // leaf exhausted: load the next one
+    i0 += 1
+    leaves(index).asInstanceOf[A]
+  } else Stepper.throwNSEE()
+  def semiclone(half: Int): AnyVectorStepper[A] = {
+    val ans = new AnyVectorStepper[A](i0, half, displayN, trunk)
+    index = 32 // invalidate cached leaf/twig state: this stepper now starts at half
+    index1 = 32
+    i0 = half
+    ans
+  }
+}
+
+private[collection] class DoubleVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef])
+extends VectorStepperBase[DoubleStepper, DoubleVectorStepper](_i0, _iN, _displayN, _trunk)
+with DoubleStepper {
+  def nextStep(): Double = if (hasStep) {
+    index += 1
+    if (index >= 32) advanceData(i0) // leaf exhausted: load the next one
+    i0 += 1
+    leaves(index).asInstanceOf[Double]
+  } else Stepper.throwNSEE()
+  def semiclone(half: Int): DoubleVectorStepper = {
+    val ans = new DoubleVectorStepper(i0, half, displayN, trunk)
+    index = 32 // invalidate cached leaf/twig state: this stepper now starts at half
+    index1 = 32
+    i0 = half
+    ans
+  }
+}
+
+private[collection] class IntVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef])
+extends VectorStepperBase[IntStepper, IntVectorStepper](_i0, _iN, _displayN, _trunk)
+with IntStepper {
+  def nextStep(): Int = if (hasStep) {
+    index += 1
+    if (index >= 32) advanceData(i0) // leaf exhausted: load the next one
+    i0 += 1
+    leaves(index).asInstanceOf[Int]
+  } else Stepper.throwNSEE()
+  def semiclone(half: Int): IntVectorStepper = {
+    val ans = new IntVectorStepper(i0, half, displayN, trunk)
+    index = 32 // invalidate cached leaf/twig state: this stepper now starts at half
+    index1 = 32
+    i0 = half
+    ans
+  }
+}
+
+private[collection] class LongVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef])
+extends VectorStepperBase[LongStepper, LongVectorStepper](_i0, _iN, _displayN, _trunk)
+with LongStepper {
+  def nextStep(): Long = if (hasStep) {
+    index += 1
+    if (index >= 32) advanceData(i0) // leaf exhausted: load the next one
+    i0 += 1
+    leaves(index).asInstanceOf[Long]
+  } else Stepper.throwNSEE()
+  def semiclone(half: Int): LongVectorStepper = {
+    val ans = new LongVectorStepper(i0, half, displayN, trunk)
+    index = 32 // invalidate cached leaf/twig state: this stepper now starts at half
+    index1 = 32
+    i0 = half
+    ans
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala
new file mode 100644
index 000000000000..5740490223b2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala
@@ -0,0 +1,39 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+import language.experimental.captureChecking
+
+/**
+ * A generic trait for ordered mutable maps. Concrete classes have to provide
+ * functionality for the abstract methods in `SeqMap`.
+ *
+ * Note that when checking for equality [[SeqMap]] does not take into account
+ * ordering.
+ *
+ * @tparam K the type of the keys contained in this linked map.
+ * @tparam V the type of the values associated with the keys in this linked map.
+ *
+ * @define coll mutable Seq map
+ * @define Coll `mutable.SeqMap`
+ */
+
+trait SeqMap[K, V] extends Map[K, V]
+  with collection.SeqMap[K, V]
+  with MapOps[K, V, SeqMap, SeqMap[K, V]]
+  with MapFactoryDefaults[K, V, SeqMap, Iterable] {
+  override def mapFactory: MapFactory[SeqMap] = SeqMap
+}
+
+object SeqMap extends MapFactory.Delegate[SeqMap](LinkedHashMap)