From 4971581667089bab06f303ff8b695d85c0547558 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filip=20Zyba=C5=82a?= Date: Thu, 26 Aug 2021 13:33:57 +0200 Subject: [PATCH 001/465] Add Symbol.isSuperAccessor to reflection API --- .../quoted/runtime/impl/QuotesImpl.scala | 1 + library/src/scala/quoted/Quotes.scala | 4 ++ .../scaladoc/tasty/SyntheticSupport.scala | 1 + .../run-tasty-inspector/isSuperAccessor.check | 1 + .../run-tasty-inspector/isSuperAccessor.scala | 39 +++++++++++++++++++ .../stdlibExperimentalDefinitions.scala | 4 +- 6 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 tests/run-tasty-inspector/isSuperAccessor.check create mode 100644 tests/run-tasty-inspector/isSuperAccessor.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 04d8d7bc51a0..318e6f131e1b 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2675,6 +2675,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def isAnonymousFunction: Boolean = self.denot.isAnonymousFunction def isAbstractType: Boolean = self.denot.isAbstractType def isClassConstructor: Boolean = self.denot.isClassConstructor + def isSuperAccessor = self.name.is(dotc.core.NameKinds.SuperAccessorName) def isType: Boolean = self.isType def isTerm: Boolean = self.isTerm def isPackageDef: Boolean = self.is(dotc.core.Flags.Package) diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index fa96b73551d1..3e3ffb8983d5 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -3998,6 +3998,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is this the constructor of a class? */ def isClassConstructor: Boolean + /** Is this the super accessor? 
*/ + @experimental // TODO when stable, remove `dotty.tools.scaladoc.tasty.ClassLikeSupport.isSuperBridgeMethod` and use this method + def isSuperAccessor: Boolean + /** Is this the definition of a type? */ def isType: Boolean diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala index b33d5f61faac..2f85345baddc 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala @@ -10,6 +10,7 @@ object SyntheticsSupport: import reflect._ s.flags.is(Flags.Synthetic) || s.flags.is(Flags.FieldAccessor) || s.isDefaultHelperMethod + // TODO remove and use `SymbolMethods.isSuperAccessor` def isSuperBridgeMethod: Boolean = s.name.contains("$super$") def isDefaultHelperMethod: Boolean = ".*\\$default\\$\\d+$".r.matches(s.name) diff --git a/tests/run-tasty-inspector/isSuperAccessor.check b/tests/run-tasty-inspector/isSuperAccessor.check new file mode 100644 index 000000000000..4c90083e2b6b --- /dev/null +++ b/tests/run-tasty-inspector/isSuperAccessor.check @@ -0,0 +1 @@ +method SyncIterator$$super$next diff --git a/tests/run-tasty-inspector/isSuperAccessor.scala b/tests/run-tasty-inspector/isSuperAccessor.scala new file mode 100644 index 000000000000..3b950f8c79cf --- /dev/null +++ b/tests/run-tasty-inspector/isSuperAccessor.scala @@ -0,0 +1,39 @@ +import scala.quoted.* +import scala.tasty.inspector.* + +@main def Test = { + // Artefact of the current test infrastructure + // TODO improve infrastructure to avoid needing this code on each test + val classpath = dotty.tools.dotc.util.ClasspathFromClassloader(this.getClass.getClassLoader).split(java.io.File.pathSeparator).find(_.contains("runWithCompiler")).get + val allTastyFiles = dotty.tools.io.Path(classpath).walkFilter(_.extension == "tasty").map(_.toString).toList + val tastyFiles = allTastyFiles.filter(_.contains("SyncIterator")) + + 
TastyInspector.inspectTastyFiles(tastyFiles)(new MyInspector) +} + +class MyInspector extends Inspector: + + override def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit = + import quotes.reflect.* + class Traverser extends TreeTraverser: + override def traverseTree(tree: Tree)(owner: Symbol) = + tree match + case tree: DefDef if tree.symbol.isSuperAccessor => + println(tree.symbol) + case _ => + super.traverseTree(tree)(owner) + end Traverser + + val traverser = new Traverser + tastys.foreach { tasty => + traverser.traverseTree(tasty.ast)(tasty.ast.symbol) + } + + +trait IntIterator { + def next: Int + def drop(n: Int): Unit +} +trait SyncIterator extends IntIterator { + abstract override def next: Int = super.next +} diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 111c8c0bada3..74d1124777aa 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -72,7 +72,9 @@ val experimentalDefinitionInLibrary = Set( "scala.annotation.init$.region", //// New APIs: Quotes - // Can be stabilized in 3.4.0 (unsure) or later + // Can be stabilized in 3.5.0 or later + "scala.quoted.Quotes.reflectModule.SymbolMethods.isSuperAccessor", + // Can be stabilized in 3.5.0 (unsure) or later "scala.quoted.Quotes.reflectModule.CompilationInfoModule.XmacroSettings", // Cant be stabilized yet. // Need newClass variant that can add constructor parameters. From cd04d004e0cb1592ed79854b990686fb7c2ea843 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 12 Mar 2024 17:01:50 +0100 Subject: [PATCH 002/465] Reduce projections of type aliases with class type prefixes Projections P # X are types that we would like to avoid. If X is a class type, there's nothing we can do. If X is an abstract type, we use skolemization and rewrite to (x?: P).X. 
If X is an alias type we should simply dealias but this was not done before. This caused an exponential blowup in #19892, where we constructed types of the form ZPartialServerEndpoint[R, A, B, I, E, O, -C] # EndpointType[A, I, E, T, R] ... # EndpointType[A, I, E, T, R] When there were 5 or more such selections, compile times blew up (33s for 5, timeout after 5 minutes for 6). I am still not quite sure where the blowup happened. Looking at stacktraces of random interrupts it seemed to be in a deep recursion of memberDenot and asSeenFrom calls. I believe it would still be interesting to find out more about this, in case there are other similar situations where combinations of deep projections with wide applications cannot be avoided. But for this precise problem, eagerly dealiasing fixes it. --- .../src/dotty/tools/dotc/core/Types.scala | 16 +++++++++--- .../test/dotc/pos-test-pickling.blacklist | 4 +++ tests/pos/i19892.scala | 26 +++++++++++++++++++ 3 files changed, 42 insertions(+), 4 deletions(-) create mode 100644 tests/pos/i19892.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index e38fbbb4b355..b70f286b70ed 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2708,13 +2708,21 @@ object Types extends TypeUtils { case _ => true } - /** Reduce a type-ref `T { X = U; ... } # X` to `U` - * provided `U` does not refer with a RecThis to the - * refinement type `T { X = U; ... }` + /** Reduce a type ref P # X, where X is a type alias and P is a refined type or + * a class type. If P is a refined type `T { X = U; ... }`, reduce P to U, + * provided U does not refer with a RecThis to the same refined type. If P is a + * class type, reduce it to the dealiased version of P # X. 
This means that at typer + * we create projections only for inner classes with class prefixes, since projections + * of P # X where X is an abstract type are handled by skolemization. At later phases + * these projections might arise, though. */ def reduceProjection(using Context): Type = val reduced = prefix.lookupRefined(name) - if reduced.exists then reduced else this + if reduced.exists then reduced + else prefix.stripTypeVar match + case pre: (AppliedType | TypeRef) + if prefix.typeSymbol.isClass && this.symbol.isAliasType => dealias + case _ => this /** Guard against cycles that can arise if given `op` * follows info. The problematic cases are a type alias to itself or diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 32f8cdef1386..3785f8fa6e06 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -114,3 +114,7 @@ java-inherited-type1 # recursion limit exceeded i7445b.scala + +# more aggresive reduce projection makes a difference +i15525.scala + diff --git a/tests/pos/i19892.scala b/tests/pos/i19892.scala new file mode 100644 index 000000000000..6f3e0bd6d06c --- /dev/null +++ b/tests/pos/i19892.scala @@ -0,0 +1,26 @@ +abstract class ZPartialServerEndpoint[R, A, B, I, E, O, -C] + extends EndpointOps[A, I, E, O, C]{ + override type ThisType[-_R] = ZPartialServerEndpoint[R, A, B, I, E, O, _R] + override type EndpointType[_A, _I, _E, _O, -_R] =ZPartialServerEndpoint[R, _A, B, _I, _E, _O, _R] +} + +trait EndpointOps[A, I, E, O, -R] { + type EndpointType[_A, _I, _E, _O, -_R] + type ThisType[-_R] + def out[T]: EndpointType[A, I, E, T, R] + def description(d: String): ThisType[R] +} + +object Test { + def basicEndpoint[R](): ZPartialServerEndpoint[R, Any, Any, Unit, Any, Unit, Any] = ??? 
+ + // commonts next to `.out[Any]` contain information about compilation time when chaining up to N `out` functions + val case1 = + basicEndpoint() // 1.5s + .out[Any] // 1.6s + .out[Any] // 1.7s + .out[Any] // 2s + .out[Any] // 4s + .out[Any] // 33s + .out[Any] // aborted after 5 min +} \ No newline at end of file From 291e84f4bf4ef70c50facb09c42ab75c115c2080 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Mar 2024 16:58:26 +0100 Subject: [PATCH 003/465] Ignore failing tests in TypeDocumentHighlightSuite --- .../pc/tests/highlight/TypeDocumentHighlightSuite.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/highlight/TypeDocumentHighlightSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/highlight/TypeDocumentHighlightSuite.scala index 35ca8232dc1e..71adb819d7c7 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/highlight/TypeDocumentHighlightSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/highlight/TypeDocumentHighlightSuite.scala @@ -2,7 +2,7 @@ package dotty.tools.pc.tests.highlight import dotty.tools.pc.base.BaseDocumentHighlightSuite -import org.junit.Test +import org.junit.{Test, Ignore} class TypeDocumentHighlightSuite extends BaseDocumentHighlightSuite: @@ -147,7 +147,7 @@ class TypeDocumentHighlightSuite extends BaseDocumentHighlightSuite: |}""".stripMargin ) - @Test def `projection1` = + @Ignore @Test def `projection1` = check( """| |class A { @@ -158,7 +158,7 @@ class TypeDocumentHighlightSuite extends BaseDocumentHighlightSuite: |}""".stripMargin ) - @Test def `projection2` = + @Ignore @Test def `projection2` = check( """| |class A { From 5d6a0a83bceefefacdcd633449bb75ceec432547 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 28 Nov 2023 11:22:11 +0100 Subject: [PATCH 004/465] Remove the findClass method from ClassPath the method was never used, and not well defined, e.g. 
with branches to search in both tasty files and class files, which could be severely inefficient. --- .../dotc/classpath/AggregateClassPath.scala | 19 ------------- .../dotc/classpath/DirectoryClassPath.scala | 22 ++------------- .../classpath/VirtualDirectoryClassPath.scala | 8 ++---- .../ZipAndJarFileLookupFactory.scala | 8 +----- .../dotc/classpath/ZipArchiveFileLookup.scala | 9 ------ compiler/src/dotty/tools/io/ClassPath.scala | 28 ++----------------- .../dotc/classpath/JrtClassPathTest.scala | 1 - .../ZipAndJarFileLookupFactoryTest.scala | 6 ++-- 8 files changed, 11 insertions(+), 90 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 4c5b632bf6ab..cd44ba27df96 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -33,25 +33,6 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) } - override def findClass(className: String): Option[ClassRepresentation] = { - val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - - def findEntry(isSource: Boolean): Option[ClassRepresentation] = - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { - case Some(s: SourceFileEntry) if isSource => s - case Some(s: BinaryFileEntry) if !isSource => s - } - - val classEntry = findEntry(isSource = false) - val sourceEntry = findEntry(isSource = true) - - (classEntry, sourceEntry) match { - case (Some(c: BinaryFileEntry), Some(s: SourceFileEntry)) => Some(BinaryAndSourceFilesEntry(c, s)) - case (c @ Some(_), _) => c - case (_, s) => s - } - } - override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct diff --git 
a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index c5b267bc774d..212f7123198a 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -274,17 +274,12 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas } case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[BinaryFileEntry] with NoSourcePaths { - override def findClass(className: String): Option[ClassRepresentation] = - findClassFile(className).map(BinaryFileEntry(_)) def findClassFile(className: String): Option[AbstractFile] = { val relativePath = FileUtils.dirPath(className) - val tastyFile = new JFile(dir, relativePath + ".tasty") - if tastyFile.exists then Some(tastyFile.toPath.toPlainFile) - else - val classFile = new JFile(dir, relativePath + ".class") - if classFile.exists then Some(classFile.toPath.toPlainFile) - else None + val classFile = new JFile(dir, relativePath + ".class") + if classFile.exists then Some(classFile.toPath.toPlainFile) + else None } protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) @@ -301,16 +296,5 @@ case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFi protected def createFileEntry(file: AbstractFile): SourceFileEntry = SourceFileEntry(file) protected def isMatchingFile(f: JFile): Boolean = endsScalaOrJava(f.getName) - override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className).map(SourceFileEntry(_)) - - private def findSourceFile(className: String): Option[AbstractFile] = { - val relativePath = FileUtils.dirPath(className) - val sourceFile = LazyList("scala", "java") - .map(ext => new JFile(dir, relativePath + "." 
+ ext)) - .collectFirst { case file if file.exists() => file } - - sourceFile.map(_.toPath.toPlainFile) - } - private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) } diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 93583c85fff7..9a2f49a786f4 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -38,16 +38,12 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi def asURLs: Seq[URL] = Seq(new URI(dir.name).toURL) def asClassPathStrings: Seq[String] = Seq(dir.path) - override def findClass(className: String): Option[ClassRepresentation] = - findClassFile(className).map(BinaryFileEntry(_)) - def findClassFile(className: String): Option[AbstractFile] = { val pathSeq = FileUtils.dirPath(className).split(java.io.File.separator) val parentDir = lookupPath(dir)(pathSeq.init.toSeq, directory = true) - if parentDir == null then return None + if parentDir == null then None else - Option(lookupPath(parentDir)(pathSeq.last + ".tasty" :: Nil, directory = false)) - .orElse(Option(lookupPath(parentDir)(pathSeq.last + ".class" :: Nil, directory = false))) + Option(lookupPath(parentDir)(pathSeq.last + ".class" :: Nil, directory = false)) } private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index dac156c5f647..3a725ad6e052 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -45,14 +45,8 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { with NoSourcePaths { 
override def findClassFile(className: String): Option[AbstractFile] = - findClass(className).map(_.file) - - // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. - override def findClass(className: String): Option[BinaryFileEntry] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - val binaries = files(PackageName(pkg), simpleClassName + ".tasty", simpleClassName + ".class") - binaries.find(_.file.isTasty).orElse(binaries.find(_.file.isClass)) - } + file(PackageName(pkg), simpleClassName + ".class").map(_.file) override private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index ca8636e3884f..4595f7978999 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -43,15 +43,6 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie } yield createFileEntry(entry) - protected def files(inPackage: PackageName, names: String*): Seq[FileEntryType] = - for { - dirEntry <- findDirEntry(inPackage).toSeq - name <- names - entry <- Option(dirEntry.lookupName(name, directory = false)) - if isRequiredFileType(entry) - } - yield createFileEntry(entry) - protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = for { dirEntry <- findDirEntry(inPackage) diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index 5344e2cf7e35..f77bc1efca91 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -56,32 +56,8 @@ trait ClassPath { private[dotty] def list(inPackage: PackageName): ClassPathEntries /** - * Returns the class file and / or source file for a given external name, e.g., 
"java.lang.String". - * If there is both a class file and source file, the compiler can decide whether to read the - * class file or compile the source file. - * - * Internally this seems to be used only by `ScriptRunner`, but only to call `.isDefined`. That - * could probably be implemented differently. - * - * Externally, it is used by sbt's compiler interface: - * https://github.com/sbt/sbt/blob/v0.13.15/compile/interface/src/main/scala/xsbt/CompilerInterface.scala#L249 - * Jason has some improvements for that in the works (https://github.com/scala/bug/issues/10289#issuecomment-310022699) - */ - def findClass(className: String): Option[ClassRepresentation] = { - // A default implementation which should be overridden, if we can create the more efficient - // solution for a given type of ClassPath - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - - val packageName = PackageName(pkg) - val foundClassFromClassFiles = classes(packageName).find(_.name == simpleClassName) - def findClassInSources = sources(packageName).find(_.name == simpleClassName) - - foundClassFromClassFiles orElse findClassInSources - } - - /** - * Returns the classfile for an external name, e.g., "java.lang.String". This method does not - * return source files. + * Returns *only* the classfile for an external name, e.g., "java.lang.String". This method does not + * return source files, tasty files,. * * This method is used by the classfile parser. When parsing a Java class, its own inner classes * are entered with a `ClassfileLoader` that parses the classfile returned by this method. 
diff --git a/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala b/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala index b676bb100320..a06698c1d513 100644 --- a/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala +++ b/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala @@ -38,7 +38,6 @@ class JrtClassPathTest { assertEquals("java/lang/Object", AsmUtils.readClass(jl_Object.file.toByteArray).name) assertTrue(cp.list("java.lang").packages.exists(_.name == "java.lang.annotation")) assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object")) - assertTrue(cp.findClass("java.lang.Object").isDefined) assertTrue(cp.findClassFile("java.lang.Object").isDefined) } } diff --git a/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala b/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala index 84973b8d3d71..db14ff3b1fb4 100644 --- a/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala +++ b/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala @@ -27,7 +27,7 @@ class ZipAndJarFileLookupFactoryTest { createZip(f, Array(), "p2/X.class") createZip(f, Array(), "p3/Y.class") val cp1 = createCp - assert(cp1.findClass("p1.C").isDefined) + assert(cp1.findClassFile("p1.C").isDefined) // We expect get a cache hit as the underlying zip hasn't changed val cp2 = createCp @@ -46,8 +46,8 @@ class ZipAndJarFileLookupFactoryTest { val cp3 = createCp assert(cp1 ne cp3, (System.identityHashCode(cp1), System.identityHashCode(cp3))) // And that instance should see D, not C, in package p1. 
- assert(cp3.findClass("p1.C").isEmpty) - assert(cp3.findClass("p1.D").isDefined) + assert(cp3.findClassFile("p1.C").isEmpty) + assert(cp3.findClassFile("p1.D").isDefined) } finally Files.delete(f) } From 8168d1e3f496f8d1b5c808765aeb257bb376264d Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 28 Nov 2023 12:05:34 +0100 Subject: [PATCH 005/465] use an enum to store file extensions, This caches common file extensions, while still being extensible. Also fixes many operations with unexpected behavior (manipulation of file extensions where toLowerCase behaves differently with certain locales.) --- .../dotty/tools/dotc/CompilationUnit.scala | 13 +-- compiler/src/dotty/tools/dotc/Driver.scala | 8 +- .../tools/dotc/classpath/ClassPath.scala | 3 +- .../dotc/classpath/DirectoryClassPath.scala | 2 +- .../tools/dotc/classpath/FileUtils.scala | 41 +++++----- .../classpath/VirtualDirectoryClassPath.scala | 2 +- .../ZipAndJarFileLookupFactory.scala | 2 +- .../dotty/tools/dotc/config/Settings.scala | 30 +++---- .../dotty/tools/dotc/core/SymbolLoaders.scala | 4 +- .../dotc/core/classfile/ClassfileParser.scala | 4 +- .../tools/dotc/core/tasty/TastyPrinter.scala | 3 +- .../dotty/tools/dotc/fromtasty/Debug.scala | 2 +- .../dotty/tools/dotc/fromtasty/TASTYRun.scala | 10 +-- .../tools/dotc/fromtasty/TastyFileUtil.scala | 3 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 4 +- .../tools/dotc/sbt/ExtractDependencies.scala | 12 +-- .../src/dotty/tools/dotc/util/EnumFlags.scala | 14 ++++ .../src/dotty/tools/io/AbstractFile.scala | 19 +++-- compiler/src/dotty/tools/io/File.scala | 2 - .../src/dotty/tools/io/FileExtension.scala | 79 +++++++++++++++++++ compiler/src/dotty/tools/io/Jar.scala | 2 +- compiler/src/dotty/tools/io/JarArchive.scala | 4 +- compiler/src/dotty/tools/io/Path.scala | 47 +++++++---- .../dotc/core/tasty/CommentPicklingTest.scala | 2 +- .../tools/dotc/printing/PrintingTest.scala | 2 +- .../transform/PatmatExhaustivityTest.scala | 6 +- 
.../languageserver/DottyLanguageServer.scala | 2 +- .../tools/pc/completions/Completions.scala | 2 +- .../tasty/inspector/TastyInspector.scala | 2 +- .../tasty/inspector/TastyInspector.scala | 2 +- .../scala2-library-test.scala | 2 +- .../scala2-library-from-tasty-jar.scala | 2 +- .../scala2-library-from-tasty.scala | 2 +- 33 files changed, 228 insertions(+), 106 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/util/EnumFlags.scala create mode 100644 compiler/src/dotty/tools/io/FileExtension.scala diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 78773a518b67..2358739ebd74 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -28,13 +28,16 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn var tpdTree: tpd.Tree = tpd.EmptyTree /** Is this the compilation unit of a Java file */ - def isJava: Boolean = source.file.name.endsWith(".java") + def isJava: Boolean = source.file.ext.isJava /** Is this the compilation unit of a Java file, or TASTy derived from a Java file */ - def typedAsJava = isJava || { - val infoNN = info - infoNN != null && infoNN.tastyInfo.exists(_.attributes.isJava) - } + def typedAsJava = + val ext = source.file.ext + ext.isJavaOrTasty && (ext.isJava || tastyInfo.exists(_.attributes.isJava)) + + def tastyInfo: Option[TastyInfo] = + val local = info + if local == null then None else local.tastyInfo /** The source version for this unit, as determined by a language import */ diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 196752aceb29..ae2219a4f049 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -6,7 +6,7 @@ import core.Comments.{ContextDoc, ContextDocstrings} import core.Contexts.* import core.{MacroClassLoader, TypeError} import 
dotty.tools.dotc.ast.Positioned -import dotty.tools.io.AbstractFile +import dotty.tools.io.{AbstractFile, FileExtension} import reporting.* import core.Decorators.* import config.Feature @@ -97,9 +97,9 @@ class Driver { if !file.exists then report.error(em"File does not exist: ${file.path}") None - else file.extension match - case "jar" => Some(file.path) - case "tasty" => + else file.ext match + case FileExtension.Jar => Some(file.path) + case FileExtension.Tasty => TastyFileUtil.getClassPath(file) match case Some(classpath) => Some(classpath) case _ => diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala index 3210c6221a78..5f545e1b93a5 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala @@ -6,6 +6,7 @@ package dotty.tools.dotc.classpath import dotty.tools.dotc.classpath.FileUtils.isTasty import dotty.tools.io.AbstractFile import dotty.tools.io.ClassRepresentation +import dotty.tools.io.FileExtension case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) @@ -52,7 +53,7 @@ sealed trait BinaryFileEntry extends ClassRepresentation { object BinaryFileEntry { def apply(file: AbstractFile): BinaryFileEntry = if file.isTasty then - if file.resolveSiblingWithExtension("class") != null then TastyWithClassFileEntry(file) + if file.resolveSiblingWithExtension(FileExtension.Class) != null then TastyWithClassFileEntry(file) else StandaloneTastyFileEntry(file) else ClassFileEntry(file) diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 212f7123198a..252f046ab548 100644 --- 
a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -285,7 +285,7 @@ case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[BinaryFil protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) protected def isMatchingFile(f: JFile): Boolean = - f.isTasty || (f.isClass && f.classToTasty.isEmpty) + f.isTasty || (f.isClass && !f.hasSiblingTasty) private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) } diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index b8cb9a2155dc..030b0b61044a 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -17,21 +17,20 @@ object FileUtils { extension (file: AbstractFile) { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - def isClass: Boolean = !file.isDirectory && hasClassExtension && !file.name.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 + def isClass: Boolean = !file.isDirectory && hasClassExtension - def hasClassExtension: Boolean = file.hasExtension("class") + def hasClassExtension: Boolean = file.ext.isClass - def hasTastyExtension: Boolean = file.hasExtension("tasty") + def hasTastyExtension: Boolean = file.ext.isTasty def isTasty: Boolean = !file.isDirectory && hasTastyExtension def isScalaBinary: Boolean = file.isClass || file.isTasty - def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) + def isScalaOrJavaSource: Boolean = !file.isDirectory && file.ext.isScalaOrJava // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? 
- def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") + def isJarOrZip: Boolean = file.ext.isJarOrZip /** * Safe method returning a sequence containing one URL representing this file, when underlying file exists, @@ -39,27 +38,31 @@ object FileUtils { */ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) - /** Returns the tasty file associated with this class file */ - def classToTasty: Option[AbstractFile] = - assert(file.isClass, s"non-class: $file") - val tastyName = classNameToTasty(file.name) - Option(file.resolveSibling(tastyName)) + /** + * Returns if there is an existing sibling `.tasty` file. + */ + def hasSiblingTasty: Boolean = + assert(file.hasClassExtension, s"non-class: $file") + file.resolveSibling(classNameToTasty(file.name)) != null } extension (file: JFile) { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) - def isClass: Boolean = file.isFile && file.getName.endsWith(SUFFIX_CLASS) && !file.getName.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 + def isClass: Boolean = file.isFile && hasClassExtension + + def hasClassExtension: Boolean = file.getName.endsWith(SUFFIX_CLASS) def isTasty: Boolean = file.isFile && file.getName.endsWith(SUFFIX_TASTY) - /** Returns the tasty file associated with this class file */ - def classToTasty: Option[JFile] = - assert(file.isClass, s"non-class: $file") - val tastyName = classNameToTasty(file.getName.stripSuffix(".class")) - val tastyPath = file.toPath.resolveSibling(tastyName) - if java.nio.file.Files.exists(tastyPath) then Some(tastyPath.toFile) else None + /** + * Returns if there is an existing sibling `.tasty` file. 
+ */ + def hasSiblingTasty: Boolean = + assert(file.hasClassExtension, s"non-class: $file") + val path = file.toPath + val tastyPath = path.resolveSibling(classNameToTasty(file.getName)) + java.nio.file.Files.exists(tastyPath) } diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 9a2f49a786f4..0616d6c14ba6 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -51,5 +51,5 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) protected def isMatchingFile(f: AbstractFile): Boolean = - f.isTasty || (f.isClass && f.classToTasty.isEmpty) + f.isTasty || (f.isClass && !f.hasSiblingTasty) } diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 3a725ad6e052..d5473e6b26c3 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -53,7 +53,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { override protected def createFileEntry(file: FileZipArchive#Entry): BinaryFileEntry = BinaryFileEntry(file) override protected def isRequiredFileType(file: AbstractFile): Boolean = - file.isTasty || (file.isClass && file.classToTasty.isEmpty) + file.isTasty || (file.isClass && !file.hasSiblingTasty) } /** diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index a65072427ba7..816d85e6c6fd 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -64,7 +64,7 @@ object Settings: 
@unshared val settingCharacters = "[a-zA-Z0-9_\\-]*".r - def validateSettingString(name: String): Unit = + def validateSettingString(name: String): Unit = assert(settingCharacters.matches(name), s"Setting string $name contains invalid characters") @@ -83,7 +83,7 @@ object Settings: deprecationMsg: Option[String] = None, // kept only for -Ykind-projector option compatibility legacyArgs: Boolean = false)(private[Settings] val idx: Int) { - + validateSettingString(prefix.getOrElse(name)) aliases.foreach(validateSettingString) assert(name.startsWith(s"-${category.prefixLetter}"), s"Setting $name does not start with category -$category") @@ -92,7 +92,7 @@ object Settings: // Example: -opt Main.scala would be interpreted as -opt:Main.scala, and the source file would be ignored. assert(!(summon[ClassTag[T]] == ListTag && ignoreInvalidArgs), s"Ignoring invalid args is not supported for multivalue settings: $name") - val allFullNames: List[String] = s"$name" :: s"-$name" :: aliases + val allFullNames: List[String] = s"$name" :: s"-$name" :: aliases def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] @@ -105,7 +105,7 @@ object Settings: def isMultivalue: Boolean = summon[ClassTag[T]] == ListTag def acceptsNoArg: Boolean = summon[ClassTag[T]] == BooleanTag || summon[ClassTag[T]] == OptionTag || choices.exists(_.contains("")) - + def legalChoices: String = choices match { case Some(xs) if xs.isEmpty => "" @@ -168,17 +168,17 @@ object Settings: update(x, args) catch case _: NumberFormatException => fail(s"$argValue is not an integer argument for $name", args) - - def setOutput(argValue: String, args: List[String]) = + + def setOutput(argValue: String, args: List[String]) = val path = Directory(argValue) - val isJar = path.extension == "jar" + val isJar = path.ext.isJar if (!isJar && !path.isDirectory) fail(s"'$argValue' does not exist or is not a directory or .jar file", args) else { val output = if (isJar) JarArchive.create(path) else new 
PlainDirectory(path) update(output, args) } - + def setVersion(argValue: String, args: List[String]) = ScalaVersion.parse(argValue) match { case Success(v) => update(v, args) @@ -193,7 +193,7 @@ object Settings: case _ => update(strings, args) - def doSet(argRest: String) = + def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match { case (BooleanTag, _) => setBoolean(argRest, args) @@ -224,16 +224,16 @@ object Settings: case _ => missingArg - def matches(argName: String): Boolean = + def matches(argName: String): Boolean = (allFullNames).exists(_ == argName.takeWhile(_ != ':')) || prefix.exists(arg.startsWith) - def argValRest: String = + def argValRest: String = if(prefix.isEmpty) arg.dropWhile(_ != ':').drop(1) else arg.drop(prefix.get.length) - - if matches(arg) then + + if matches(arg) then if deprecationMsg.isDefined then warn(s"Option $name is deprecated: ${deprecationMsg.get}", args) - else + else doSet(argValRest) else state @@ -375,7 +375,7 @@ object Settings: def OptionSetting[T: ClassTag](category: SettingCategory, name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = publish(Setting(category, prependName(name), descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) - + def DeprecatedSetting(category: SettingCategory, name: String, descr: String, deprecationMsg: String): Setting[Boolean] = publish(Setting(category, prependName(name), descr, false, deprecationMsg = Some(deprecationMsg))) } diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 75c610b29140..8b5a7ddfa65c 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -7,7 +7,7 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal -import dotty.tools.dotc.classpath.FileUtils.isTasty +import 
dotty.tools.dotc.classpath.FileUtils.hasTastyExtension import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions @@ -198,7 +198,7 @@ object SymbolLoaders { enterToplevelsFromSource(owner, nameOf(classRep), src) case (Some(bin), _) => val completer = - if bin.isTasty then ctx.platform.newTastyLoader(bin) + if bin.hasTastyExtension then ctx.platform.newTastyLoader(bin) else ctx.platform.newClassLoader(bin) enterClassAndModule(owner, nameOf(classRep), completer) } diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 894d430fe54b..22a43dd524e1 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -23,7 +23,7 @@ import scala.annotation.switch import typer.Checking.checkNonCyclic import io.{AbstractFile, ZipArchive} import scala.util.control.NonFatal -import dotty.tools.dotc.classpath.FileUtils.classToTasty +import dotty.tools.dotc.classpath.FileUtils.hasSiblingTasty import scala.compiletime.uninitialized @@ -1143,7 +1143,7 @@ class ClassfileParser( if (scan(tpnme.TASTYATTR)) { val hint = - if classfile.classToTasty.isDefined then "This is likely a bug in the compiler. Please report." + if classfile.hasSiblingTasty then "This is likely a bug in the compiler. Please report." else "This `.tasty` file is missing. Try cleaning the project to fix this issue." report.error(s"Loading Scala 3 binary from $classfile. It should have been loaded from `.tasty` file. 
$hint", NoSourcePosition) return None diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index af2097f347ba..6850d87d1f4d 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -18,6 +18,7 @@ import scala.collection.immutable.BitSet import scala.compiletime.uninitialized import dotty.tools.tasty.TastyBuffer.Addr import dotty.tools.dotc.core.Names.TermName +import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension object TastyPrinter: @@ -56,7 +57,7 @@ object TastyPrinter: else if arg.endsWith(".jar") then val jar = JarArchive.open(Path(arg), create = false) try - for file <- jar.iterator() if file.name.endsWith(".tasty") do + for file <- jar.iterator() if file.hasTastyExtension do printTasty(s"$arg ${file.path}", file.toByteArray) finally jar.close() else diff --git a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala index 979fae239e59..2e6b699b4e36 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala @@ -40,7 +40,7 @@ object Debug { val tastyFiles = Directory(fromSourcesOut).walk - .filter(x => x.isFile && "tasty".equalsIgnoreCase(x.extension)) + .filter(x => x.isFile && x.ext.isTasty) .map(_.toString) .toList diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index 98ab8e2b6226..8ad9afb7d512 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -4,7 +4,7 @@ package fromtasty import scala.language.unsafeNulls -import io.{JarArchive, AbstractFile, Path} +import io.{JarArchive, AbstractFile, Path, FileExtension} import core.Contexts.* import core.Decorators.em import java.io.File @@ -19,14 +19,14 @@ class TASTYRun(comp: 
Compiler, ictx: Context) extends Run(comp, ictx) { val fromTastyIgnoreList = ctx.settings.YfromTastyIgnoreList.value.toSet // Resolve class names of tasty and jar files val classNames = files.flatMap { file => - file.extension match - case "jar" => + file.ext match + case FileExtension.Jar => JarArchive.open(Path(file.path), create = false).allFileNames() .map(_.stripPrefix("/")) // change paths from absolute to relative - .filter(e => Path.extension(e) == "tasty" && !fromTastyIgnoreList(e.replace("/", File.separator))) + .filter(e => Path.fileExtension(e).isTasty && !fromTastyIgnoreList(e.replace("/", File.separator))) .map(e => e.stripSuffix(".tasty").replace("/", ".")) .toList - case "tasty" => TastyFileUtil.getClassName(file) + case FileExtension.Tasty => TastyFileUtil.getClassName(file) case _ => report.error(em"File extension is not `tasty` or `jar`: ${file.path}") Nil diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala index bc04cc648a65..d3a9550c4491 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala @@ -6,6 +6,7 @@ import scala.language.unsafeNulls import dotty.tools.dotc.core.tasty.TastyClassName import dotty.tools.dotc.core.StdNames.nme.EMPTY_PACKAGE import dotty.tools.io.AbstractFile +import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension object TastyFileUtil { /** Get the class path of a tasty file @@ -34,7 +35,7 @@ object TastyFileUtil { */ def getClassName(file: AbstractFile): Option[String] = { assert(file.exists) - assert(file.extension == "tasty") + assert(file.hasTastyExtension) val bytes = file.toByteArray val names = new TastyClassName(bytes).readName() names.map { case (packageName, className) => diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index dafb44d525e4..506ebc81b23d 100644 --- 
a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -18,7 +18,7 @@ import Names.* import NameOps.* import inlines.Inlines import transform.ValueClasses -import dotty.tools.io.File +import dotty.tools.io.{File, FileExtension} import java.io.PrintWriter @@ -76,7 +76,7 @@ class ExtractAPI extends Phase { if (ctx.settings.YdumpSbtInc.value) { // Append to existing file that should have been created by ExtractDependencies - val pw = new PrintWriter(File(sourceFile.file.jpath).changeExtension("inc").toFile + val pw = new PrintWriter(File(sourceFile.file.jpath).changeExtension(FileExtension.Inc).toFile .bufferedWriter(append = true), true) try { classes.foreach(source => pw.println(DefaultShowAPI(source))) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index a35628dc52e4..352636f681c3 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -8,7 +8,7 @@ import java.nio.file.Path import java.util.{Arrays, EnumSet} import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.classpath.FileUtils.{isTasty, hasClassExtension, hasTastyExtension} +import dotty.tools.dotc.classpath.FileUtils.{hasClassExtension, hasTastyExtension} import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Flags.* @@ -21,7 +21,7 @@ import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.util.{SrcPos, NoSourcePosition} import dotty.tools.io -import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile} +import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile, FileExtension} import xsbti.UseScope import xsbti.api.DependencyContext import xsbti.api.DependencyContext.* @@ -84,7 +84,7 @@ class ExtractDependencies extends Phase { Arrays.sort(deps) Arrays.sort(names) - val pw = 
io.File(unit.source.file.jpath).changeExtension("inc").toFile.printWriter() + val pw = io.File(unit.source.file.jpath).changeExtension(FileExtension.Inc).toFile.printWriter() // val pw = Console.out try { pw.println("Used Names:") @@ -495,7 +495,7 @@ class DependencyRecorder { if depFile != null then { // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) def allowLocal = depCtx == DependencyByInheritance || depCtx == LocalDependencyByInheritance - val isTasty = depFile.hasTastyExtension + val isTastyOrSig = depFile.hasTastyExtension def processExternalDependency() = { val binaryClassName = depClass.binaryClassName @@ -506,13 +506,13 @@ class DependencyRecorder { binaryDependency(zip.jpath, binaryClassName) case _ => case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem - binaryDependency(if isTasty then cachedSiblingClass(pf) else pf.jpath, binaryClassName) + binaryDependency(if isTastyOrSig then cachedSiblingClass(pf) else pf.jpath, binaryClassName) case _ => internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", fromClass.srcPos) } } - if isTasty || depFile.hasClassExtension then + if isTastyOrSig || depFile.hasClassExtension then processExternalDependency() else if allowLocal || depFile != sourceFile.file then // We cannot ignore dependencies coming from the same source file because diff --git a/compiler/src/dotty/tools/dotc/util/EnumFlags.scala b/compiler/src/dotty/tools/dotc/util/EnumFlags.scala new file mode 100644 index 000000000000..a833af7632de --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/EnumFlags.scala @@ -0,0 +1,14 @@ +package dotty.tools.dotc.util + +object EnumFlags: + + opaque type FlagSet[E <: reflect.Enum] = Int + + object FlagSet: + + extension [E <: reflect.Enum](set: FlagSet[E]) + def is(flag: E): Boolean = (set & (1 << flag.ordinal)) != 0 + def |(flag: E): FlagSet[E] = (set | (1 << flag.ordinal)) + + def empty[E <: reflect.Enum]: 
FlagSet[E] = + 0 diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index 243dc2953d2e..233b1ca8fb62 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -97,11 +97,16 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Returns the path of this abstract file in a canonical form. */ def canonicalPath: String = if (jpath == null) path else jpath.normalize.toString - /** Checks extension case insensitively. TODO: change to enum */ - def hasExtension(other: String): Boolean = extension == other.toLowerCase + /** Checks extension case insensitively. */ + @deprecated("prefer queries on ext") + def hasExtension(other: String): Boolean = ext.toLowerCase.equalsIgnoreCase(other) - /** Returns the extension of this abstract file. TODO: store as an enum to avoid costly comparisons */ - val extension: String = Path.extension(name) + /** Returns the extension of this abstract file. */ + val ext: FileExtension = Path.fileExtension(name) + + /** Returns the extension of this abstract file as a String. */ + @deprecated("use ext instead.") + def extension: String = ext.toLowerCase /** The absolute file, if this is a relative file. */ def absolute: AbstractFile @@ -129,7 +134,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { } /** Does this abstract file represent something which can contain classfiles? */ - def isClassContainer: Boolean = isDirectory || (jpath != null && (extension == "jar" || extension == "zip")) + def isClassContainer: Boolean = isDirectory || (jpath != null && ext.isJarOrZip) /** Create a file on disk, if one does not exist already. 
*/ def create(): Unit @@ -258,8 +263,8 @@ abstract class AbstractFile extends Iterable[AbstractFile] { final def resolveSibling(name: String): AbstractFile | Null = container.lookupName(name, directory = false) - final def resolveSiblingWithExtension(extension: String): AbstractFile | Null = - resolveSibling(name.stripSuffix(this.extension) + extension) + final def resolveSiblingWithExtension(extension: FileExtension): AbstractFile | Null = + resolveSibling(Path.fileName(name) + "." + extension) private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = lookupName(name, isDir) match { diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala index 22a0e04b2b48..59e4a2ee451b 100644 --- a/compiler/src/dotty/tools/io/File.scala +++ b/compiler/src/dotty/tools/io/File.scala @@ -39,8 +39,6 @@ object File { */ class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) with Streamable.Chars { override val creationCodec: io.Codec = constructorCodec - - override def addExtension(ext: String): File = super.addExtension(ext).toFile override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile override def toDirectory: Directory = new Directory(jpath) override def toFile: File = this diff --git a/compiler/src/dotty/tools/io/FileExtension.scala b/compiler/src/dotty/tools/io/FileExtension.scala new file mode 100644 index 000000000000..9d239477aed3 --- /dev/null +++ b/compiler/src/dotty/tools/io/FileExtension.scala @@ -0,0 +1,79 @@ +package dotty.tools.io + +import dotty.tools.uncheckedNN +import dotty.tools.dotc.util.EnumFlags.FlagSet + +enum FileExtension(val toLowerCase: String): + case Tasty extends FileExtension("tasty") + case Class extends FileExtension("class") + case Jar extends FileExtension("jar") + case Scala extends FileExtension("scala") + case ScalaScript extends FileExtension("sc") + case Java extends FileExtension("java") + case Zip extends FileExtension("zip") + case 
Inc extends FileExtension("inc") + case Empty extends FileExtension("") + + /** Fallback extension */ + case External(override val toLowerCase: String) extends FileExtension(toLowerCase) + + /** represents an empty file extension. */ + def isEmpty: Boolean = this == Empty + + override def toString: String = toLowerCase + + /** represents `".tasty"` */ + def isTasty = this == Tasty + /** represents `".class"` */ + def isClass = this == Class + /** represents `".scala"` */ + def isScala = this == Scala + /** represents `".sc"` */ + def isScalaScript = this == ScalaScript + /** represents `".java"` */ + def isJava = this == Java + /** represents `".jar"` */ + def isJar: Boolean = this == Jar + /** represents `".zip"` */ + def isZip: Boolean = this == Zip + /** represents `".jar"` or `".zip"` */ + def isJarOrZip: Boolean = FileExtension.JarOrZip.is(this) + /** represents `".scala"` or `".java"` */ + def isScalaOrJava: Boolean = FileExtension.ScalaOrJava.is(this) + /** represents `".java"` or `.tasty` */ + def isJavaOrTasty: Boolean = FileExtension.JavaOrTasty.is(this) + +object FileExtension: + + private val JarOrZip: FlagSet[FileExtension] = FlagSet.empty | Zip | Jar + private val ScalaOrJava: FlagSet[FileExtension] = FlagSet.empty | Scala | Java + private val JavaOrTasty: FlagSet[FileExtension] = FlagSet.empty | Java | Tasty + + // this will be optimised to a single hashcode + equality check, and then fallback to slowLookup, + // keep in sync with slowLookup. 
+ private def initialLookup(s: String): FileExtension = s match + case "tasty" => Tasty + case "class" => Class + case "jar" => Jar + case "scala" => Scala + case "sc" => ScalaScript + case "java" => Java + case "zip" => Zip + case "inc" => Inc + case _ => slowLookup(s) + + // slower than initialLookup, keep in sync with initialLookup + private def slowLookup(s: String): FileExtension = + if s.equalsIgnoreCase("tasty") then Tasty + else if s.equalsIgnoreCase("class") then Class + else if s.equalsIgnoreCase("jar") then Jar + else if s.equalsIgnoreCase("scala") then Scala + else if s.equalsIgnoreCase("sc") then ScalaScript + else if s.equalsIgnoreCase("java") then Java + else if s.equalsIgnoreCase("zip") then Zip + else if s.equalsIgnoreCase("inc") then Inc + else External(s) + + def from(s: String): FileExtension = + if s.isEmpty then Empty + else initialLookup(s) diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala index 3e65d2f7635d..dd33b1229610 100644 --- a/compiler/src/dotty/tools/io/Jar.scala +++ b/compiler/src/dotty/tools/io/Jar.scala @@ -165,7 +165,7 @@ object Jar { def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true) def isJarOrZip(f: Path, examineFile: Boolean): Boolean = - f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f)) + f.ext.isJarOrZip || (examineFile && magicNumberIsZip(f)) def create(file: File, sourceDir: Directory, mainClass: String): Unit = { val writer = new Jar(file).jarWriter(Name.MAIN_CLASS -> mainClass) diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index f42f68e745ed..e95dbe97bb19 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -20,14 +20,14 @@ class JarArchive private (root: Directory) extends PlainDirectory(root) { object JarArchive { /** Create a new jar file. 
Overwrite if file already exists */ def create(path: Path): JarArchive = { - require(path.extension == "jar") + require(path.ext.isJar) path.delete() open(path, create = true) } /** Create a jar file. */ def open(path: Path, create: Boolean = false): JarArchive = { - require(path.extension == "jar") + require(path.ext.isJar) // creating a new zip file system by using the JAR URL syntax: // https://docs.oracle.com/javase/7/docs/technotes/guides/io/fsp/zipfilesystemprovider.html diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala index c8420c5e381d..6f97e03ca4d7 100644 --- a/compiler/src/dotty/tools/io/Path.scala +++ b/compiler/src/dotty/tools/io/Path.scala @@ -33,17 +33,20 @@ import scala.util.Random.alphanumeric */ object Path { def isExtensionJarOrZip(jpath: JPath): Boolean = isExtensionJarOrZip(jpath.getFileName.toString) - def isExtensionJarOrZip(name: String): Boolean = { - val ext = extension(name) - ext == "jar" || ext == "zip" + def isExtensionJarOrZip(name: String): Boolean = fileExtension(name).isJarOrZip + def fileExtension(name: String): FileExtension = { + val i = name.lastIndexOf('.') + if (i < 0) FileExtension.Empty + else FileExtension.from(name.substring(i + 1)) } - def extension(name: String): String = { - var i = name.length - 1 - while (i >= 0 && name.charAt(i) != '.') - i -= 1 + @deprecated("use fileExtension instead.") + def extension(name: String): String = fileExtension(name).toLowerCase - if (i < 0) "" - else name.substring(i + 1).toLowerCase + /** strip anything after and including trailing the extension */ + def fileName(name: String): String = { + val i = name.lastIndexOf('.') + if (i < 0) name + else name.substring(0, i).nn } def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs.filter(_.isDirectory).map(_.toDirectory) @@ -160,22 +163,36 @@ class Path private[io] (val jpath: JPath) { val p = parent if (p isSame this) Nil else p :: p.parents } + + def ext: FileExtension = 
Path.fileExtension(name) + // if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise "" - def extension: String = Path.extension(name) + @deprecated("use ext instead.") + def extension: String = ext.toLowerCase + // compares against extensions in a CASE INSENSITIVE way. + @deprecated("consider using queries on ext instead.") def hasExtension(ext: String, exts: String*): Boolean = { - val lower = extension.toLowerCase - ext.toLowerCase == lower || exts.exists(_.toLowerCase == lower) + val lower = ext.toLowerCase + lower.equalsIgnoreCase(ext) || exts.exists(lower.equalsIgnoreCase) } // returns the filename without the extension. - def stripExtension: String = name stripSuffix ("." + extension) + def stripExtension: String = Path.fileName(name) // returns the Path with the extension. def addExtension(ext: String): Path = new Path(jpath.resolveSibling(name + ext)) + + // changes the existing extension out for a new one, or adds it + // if the current path has none. + def changeExtension(ext: FileExtension): Path = + changeExtension(ext.toLowerCase) + // changes the existing extension out for a new one, or adds it // if the current path has none. def changeExtension(ext: String): Path = - if (extension == "") addExtension(ext) - else new Path(jpath.resolveSibling(stripExtension + "." + ext)) + val name0 = name + val dropExtension = Path.fileName(name0) + if dropExtension eq name0 then addExtension(ext) + else new Path(jpath.resolveSibling(dropExtension + "." 
+ ext)) // conditionally execute def ifFile[T](f: File => T): Option[T] = if (isFile) Some(f(toFile)) else None diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala index 4daaf86f2fb0..db58ff36ac42 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala @@ -98,7 +98,7 @@ class CommentPicklingTest { Main.process(options.all, reporter) assertFalse("Compilation failed.", reporter.hasErrors) - val tastyFiles = Path.onlyFiles(out.walkFilter(_.extension == "tasty")).toList + val tastyFiles = Path.onlyFiles(out.walkFilter(_.ext.isTasty)).toList val unpicklingOptions = unpickleOptions .withClasspath(out.toAbsolute.toString) .and("dummy") // Need to pass a dummy source file name diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 2c970e93f573..73118216d6fa 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -51,7 +51,7 @@ class PrintingTest { def testIn(testsDir: String, phase: String) = val res = Directory(testsDir).list.toList - .filter(f => f.extension == "scala") + .filter(f => f.ext.isScala) .map { f => compileFile(f.jpath, phase) } val failed = res.filter(!_) diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index f538d9534cd9..4ed59db5c10e 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -52,7 +52,7 @@ class PatmatExhaustivityTest { /** A single test with multiple files grouped in a folder */ private def compileDir(path: JPath): Boolean = { val files = Directory(path).list.toList - 
.filter(f => f.extension == "scala" || f.extension == "java" ) + .filter(_.ext.isScalaOrJava) .map(_.jpath) val actualLines = compile(files) @@ -65,7 +65,7 @@ class PatmatExhaustivityTest { def patmatExhaustivity: Unit = { val blacklisted = TestSources.patmatExhaustivityScala2LibraryTastyBlacklisted.toSet val res = Directory(testsDir).list.toList - .filter(f => f.extension == "scala" || f.isDirectory) + .filter(f => f.ext.isScala || f.isDirectory) .filter { f => val path = if f.isDirectory then f.path + "/" else f.path Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains) @@ -74,7 +74,7 @@ class PatmatExhaustivityTest { .map(f => if f.isDirectory then compileDir(f.jpath) else compileFile(f.jpath)) val failed = res.filter(!_) - val ignored = Directory(testsDir).list.toList.filter(_.extension == "ignore") + val ignored = Directory(testsDir).list.toList.filter(_.ext.toLowerCase.equalsIgnoreCase("ignore")) val msg = s"Total: ${res.length + ignored.length}, Failed: ${failed.length}, Ignored: ${ignored.length}" diff --git a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala index 3604e38375e7..e878866be81e 100644 --- a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala +++ b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala @@ -753,7 +753,7 @@ object DottyLanguageServer { /** Does this sourcefile represent a worksheet? */ private def isWorksheet(sourcefile: SourceFile): Boolean = - sourcefile.file.extension == "sc" + sourcefile.file.ext.isScalaScript /** Wrap the source of a worksheet inside an `object`. 
*/ private def wrapWorksheet(source: String): String = diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index abb15d45f88a..81a543701817 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -115,7 +115,7 @@ class Completions( val allAdvanced = advanced ++ keywords path match // should not show completions for toplevel - case Nil | (_: PackageDef) :: _ if completionPos.originalCursorPosition.source.file.extension != "sc" => + case Nil | (_: PackageDef) :: _ if !completionPos.originalCursorPosition.source.file.ext.isScalaScript => (allAdvanced, SymbolSearch.Result.COMPLETE) case Select(qual, _) :: _ if qual.typeOpt.isErroneous => (allAdvanced, SymbolSearch.Result.COMPLETE) diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala index 14e5f019b433..03b3aadedc4d 100644 --- a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -52,7 +52,7 @@ object TastyInspector: def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = def checkFile(fileName: String, ext: String): Unit = val file = dotty.tools.io.Path(fileName) - if file.extension != ext then + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") else if !file.exists then throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") diff --git a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala index e70d2d4f6dc5..ea3f0a95dded 100644 --- a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala 
+++ b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala @@ -49,7 +49,7 @@ object TastyInspector: def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = def checkFile(fileName: String, ext: String): Unit = val file = dotty.tools.io.Path(fileName) - if file.extension != ext then + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") else if !file.exists then throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") diff --git a/tests/run-tasty-inspector/scala2-library-test.scala b/tests/run-tasty-inspector/scala2-library-test.scala index 15a251427d70..37dc55e20d1f 100644 --- a/tests/run-tasty-inspector/scala2-library-test.scala +++ b/tests/run-tasty-inspector/scala2-library-test.scala @@ -32,7 +32,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList diff --git a/tests/run-with-compiler/scala2-library-from-tasty-jar.scala b/tests/run-with-compiler/scala2-library-from-tasty-jar.scala index 913cf7dc24fc..f6d7b7b87d5b 100644 --- a/tests/run-with-compiler/scala2-library-from-tasty-jar.scala +++ b/tests/run-with-compiler/scala2-library-from-tasty-jar.scala @@ -32,7 +32,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList diff --git a/tests/run-with-compiler/scala2-library-from-tasty.scala b/tests/run-with-compiler/scala2-library-from-tasty.scala index ee2ec8951701..c3a52ea95ae1 100644 --- a/tests/run-with-compiler/scala2-library-from-tasty.scala +++ 
b/tests/run-with-compiler/scala2-library-from-tasty.scala @@ -29,7 +29,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList From a975398310ddcb90ff15fca543330c4fb18b3587 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 17 Aug 2023 15:01:07 +0200 Subject: [PATCH 006/465] Notify Zinc about non-local classes early For pipelining Zinc needs to know about non-local classes early. e.g. it enables Zinc to disable pipelining if a non-local class contains macros. The changes in this commit are based of changes made originally in Zinc: https://github.com/sbt/zinc/commit/856d4162127927cb9a6c37a1649cc42d1871a815 --- .../src/dotty/tools/backend/jvm/CodeGen.scala | 9 ++- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 62 ++++++++++++++++++- .../sbt/interfaces/IncrementalCallback.java | 7 +++ .../dotty/tools/xsbt/IncrementalCallback.java | 10 +++ .../tools/xsbt/OldIncrementalCallback.java | 10 +++ .../compactify/src/main/scala/Nested.scala | 54 +++++++++------- 6 files changed, 121 insertions(+), 31 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index b48df60d4c1a..2286ad6c2c25 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -125,17 +125,16 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( // Creates a callback that will be evaluated in PostProcessor after creating a file private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: util.SourceFile)(using Context): AbstractFile => Unit = { - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { - (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) + val isLocal = 
atPhase(sbtExtractDependenciesPhase) { + claszSymbol.isLocal } clsFile => { val className = cls.name.replace('/', '.') if (ctx.compilerCallback != null) ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - ctx.withIncCallback: cb => - if (isLocal) cb.generatedLocalClass(sourceFile, clsFile.jpath) - else cb.generatedNonLocalClass(sourceFile, clsFile.jpath, className, fullClassName) + if isLocal then + ctx.withIncCallback(_.generatedLocalClass(sourceFile, clsFile.jpath)) } } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 506ebc81b23d..079687ac3122 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -15,12 +15,15 @@ import Trees.* import Types.* import Symbols.* import Names.* +import StdNames.str import NameOps.* import inlines.Inlines import transform.ValueClasses -import dotty.tools.io.{File, FileExtension} +import dotty.tools.io.{File, FileExtension, JarArchive} +import util.{Property, SourceFile} import java.io.PrintWriter +import ExtractAPI.NonLocalClassSymbolsInCurrentUnits import scala.collection.mutable import scala.util.hashing.MurmurHash3 @@ -64,13 +67,62 @@ class ExtractAPI extends Phase { // definitions, and `PostTyper` does not change definitions). 
override def runsAfter: Set[String] = Set(transform.PostTyper.name) + override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = + val nonLocalClassSymbols = new mutable.HashSet[Symbol] + val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols)) + val units0 = super.runOn(units)(using ctx0) + ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) + units0 + end runOn + + private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit = + for cls <- nonLocalClassSymbols do + val sourceFile = cls.source + if sourceFile.exists && cls.isDefinedInCurrentRun then + recordNonLocalClass(cls, sourceFile, cb) + cb.apiPhaseCompleted() + cb.dependencyPhaseCompleted() + + private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit = + def registerProductNames(fullClassName: String, binaryClassName: String) = + val pathToClassFile = s"${binaryClassName.replace('.', java.io.File.separatorChar)}.class" + + val classFile = { + ctx.settings.outputDir.value match { + case jar: JarArchive => + // important detail here, even on Windows, Zinc expects the separator within the jar + // to be the system default, (even if in the actual jar file the entry always uses '/'). 
+ // see https://github.com/sbt/zinc/blob/dcddc1f9cfe542d738582c43f4840e17c053ce81/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala#L47 + new java.io.File(s"$jar!$pathToClassFile") + case outputDir => + new java.io.File(outputDir.file, pathToClassFile) + } + } + + cb.generatedNonLocalClass(sourceFile, classFile.toPath(), binaryClassName, fullClassName) + end registerProductNames + + val fullClassName = atPhase(sbtExtractDependenciesPhase) { + ExtractDependencies.classNameAsString(cls) + } + val binaryClassName = cls.binaryClassName + registerProductNames(fullClassName, binaryClassName) + + // Register the names of top-level module symbols that emit two class files + val isTopLevelUniqueModule = + cls.owner.is(PackageClass) && cls.is(ModuleClass) && cls.companionClass == NoSymbol + if isTopLevelUniqueModule then + registerProductNames(fullClassName, binaryClassName.stripSuffix(str.MODULE_SUFFIX)) + end recordNonLocalClass + override def run(using Context): Unit = { val unit = ctx.compilationUnit val sourceFile = unit.source ctx.withIncCallback: cb => cb.startSource(sourceFile) - val apiTraverser = new ExtractAPICollector + val nonLocalClassSymbols = ctx.property(NonLocalClassSymbolsInCurrentUnits).get + val apiTraverser = ExtractAPICollector(nonLocalClassSymbols) val classes = apiTraverser.apiSource(unit.tpdTree) val mainClasses = apiTraverser.mainClasses @@ -94,6 +146,8 @@ object ExtractAPI: val name: String = "sbt-api" val description: String = "sends a representation of the API of classes to sbt" + private val NonLocalClassSymbolsInCurrentUnits: Property.Key[mutable.HashSet[Symbol]] = Property.Key() + /** Extracts full (including private members) API representation out of Symbols and Types. 
* * The exact representation used for each type is not important: the only thing @@ -136,7 +190,7 @@ object ExtractAPI: * without going through an intermediate representation, see * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation */ -private class ExtractAPICollector(using Context) extends ThunkHolder { +private class ExtractAPICollector(nonLocalClassSymbols: mutable.HashSet[Symbol])(using Context) extends ThunkHolder { import tpd.* import xsbti.api @@ -254,6 +308,8 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { childrenOfSealedClass, topLevel, tparams) allNonLocalClassesInSrc += cl + if !sym.isLocal then + nonLocalClassSymbols += sym if (sym.isStatic && !sym.is(Trait) && ctx.platform.hasMainMethod(sym)) { // If sym is an object, all main methods count, otherwise only @static ones count. diff --git a/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java b/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java index 4c6afa113f4f..ebdb1b7b24d4 100644 --- a/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java +++ b/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java @@ -7,6 +7,7 @@ /* User code should not implement this interface, it is intended to be a wrapper around xsbti.AnalysisCallback. 
*/ public interface IncrementalCallback { + default void api(SourceFile sourceFile, xsbti.api.ClassLike classApi) { } @@ -36,4 +37,10 @@ default void generatedLocalClass(SourceFile source, Path classFile) { default void generatedNonLocalClass(SourceFile source, Path classFile, String binaryClassName, String srcClassName) { } + + default void apiPhaseCompleted() { + } + + default void dependencyPhaseCompleted() { + } } diff --git a/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java index 3c3d33c1c1fe..6e19c62b10d0 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java +++ b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java @@ -57,4 +57,14 @@ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) { delegate.generatedNonLocalClass(asVirtualFile.apply(source), classFile, binaryClassName, srcClassName); } + + @Override + public void apiPhaseCompleted() { + delegate.apiPhaseCompleted(); + } + + @Override + public void dependencyPhaseCompleted() { + delegate.dependencyPhaseCompleted(); + } } diff --git a/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java index 597a964eb944..30e25194736d 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java +++ b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java @@ -71,4 +71,14 @@ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) { delegate.generatedNonLocalClass(asJavaFile(source), classFile.toFile(), binaryClassName, srcClassName); } + + @Override + public void apiPhaseCompleted() { + delegate.apiPhaseCompleted(); + } + + @Override + public void 
dependencyPhaseCompleted() { + delegate.dependencyPhaseCompleted(); + } } diff --git a/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala b/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala index 4b1597d287d4..b2d53cedee05 100644 --- a/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala +++ b/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala @@ -2,35 +2,35 @@ package test object TopLevelModule1 { - object InnerModule1 - { - object InnerModule2 - { - trait Z { def q = 3 } - def x = 3 - } - } - class InnerClass1 - { - class InnerClass2 - { - val z = new TopLevelModule1.InnerClass2 - } - object InnerModule3 - { - val y = new TopLevel1 with InnerModule1.InnerModule2.Z { val x = 4 } - } - } - class InnerClass2 + object InnerModule1 + { + object InnerModule2 + { + trait Z { def q = 3 } + def x = 3 + } + } + class InnerClass1 + { + class InnerClass2 + { + val z = new TopLevelModule1.InnerClass2 + } + object InnerModule3 + { + val y = new TopLevel1 with InnerModule1.InnerModule2.Z { val x = 4 } + } + } + class InnerClass2 } class TopLevel1 { - object Inner1_1 + object Inner1_1 } object TopLevel1 { - class Inner1_2 - object Inner1_2 + class Inner1_2 + object Inner1_2 } object TopLevel2 @@ -41,3 +41,11 @@ object TopLevel3 class TopLevel4 object TopLevelModuleSuffix$ + +// will generate a package object wrapper +val topLevelVal = 23 + +// explicit package object +package object inner { + val innerVal = 23 +} From 1327f1d153eaa38bd380bb99a1187bf7c9c2b35c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 6 Jun 2023 01:32:48 -0700 Subject: [PATCH 007/465] No warn when case class uses deprecated members --- .../tools/dotc/typer/CrossVersionChecks.scala | 67 +++++++++++++------ compiler/test-resources/repl/reset-command | 2 +- compiler/test-resources/repl/settings-command | 2 +- tests/pos/i11022.scala | 2 + tests/warn/i11022.check | 8 +++ tests/warn/i11022.scala | 10 ++- 6 files changed, 68 insertions(+), 23 
deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 1e0907ee74a6..790ffb2ad343 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -3,6 +3,7 @@ package dotc package transform import core.* +import Annotations.Annotation import Symbols.*, Types.*, Contexts.*, Flags.*, Decorators.*, reporting.* import util.SrcPos import config.{ScalaVersion, NoScalaVersion, Feature, ScalaRelease} @@ -161,29 +162,42 @@ object CrossVersionChecks: /** If @deprecated is present, and the point of reference is not enclosed * in either a deprecated member or a scala bridge method, issue a warning. + * + * Also check for deprecation of the companion class for synthetic methods in the companion module. */ private[CrossVersionChecks] def checkDeprecatedRef(sym: Symbol, pos: SrcPos)(using Context): Unit = - - // Also check for deprecation of the companion class for synthetic methods - val toCheck = sym :: (if sym.isAllOf(SyntheticMethod) then sym.owner.companionClass :: Nil else Nil) - for sym <- toCheck; annot <- sym.getAnnotation(defn.DeprecatedAnnot) do - if !skipWarning(sym) then - val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("") - val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("") - report.deprecationWarning(em"${sym.showLocated} is deprecated${since}${msg}", pos) - - /** Skip warnings for synthetic members of case classes during declaration and - * scan the chain of outer declaring scopes from the current context - * a deprecation warning will be skipped if one the following holds - * for a given declaring scope: - * - the symbol associated with the scope is also deprecated. - * - if and only if `sym` is an enum case, the scope is either - * a module that declares `sym`, or the companion class of the - * module that declares `sym`. 
+ def maybeWarn(annotee: Symbol, annot: Annotation) = if !skipWarning(sym) then + val message = annot.argumentConstantString(0).filter(!_.isEmpty).map(": " + _).getOrElse("") + val since = annot.argumentConstantString(1).filter(!_.isEmpty).map(" since " + _).getOrElse("") + report.deprecationWarning(em"${annotee.showLocated} is deprecated${since}${message}", pos) + sym.getAnnotation(defn.DeprecatedAnnot) match + case Some(annot) => maybeWarn(sym, annot) + case _ => + if sym.isAllOf(SyntheticMethod) then + val companion = sym.owner.companionClass + if companion.is(CaseClass) then companion.getAnnotation(defn.DeprecatedAnnot).foreach(maybeWarn(companion, _)) + + /** Decide whether the deprecation of `sym` should be ignored in this context. + * + * The warning is skipped if any symbol in the context owner chain is deprecated, + * that is, an enclosing scope is associated with a deprecated symbol. + * + * Further exclusions are needed for enums and case classes, + * since they typically need to refer to deprecated members + * even if the enclosing enum or case class is not deprecated. + * + * If and only if `sym` is an enum case, the warning is skipped + * if an enclosing scope is either a module that declares `sym`, + * or the companion class of the module that declares `sym`. + * + * For a deprecated case class or case class element, + * the warning is skipped for synthetic sites where the enclosing + * class (or its companion) is either the deprecated case class + * or the case class of the deprecated element. 
*/ private def skipWarning(sym: Symbol)(using Context): Boolean = - /** is the owner an enum or its companion and also the owner of sym */ + // is the owner an enum or its companion and also the owner of sym def isEnumOwner(owner: Symbol)(using Context) = // pre: sym is an enumcase if owner.isEnumClass then owner.companionClass eq sym.owner @@ -194,6 +208,19 @@ object CrossVersionChecks: // pre: sym is an enumcase owner.isDeprecated || isEnumOwner(owner) - (ctx.owner.is(Synthetic) && sym.is(CaseClass)) - || ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated) + def siteIsEnclosedByDeprecatedElement = + ctx.owner.ownersIterator.exists: + if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated + + def siteIsSyntheticCaseClassMember = + val owner = ctx.owner + def symIsCaseOrMember = + val enclosing = owner.enclosingClass + val companion = enclosing.companionClass + // deprecated sym is either enclosing case class or a sibling member + def checkSym(k: Symbol) = sym == k || sym.owner == k + (enclosing.is(CaseClass) || companion.is(CaseClass)) && (checkSym(enclosing) || checkSym(companion)) + owner.is(Synthetic) && symIsCaseOrMember + + siteIsSyntheticCaseClassMember || siteIsEnclosedByDeprecatedElement end skipWarning diff --git a/compiler/test-resources/repl/reset-command b/compiler/test-resources/repl/reset-command index 0adf0d93a0d8..3977504d4f5a 100644 --- a/compiler/test-resources/repl/reset-command +++ b/compiler/test-resources/repl/reset-command @@ -12,7 +12,7 @@ scala> def f(thread: Thread) = thread.stop() -- Deprecation Warning: -------------------------------------------------------- 1 | def f(thread: Thread) = thread.stop() | ^^^^^^^^^^^ - |method stop in class Thread is deprecated since : see corresponding Javadoc for more information. + |method stop in class Thread is deprecated: see corresponding Javadoc for more information. 
def f(thread: Thread): Unit scala> def resetNoArgsStillWorks = 1 diff --git a/compiler/test-resources/repl/settings-command b/compiler/test-resources/repl/settings-command index 5e9912384435..dce782b92836 100644 --- a/compiler/test-resources/repl/settings-command +++ b/compiler/test-resources/repl/settings-command @@ -11,7 +11,7 @@ scala> def f(thread: Thread) = thread.stop() -- Deprecation Warning: -------------------------------------------------------- 1 | def f(thread: Thread) = thread.stop() | ^^^^^^^^^^^ - |method stop in class Thread is deprecated since : see corresponding Javadoc for more information. + |method stop in class Thread is deprecated: see corresponding Javadoc for more information. def f(thread: Thread): Unit scala> diff --git a/tests/pos/i11022.scala b/tests/pos/i11022.scala index aa211426387d..6cccea0ac77c 100644 --- a/tests/pos/i11022.scala +++ b/tests/pos/i11022.scala @@ -1,3 +1,5 @@ //> using options -Werror -deprecation @deprecated("no CaseClass") case class CaseClass(rgb: Int) + +case class K(@deprecated("don't use k, ok?","0.1") k: Int) diff --git a/tests/warn/i11022.check b/tests/warn/i11022.check index 4257bb64652c..79b8b990ed59 100644 --- a/tests/warn/i11022.check +++ b/tests/warn/i11022.check @@ -1,3 +1,7 @@ +-- Deprecation Warning: tests/warn/i11022.scala:19:22 ------------------------------------------------------------------ +19 | def usage(k: K) = k.k // warn + | ^^^ + | value k in class K is deprecated since 0.1: don't use k, ok? 
-- Deprecation Warning: tests/warn/i11022.scala:10:7 ------------------------------------------------------------------- 10 |val a: CaseClass = CaseClass(42) // warn: deprecated type // warn: deprecated apply method | ^^^^^^^^^ @@ -18,3 +22,7 @@ 12 |val c: Unit = CaseClass(42).magic() // warn: deprecated apply method | ^^^^^^^^^ | class CaseClass is deprecated: no CaseClass +-- Deprecation Warning: tests/warn/i11022.scala:14:4 ------------------------------------------------------------------- +14 |val CaseClass(rgb) = b // warn + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass diff --git a/tests/warn/i11022.scala b/tests/warn/i11022.scala index 8e2de9d8f519..60a7299a49b1 100644 --- a/tests/warn/i11022.scala +++ b/tests/warn/i11022.scala @@ -10,4 +10,12 @@ object CaseClass: val a: CaseClass = CaseClass(42) // warn: deprecated type // warn: deprecated apply method val b: CaseClass = new CaseClass(42) // warn: deprecated type // warn: deprecated class val c: Unit = CaseClass(42).magic() // warn: deprecated apply method -val d: Unit = CaseClass.notDeprecated() // compiles \ No newline at end of file +val d: Unit = CaseClass.notDeprecated() // compiles +val CaseClass(rgb) = b // warn + +case class K(@deprecated("don't use k, ok?","0.1") k: Int) + +object K: + def usage(k: K) = k.k // warn + +val s: String = CaseClass.toString From 5ff0ddcb83c7af8029583a99b8fbc480292ff014 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Wed, 20 Mar 2024 14:17:10 +0100 Subject: [PATCH 008/465] Add nightly releases for lts-3.3 branch --- .github/workflows/ci.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a51f7337017e..673c0cd95ec7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -602,6 +602,9 @@ jobs: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace 
}}/../../cache/general:/root/.cache + strategy: + matrix: + branch: [main, lts-3.3] needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8] if: "(github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && github.repository == 'lampepfl/dotty'" env: @@ -627,6 +630,8 @@ jobs: - name: Git Checkout uses: actions/checkout@v4 + with: + ref: ${{ matrix.branch }} - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true From 0bf43b2fbb1ad0faa64cd1da97a9682b02724c49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 25 Mar 2024 17:15:15 +0100 Subject: [PATCH 009/465] Fix #19746: Do not follow param term refs in `isConcrete`. Term refs that reference term parameters can be substituted later by more precise ones, which can lead to different instantiations of type captures. They must therefore be considered as non concrete when following `baseType`s to captures in variant positions, like we do for type param refs and other substitutable references. We actually rewrite `isConcrete` in the process to be more based on an "allow list" of things we know to be concrete, rather than an "exclusion list" of things we know to be non-concrete. That should make it more straightforward to evaluate the validity of the algorithm. 
--- .../dotty/tools/dotc/core/TypeComparer.scala | 57 +++++++++++-------- tests/neg/i19746.check | 7 +++ tests/neg/i19746.scala | 15 +++++ tests/pos/TupleReverse.scala | 5 +- tests/pos/TupleReverseOnto.scala | 7 ++- 5 files changed, 61 insertions(+), 30 deletions(-) create mode 100644 tests/neg/i19746.check create mode 100644 tests/neg/i19746.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 302ad7987889..cb58ebf8c025 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3360,37 +3360,44 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { * * See notably neg/wildcard-match.scala for examples of this. * - * See neg/i13780.scala and neg/i13780-1.scala for ClassCastException - * reproducers if we disable this check. + * See neg/i13780.scala, neg/i13780-1.scala and neg/i19746.scala for + * ClassCastException reproducers if we disable this check. 
*/ - def followEverythingConcrete(tp: Type): Type = - val widenedTp = tp.widenDealias - val tp1 = widenedTp.normalized - - def followTp1: Type = - // If both widenDealias and normalized did something, start again - if (tp1 ne widenedTp) && (widenedTp ne tp) then followEverythingConcrete(tp1) - else tp1 + def isConcrete(tp: Type): Boolean = + val tp1 = tp.normalized tp1 match case tp1: TypeRef => - tp1.info match - case TypeAlias(tl: HKTypeLambda) => tl - case MatchAlias(tl: HKTypeLambda) => tl - case _ => followTp1 - case tp1 @ AppliedType(tycon, args) => - val concreteTycon = followEverythingConcrete(tycon) - if concreteTycon eq tycon then followTp1 - else followEverythingConcrete(concreteTycon.applyIfParameterized(args)) + if tp1.symbol.isClass then true + else + tp1.info match + case info: AliasingBounds => isConcrete(info.alias) + case _ => false + case tp1: AppliedType => + isConcrete(tp1.tycon) && isConcrete(tp1.superType) + case tp1: HKTypeLambda => + true + case tp1: TermRef => + !tp1.symbol.is(Param) && isConcrete(tp1.underlying) + case tp1: TermParamRef => + false + case tp1: SingletonType => + isConcrete(tp1.underlying) + case tp1: ExprType => + isConcrete(tp1.underlying) + case tp1: AnnotatedType => + isConcrete(tp1.parent) + case tp1: RefinedType => + isConcrete(tp1.underlying) + case tp1: RecType => + isConcrete(tp1.underlying) + case tp1: AndOrType => + isConcrete(tp1.tp1) && isConcrete(tp1.tp2) case _ => - followTp1 - end followEverythingConcrete - - def isConcrete(tp: Type): Boolean = - followEverythingConcrete(tp) match - case tp1: AndOrType => isConcrete(tp1.tp1) && isConcrete(tp1.tp2) - case tp1 => tp1.underlyingClassRef(refinementOK = true).exists + val tp2 = tp1.stripped.stripLazyRef + (tp2 ne tp) && isConcrete(tp2) + end isConcrete // Actual matching logic diff --git a/tests/neg/i19746.check b/tests/neg/i19746.check new file mode 100644 index 000000000000..6be8700bb550 --- /dev/null +++ b/tests/neg/i19746.check @@ -0,0 +1,7 @@ +-- [E007] Type 
Mismatch Error: tests/neg/i19746.scala:9:30 ------------------------------------------------------------- +9 | def asX(w: W[Any]): w.X = self // error: Type Mismatch + | ^^^^ + | Found: (self : Any) + | Required: w.X + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i19746.scala b/tests/neg/i19746.scala new file mode 100644 index 000000000000..b2969d4d3fef --- /dev/null +++ b/tests/neg/i19746.scala @@ -0,0 +1,15 @@ +trait V: + type X = this.type match + case W[x] => x + +trait W[+Y] extends V + +object Test: + extension (self: Any) def as[T]: T = + def asX(w: W[Any]): w.X = self // error: Type Mismatch + asX(new W[T] {}) + + def main(args: Array[String]): Unit = + val b = 0.as[Boolean] // java.lang.ClassCastException if the code is allowed to compile + println(b) +end Test diff --git a/tests/pos/TupleReverse.scala b/tests/pos/TupleReverse.scala index 9b83280afcf1..a5ef2ed69f0c 100644 --- a/tests/pos/TupleReverse.scala +++ b/tests/pos/TupleReverse.scala @@ -12,5 +12,6 @@ def test[T1, T2, T3, T4] = def test2[Tup <: Tuple] = summon[Reverse[Tup] =:= Reverse[Tup]] -def test3[T1, T2, T3, T4](tup1: (T1, T2, T3, T4)) = - summon[Reverse[tup1.type] =:= (T4, T3, T2, T1)] +def test3[T1, T2, T3, T4](tup1: (T1, T2, T3, T4)): Unit = + val tup11: (T1, T2, T3, T4) = tup1 + summon[Reverse[tup11.type] =:= (T4, T3, T2, T1)] diff --git a/tests/pos/TupleReverseOnto.scala b/tests/pos/TupleReverseOnto.scala index 09d5a323cb29..eca8a3e3033c 100644 --- a/tests/pos/TupleReverseOnto.scala +++ b/tests/pos/TupleReverseOnto.scala @@ -13,6 +13,7 @@ def test2[Tup1 <: Tuple, Tup2 <: Tuple] = summon[ReverseOnto[EmptyTuple, Tup1] =:= Tup1] summon[ReverseOnto[Tup1, EmptyTuple] =:= Reverse[Tup1]] -def test3[T1, T2, T3, T4](tup1: (T1, T2), tup2: (T3, T4)) = - summon[ReverseOnto[tup1.type, tup2.type] <:< (T2, T1, T3, T4)] - summon[ReverseOnto[tup1.type, tup2.type] =:= T2 *: T1 *: tup2.type] +def test3[T1, T2, T3, T4](tup1: (T1, T2), tup2: (T3, T4)): Unit = + val 
tup11: (T1, T2) = tup1 + summon[ReverseOnto[tup11.type, tup2.type] <:< (T2, T1, T3, T4)] + summon[ReverseOnto[tup11.type, tup2.type] =:= T2 *: T1 *: tup2.type] From 7b69d3304972b94fecd01788784def2db00ba269 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 15 Mar 2024 12:29:10 +0100 Subject: [PATCH 010/465] Add regression tests --- tests/neg/i19949.scala | 9 +++++++++ tests/pos/i19950.scala | 10 ++++++++++ 2 files changed, 19 insertions(+) create mode 100644 tests/neg/i19949.scala create mode 100644 tests/pos/i19950.scala diff --git a/tests/neg/i19949.scala b/tests/neg/i19949.scala new file mode 100644 index 000000000000..96a22e42e079 --- /dev/null +++ b/tests/neg/i19949.scala @@ -0,0 +1,9 @@ + +trait T[N]: + type M = N match + case 0 => Any + +val t: T[Double] = new T[Double] {} +val x: t.M = "hello" // error + +val z: T[Double]#M = "hello" // error diff --git a/tests/pos/i19950.scala b/tests/pos/i19950.scala new file mode 100644 index 000000000000..349140f43ff5 --- /dev/null +++ b/tests/pos/i19950.scala @@ -0,0 +1,10 @@ + +trait Apply[F[_]]: + extension [T <: NonEmptyTuple](tuple: T)(using toMap: Tuple.IsMappedBy[F][T]) + def mapN[B](f: Tuple.InverseMap[T, F] => B): F[B] = ??? + +given Apply[Option] = ??? +given Apply[List] = ??? +given Apply[util.Try] = ??? 
+ +@main def Repro = (Option(1), Option(2), Option(3)).mapN(_ + _ + _) \ No newline at end of file From 7460ab3ca001eafaa544dca152b751a328b4be06 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 15 Mar 2024 12:46:16 +0100 Subject: [PATCH 011/465] Revert "ErrorType instead of throwing in match type "no cases"" This reverts commit 9ae1598e Note that the changes in Typer: ``` val unsimplifiedType = result.tpe simplify(result, pt, locked) result.tpe.stripTypeVar match case e: ErrorType if !unsimplifiedType.isErroneous => errorTree(xtree, e.msg, xtree.srcPos) case _ => result ``` cannot be reverted yet since the MatchReducer now also reduces to an `ErrorType` for MatchTypeLegacyPatterns, introduced after 9ae1598e. --- .../tools/dotc/core/MatchTypeTrace.scala | 12 +++++ .../dotty/tools/dotc/core/TypeComparer.scala | 19 ++----- .../dotty/tools/dotc/core/TypeErrors.scala | 3 ++ .../dotty/tools/dotc/typer/Implicits.scala | 2 +- tests/neg-macros/toexproftuple.scala | 49 +++++++++++++++++-- tests/neg/i12049.check | 20 ++++---- tests/neg/i13757-match-type-anykind.scala | 8 +-- tests/neg/matchtype-seq.check | 8 +-- 8 files changed, 82 insertions(+), 39 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index fb278ab92dc9..145c7cf5e9eb 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -12,6 +12,7 @@ object MatchTypeTrace: private enum TraceEntry: case TryReduce(scrut: Type) + case NoMatches(scrut: Type, cases: List[MatchTypeCaseSpec]) case Stuck(scrut: Type, stuckCase: MatchTypeCaseSpec, otherCases: List[MatchTypeCaseSpec]) case NoInstance(scrut: Type, stuckCase: MatchTypeCaseSpec, fails: List[(Name, TypeBounds)]) case EmptyScrutinee(scrut: Type) @@ -50,6 +51,12 @@ object MatchTypeTrace: case _ => case _ => + /** Record a failure that scrutinee `scrut` does not match any case in `cases`. 
+ * Only the first failure is recorded. + */ + def noMatches(scrut: Type, cases: List[MatchTypeCaseSpec])(using Context) = + matchTypeFail(NoMatches(scrut, cases)) + /** Record a failure that scrutinee `scrut` does not match `stuckCase` but is * not disjoint from it either, which means that the remaining cases `otherCases` * cannot be visited. Only the first failure is recorded. @@ -95,6 +102,11 @@ object MatchTypeTrace: private def explainEntry(entry: TraceEntry)(using Context): String = entry match case TryReduce(scrut: Type) => i" trying to reduce $scrut" + case NoMatches(scrut, cases) => + i""" failed since selector $scrut + | matches none of the cases + | + | ${casesText(cases)}""" case EmptyScrutinee(scrut) => i""" failed since selector $scrut | is uninhabited (there are no values of that type).""" diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 302ad7987889..c26512232c6b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3595,22 +3595,9 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { NoType case Nil => val casesText = MatchTypeTrace.noMatchesText(scrut, cases) - ErrorType(reporting.MatchTypeNoCases(casesText)) - - inFrozenConstraint { - if scrut.isError then - // if the scrutinee is an error type - // then just return that as the result - // not doing so will result in the first type case matching - // because ErrorType (as a FlexType) is <:< any type case - // this situation can arise from any kind of nesting of match types, - // e.g. 
neg/i12049 `Tuple.Concat[Reverse[ts], (t2, t1)]` - // if Reverse[ts] fails with no matches, - // the error type should be the reduction of the Concat too - scrut - else - recur(cases) - } + throw MatchTypeReductionError(em"Match type reduction $casesText") + + inFrozenConstraint(recur(cases)) } } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 240bc4eebd84..eda3910f44fc 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -53,6 +53,9 @@ object TypeError: def toMessage(using Context) = msg end TypeError +class MatchTypeReductionError(msg: Message)(using Context) extends TypeError: + def toMessage(using Context) = msg + class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])(using Context) extends TypeError: def toMessage(using Context) = em"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5162b3fed1b9..f3abe87ed765 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -663,7 +663,7 @@ trait ImplicitRunInfo: traverseChildren(t) case t => traverseChildren(t) - traverse(t.normalized) + traverse(try t.normalized catch case _: MatchTypeReductionError => t) catch case ex: Throwable => handleRecursive("collectParts of", t.show, ex) def apply(tp: Type): collection.Set[Type] = diff --git a/tests/neg-macros/toexproftuple.scala b/tests/neg-macros/toexproftuple.scala index 7b69c578be70..20ae2f08ff8d 100644 --- a/tests/neg-macros/toexproftuple.scala +++ b/tests/neg-macros/toexproftuple.scala @@ -1,8 +1,33 @@ -import scala.quoted._, scala.deriving.* +import scala.quoted._, scala.deriving.* // error +// ^ +// 
Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) +// matches none of the cases +// +// case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] +// case EmptyTuple => EmptyTuple -inline def mcr: Any = ${mcrImpl} +inline def mcr: Any = ${mcrImpl} // error +// ^ +// Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) +// matches none of the cases +// +// case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] +// case EmptyTuple => EmptyTuple -def mcrImpl(using ctx: Quotes): Expr[Any] = { +def mcrImpl(using ctx: Quotes): Expr[Any] = { // error // error + //^ + // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) + // matches none of the cases + // + // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] + // case EmptyTuple => EmptyTuple + + // ^ + // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) + // matches none of the cases + // + // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] + // case EmptyTuple => EmptyTuple val tpl: (Expr[1], Expr[2], Expr[3]) = ('{1}, '{2}, '{3}) '{val res: (1, 3, 3) = ${Expr.ofTuple(tpl)}; res} // error @@ -11,7 +36,7 @@ def mcrImpl(using ctx: Quotes): Expr[Any] = { // Required: quoted.Expr[((1 : Int), (3 : Int), (3 : Int))] val tpl2: (Expr[1], 2, Expr[3]) = ('{1}, 2, '{3}) - '{val res = ${Expr.ofTuple(tpl2)}; res} // error + '{val res = ${Expr.ofTuple(tpl2)}; res} // error // error // error // error // ^ // Cannot prove that (quoted.Expr[(1 : Int)], (2 : Int), quoted.Expr[(3 : Int)]) =:= scala.Tuple.Map[ // scala.Tuple.InverseMap[ @@ -19,4 +44,20 @@ def mcrImpl(using ctx: Quotes): Expr[Any] = { // , quoted.Expr] // , quoted.Expr]. 
+ // ^ + // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) + // matches none of the cases + // + // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] + // case EmptyTuple => EmptyTuple + + // ^ + // Cyclic reference involving val res + + // ^ + // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) + // matches none of the cases + // + // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] + // case EmptyTuple => EmptyTuple } diff --git a/tests/neg/i12049.check b/tests/neg/i12049.check index b44eb612f627..11c648e35a57 100644 --- a/tests/neg/i12049.check +++ b/tests/neg/i12049.check @@ -15,17 +15,17 @@ | case B => String | | longer explanation available when compiling with `-explain` --- [E184] Type Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------- +-- Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------------------- 14 |val y3: String = ??? : Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Match type reduction failed since selector EmptyTuple + | ^ + | Match type reduction failed since selector EmptyTuple.type | matches none of the cases | | case _ *: _ *: t => Last[t] | case t *: EmptyTuple => t --- [E184] Type Error: tests/neg/i12049.scala:22:26 --------------------------------------------------------------------- +-- Error: tests/neg/i12049.scala:22:26 --------------------------------------------------------------------------------- 22 |val z3: (A, B, A) = ??? 
: Reverse[(A, B, A)] // error - | ^^^^^^^^^^^^^^^^^^ + | ^ | Match type reduction failed since selector A *: EmptyTuple.type | matches none of the cases | @@ -45,17 +45,17 @@ | Therefore, reduction cannot advance to the remaining case | | case B => String --- [E184] Type Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------- +-- Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------------------- 25 |val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Match type reduction failed since selector EmptyTuple + | ^ + | Match type reduction failed since selector EmptyTuple.type | matches none of the cases | | case _ *: _ *: t => Last[t] | case t *: EmptyTuple => t --- [E184] Type Error: tests/neg/i12049.scala:26:29 --------------------------------------------------------------------- +-- Error: tests/neg/i12049.scala:26:29 --------------------------------------------------------------------------------- 26 |val _ = summon[(A, B, A) =:= Reverse[(A, B, A)]] // error - | ^^^^^^^^^^^^^^^^^^ + | ^ | Match type reduction failed since selector A *: EmptyTuple.type | matches none of the cases | diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index a80e8b2b289b..d8273e546dab 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -1,16 +1,16 @@ object Test: - type AnyKindMatchType1[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType1[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error case Option[a] => Int type AnyKindMatchType2[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case Option => Int // error: Missing type parameter for Option - type 
AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error case _ => Int - type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded // error case _ => Int - type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded // error case _ => Int end Test diff --git a/tests/neg/matchtype-seq.check b/tests/neg/matchtype-seq.check index 980329d585dc..b72200868d81 100644 --- a/tests/neg/matchtype-seq.check +++ b/tests/neg/matchtype-seq.check @@ -1,14 +1,14 @@ --- [E184] Type Error: tests/neg/matchtype-seq.scala:9:11 --------------------------------------------------------------- +-- Error: tests/neg/matchtype-seq.scala:9:11 --------------------------------------------------------------------------- 9 | identity[T1[3]]("") // error - | ^^^^^ + | ^ | Match type reduction failed since selector (3 : Int) | matches none of the cases | | case (1 : Int) => Int | case (2 : Int) => String --- [E184] Type Error: tests/neg/matchtype-seq.scala:10:11 -------------------------------------------------------------- +-- Error: tests/neg/matchtype-seq.scala:10:11 -------------------------------------------------------------------------- 10 | identity[T1[3]](1) // error - | ^^^^^ + | ^ | Match type reduction failed since selector (3 : Int) | matches none of the cases | From 5becaace6495a8e1af69951d9ffef54406f722d1 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 15 Mar 2024 13:17:42 +0100 Subject: [PATCH 012/465] Update check-files and remove i18488.scala i18488.scala was only passing because 
of the bug in the MatchReducer, as we can see in the subtyping trace: ``` ==> isSubType TableQuery[BaseCrudRepository.this.EntityTable] <:< Query[BaseCrudRepository.this.EntityTable, E[Option]]? ==> isSubType Query[BaseCrudRepository.this.EntityTable, Extract[BaseCrudRepository.this.EntityTable]] <:< Query[BaseCrudRepository.this.EntityTable, E[Option]] (left is approximated)? ==> isSubType E[Option] <:< Extract[BaseCrudRepository.this.EntityTable]? ==> isSubType [T[_$1]] =>> Any <:< Extract? ==> isSubType Any <:< Extract[T]? ==> isSubType Any <:< T match { case AbstractTable[t] => t } <: t (right is approximated)? ==> isSubType Any <:< t> (right is approximated)? <== isSubType Any <:< t> (right is approximated) = true <== isSubType Any <:< T match { case AbstractTable[t] => t } <: t (right is approximated) = true <== isSubType Any <:< Extract[T] = true <== isSubType [T[_$1]] =>> Any <:< Extract = true ... <== isSubType Extract[BaseCrudRepository.this.EntityTable] <:< E[Option] = true <== isSubType Query[BaseCrudRepository.this.EntityTable, Extract[BaseCrudRepository.this.EntityTable]] <:< Query[BaseCrudRepository.this.EntityTable, E[Option]] (left is approximated) = true <== isSubType TableQuery[BaseCrudRepository.this.EntityTable] <:< Query[BaseCrudRepository.this.EntityTable, E[Option]] = true ``` --- tests/neg/i12049.check | 4 ++-- tests/neg/i13757-match-type-anykind.scala | 4 ++-- tests/pos/i18488.scala | 15 --------------- 3 files changed, 4 insertions(+), 19 deletions(-) delete mode 100644 tests/pos/i18488.scala diff --git a/tests/neg/i12049.check b/tests/neg/i12049.check index 11c648e35a57..4977b8d8c591 100644 --- a/tests/neg/i12049.check +++ b/tests/neg/i12049.check @@ -18,7 +18,7 @@ -- Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------------------- 14 |val y3: String = ??? 
: Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error | ^ - | Match type reduction failed since selector EmptyTuple.type + | Match type reduction failed since selector EmptyTuple | matches none of the cases | | case _ *: _ *: t => Last[t] @@ -48,7 +48,7 @@ -- Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------------------- 25 |val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error | ^ - | Match type reduction failed since selector EmptyTuple.type + | Match type reduction failed since selector EmptyTuple | matches none of the cases | | case _ *: _ *: t => Last[t] diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index d8273e546dab..3feb9907fb69 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -1,11 +1,11 @@ object Test: - type AnyKindMatchType1[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + type AnyKindMatchType1[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case Option[a] => Int type AnyKindMatchType2[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case Option => Int // error: Missing type parameter for Option - type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded // error diff --git a/tests/pos/i18488.scala b/tests/pos/i18488.scala deleted file mode 100644 index c225a2c20711..000000000000 --- a/tests/pos/i18488.scala +++ /dev/null @@ -1,15 +0,0 @@ -trait AbstractTable[T] - -trait Query[E, U] - -class TableQuery[E <: 
AbstractTable[?]] extends Query[E, Extract[E]] - -type Extract[E] = E match - case AbstractTable[t] => t - -trait BaseCrudRepository[E[T[_]]]: - - type EntityTable <: AbstractTable[E[Option]] - - def filterById: Query[EntityTable, Extract[EntityTable]] = - new TableQuery[EntityTable] From d7946bf928bef748ee509cc709570b308e94fba7 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Sat, 16 Mar 2024 18:28:26 +0100 Subject: [PATCH 013/465] Make match types with no matching cases not an error Modify the MatchReducer to return NoType in the case of no matches, rather than throwing a MatchTypeReductionError. This makes it consistent with the other match type reduction failures, where being stuck does not result in an error, but simply in an unreduced match type. We still get the explanations of the underlying error in the MatchTypeTrace, but in positions which need the reduction for conformance, rather than at application site of the match type. --- compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 4 ++-- compiler/src/dotty/tools/dotc/core/TypeErrors.scala | 3 --- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 2 +- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index c26512232c6b..b07ec01aa75b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3594,8 +3594,8 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { MatchTypeTrace.emptyScrutinee(scrut) NoType case Nil => - val casesText = MatchTypeTrace.noMatchesText(scrut, cases) - throw MatchTypeReductionError(em"Match type reduction $casesText") + MatchTypeTrace.noMatches(scrut, cases) + NoType inFrozenConstraint(recur(cases)) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index eda3910f44fc..240bc4eebd84 100644 --- 
a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -53,9 +53,6 @@ object TypeError: def toMessage(using Context) = msg end TypeError -class MatchTypeReductionError(msg: Message)(using Context) extends TypeError: - def toMessage(using Context) = msg - class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])(using Context) extends TypeError: def toMessage(using Context) = em"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index f3abe87ed765..5162b3fed1b9 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -663,7 +663,7 @@ trait ImplicitRunInfo: traverseChildren(t) case t => traverseChildren(t) - traverse(try t.normalized catch case _: MatchTypeReductionError => t) + traverse(t.normalized) catch case ex: Throwable => handleRecursive("collectParts of", t.show, ex) def apply(tp: Type): collection.Set[Type] = From 61c28329e4850ea7f0a0e93e354ea10deccc727b Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Sat, 16 Mar 2024 18:30:57 +0100 Subject: [PATCH 014/465] Update check-files and error annotations The diff in neg/10349.scala is quite interesting. With a few intermediate values: ```scala type First[X] = X match case Map[_, v] => First[Option[v]] def first[X](x: X): First[X] = x match case x: Map[k, v] => val hdOpt: Option[v] = x.values.headOption first(hdOpt): First[Option[v]] // error only before changes ``` This now type-checks but will fail at runtime because of the in-exhaustivity of the match expression. Perhaps we should add some additional condition in `isMatchTypeShaped` to account for this, or at least emit a warning ? 
--- .../dotty/tools/dotc/core/TypeComparer.scala | 4 + tests/neg-macros/toexproftuple.scala | 61 +------------ tests/neg/10349.scala | 2 +- tests/neg/10747.scala | 3 +- tests/neg/i12049.check | 88 ++++++++++++++----- tests/neg/i13757-match-type-anykind.scala | 4 +- tests/neg/matchtype-seq.check | 40 ++++++--- tests/{neg => warn}/12974.scala | 2 +- 8 files changed, 106 insertions(+), 98 deletions(-) rename tests/{neg => warn}/12974.scala (94%) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index b07ec01aa75b..73b45117cc2d 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3594,6 +3594,10 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { MatchTypeTrace.emptyScrutinee(scrut) NoType case Nil => + /* TODO warn ? then re-enable warn/12974.scala:26 + val noCasesText = MatchTypeTrace.noMatchesText(scrut, cases) + report.warning(reporting.MatchTypeNoCases(noCasesText), pos = ???) 
+ */ MatchTypeTrace.noMatches(scrut, cases) NoType diff --git a/tests/neg-macros/toexproftuple.scala b/tests/neg-macros/toexproftuple.scala index 20ae2f08ff8d..f33bfd5f6dfb 100644 --- a/tests/neg-macros/toexproftuple.scala +++ b/tests/neg-macros/toexproftuple.scala @@ -1,63 +1,10 @@ -import scala.quoted._, scala.deriving.* // error -// ^ -// Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) -// matches none of the cases -// -// case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] -// case EmptyTuple => EmptyTuple - -inline def mcr: Any = ${mcrImpl} // error -// ^ -// Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) -// matches none of the cases -// -// case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] -// case EmptyTuple => EmptyTuple - -def mcrImpl(using ctx: Quotes): Expr[Any] = { // error // error - //^ - // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) - // matches none of the cases - // - // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] - // case EmptyTuple => EmptyTuple - - // ^ - // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) - // matches none of the cases - // - // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] - // case EmptyTuple => EmptyTuple +import scala.quoted._, scala.deriving.* +inline def mcr: Any = ${mcrImpl} +def mcrImpl(using ctx: Quotes): Expr[Any] = { val tpl: (Expr[1], Expr[2], Expr[3]) = ('{1}, '{2}, '{3}) '{val res: (1, 3, 3) = ${Expr.ofTuple(tpl)}; res} // error - // ^^^^^^^^^^^^^^^^^ - // Found: quoted.Expr[(1 : Int) *: (2 : Int) *: (3 : Int) *: EmptyTuple] - // Required: quoted.Expr[((1 : Int), (3 : Int), (3 : Int))] val tpl2: (Expr[1], 2, Expr[3]) = ('{1}, 2, '{3}) - '{val res = ${Expr.ofTuple(tpl2)}; res} // error // error // error // error - // ^ - // Cannot prove that (quoted.Expr[(1 : Int)], (2 : Int), quoted.Expr[(3 
: Int)]) =:= scala.Tuple.Map[ - // scala.Tuple.InverseMap[ - // (quoted.Expr[(1 : Int)], (2 : Int), quoted.Expr[(3 : Int)]) - // , quoted.Expr] - // , quoted.Expr]. - - // ^ - // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) - // matches none of the cases - // - // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] - // case EmptyTuple => EmptyTuple - - // ^ - // Cyclic reference involving val res - - // ^ - // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) - // matches none of the cases - // - // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] - // case EmptyTuple => EmptyTuple + '{val res = ${Expr.ofTuple(tpl2)}; res} // error } diff --git a/tests/neg/10349.scala b/tests/neg/10349.scala index 4ea683f6a8fb..b591c1a79abb 100644 --- a/tests/neg/10349.scala +++ b/tests/neg/10349.scala @@ -4,7 +4,7 @@ object Firsts: case Map[_, v] => First[Option[v]] def first[X](x: X): First[X] = x match - case x: Map[_, _] => first(x.values.headOption) // error + case x: Map[_, _] => first(x.values.headOption) @main def runFirsts2(): Unit = diff --git a/tests/neg/10747.scala b/tests/neg/10747.scala index a299f2a6590c..5275ebc84121 100644 --- a/tests/neg/10747.scala +++ b/tests/neg/10747.scala @@ -2,4 +2,5 @@ type Foo[A] = A match { case Int => String } -type B = Foo[Boolean] // error +type B = Foo[Boolean] +val _: B = "hello" // error diff --git a/tests/neg/i12049.check b/tests/neg/i12049.check index 4977b8d8c591..b9d3a8434015 100644 --- a/tests/neg/i12049.check +++ b/tests/neg/i12049.check @@ -15,22 +15,45 @@ | case B => String | | longer explanation available when compiling with `-explain` --- Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg/i12049.scala:14:17 ------------------------------------------------------------ 14 |val y3: String = ??? 
: Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error - | ^ - | Match type reduction failed since selector EmptyTuple - | matches none of the cases + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: Last[EmptyTuple] + | Required: String | - | case _ *: _ *: t => Last[t] - | case t *: EmptyTuple => t --- Error: tests/neg/i12049.scala:22:26 --------------------------------------------------------------------------------- + | Note: a match type could not be fully reduced: + | + | trying to reduce Last[EmptyTuple] + | failed since selector EmptyTuple + | matches none of the cases + | + | case _ *: _ *: t => Last[t] + | case t *: EmptyTuple => t + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i12049.scala:22:20 ------------------------------------------------------------ 22 |val z3: (A, B, A) = ??? : Reverse[(A, B, A)] // error - | ^ - | Match type reduction failed since selector A *: EmptyTuple.type - | matches none of the cases + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: Tuple.Concat[Reverse[A *: EmptyTuple.type], (B, A)] + | Required: (A, B, A) + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Tuple.Concat[Reverse[A *: EmptyTuple.type], (B, A)] + | trying to reduce Reverse[A *: EmptyTuple.type] + | failed since selector A *: EmptyTuple.type + | matches none of the cases + | + | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + | case EmptyTuple => EmptyTuple + | trying to reduce Reverse[A *: EmptyTuple.type] + | failed since selector A *: EmptyTuple.type + | matches none of the cases | - | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] - | case EmptyTuple => EmptyTuple + | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + | case EmptyTuple => EmptyTuple + | + | longer explanation available when compiling with `-explain` -- [E172] Type Error: tests/neg/i12049.scala:24:20 
--------------------------------------------------------------------- 24 |val _ = summon[M[B]] // error | ^ @@ -45,22 +68,39 @@ | Therefore, reduction cannot advance to the remaining case | | case B => String --- Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i12049.scala:25:78 --------------------------------------------------------------------- 25 |val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error - | ^ - | Match type reduction failed since selector EmptyTuple - | matches none of the cases + | ^ + | Cannot prove that String =:= Last[EmptyTuple]. + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Last[EmptyTuple] + | failed since selector EmptyTuple + | matches none of the cases | - | case _ *: _ *: t => Last[t] - | case t *: EmptyTuple => t --- Error: tests/neg/i12049.scala:26:29 --------------------------------------------------------------------------------- + | case _ *: _ *: t => Last[t] + | case t *: EmptyTuple => t +-- [E172] Type Error: tests/neg/i12049.scala:26:48 --------------------------------------------------------------------- 26 |val _ = summon[(A, B, A) =:= Reverse[(A, B, A)]] // error - | ^ - | Match type reduction failed since selector A *: EmptyTuple.type - | matches none of the cases + | ^ + | Cannot prove that (A, B, A) =:= Tuple.Concat[Reverse[A *: EmptyTuple.type], (B, A)]. 
+ | + | Note: a match type could not be fully reduced: + | + | trying to reduce Tuple.Concat[Reverse[A *: EmptyTuple.type], (B, A)] + | trying to reduce Reverse[A *: EmptyTuple.type] + | failed since selector A *: EmptyTuple.type + | matches none of the cases + | + | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + | case EmptyTuple => EmptyTuple + | trying to reduce Reverse[A *: EmptyTuple.type] + | failed since selector A *: EmptyTuple.type + | matches none of the cases | - | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] - | case EmptyTuple => EmptyTuple + | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + | case EmptyTuple => EmptyTuple -- [E008] Not Found Error: tests/neg/i12049.scala:28:21 ---------------------------------------------------------------- 28 |val _ = (??? : M[B]).length // error | ^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index 3feb9907fb69..a80e8b2b289b 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -8,9 +8,9 @@ object Test: type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded // error + type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int end Test diff --git a/tests/neg/matchtype-seq.check b/tests/neg/matchtype-seq.check index b72200868d81..1e786b6714c6 100644 --- a/tests/neg/matchtype-seq.check +++ b/tests/neg/matchtype-seq.check @@ -1,19 +1,35 @@ --- Error: 
tests/neg/matchtype-seq.scala:9:11 --------------------------------------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:9:18 ------------------------------------------------------ 9 | identity[T1[3]]("") // error - | ^ - | Match type reduction failed since selector (3 : Int) - | matches none of the cases + | ^^ + | Found: ("" : String) + | Required: Test.T1[(3 : Int)] | - | case (1 : Int) => Int - | case (2 : Int) => String --- Error: tests/neg/matchtype-seq.scala:10:11 -------------------------------------------------------------------------- + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T1[(3 : Int)] + | failed since selector (3 : Int) + | matches none of the cases + | + | case (1 : Int) => Int + | case (2 : Int) => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:10:18 ----------------------------------------------------- 10 | identity[T1[3]](1) // error - | ^ - | Match type reduction failed since selector (3 : Int) - | matches none of the cases + | ^ + | Found: (1 : Int) + | Required: Test.T1[(3 : Int)] | - | case (1 : Int) => Int - | case (2 : Int) => String + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T1[(3 : Int)] + | failed since selector (3 : Int) + | matches none of the cases + | + | case (1 : Int) => Int + | case (2 : Int) => String + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:11:20 ----------------------------------------------------- 11 | identity[T1[Int]]("") // error | ^^ diff --git a/tests/neg/12974.scala b/tests/warn/12974.scala similarity index 94% rename from tests/neg/12974.scala rename to tests/warn/12974.scala index 90edcc916471..45029602296f 100644 --- a/tests/neg/12974.scala +++ b/tests/warn/12974.scala @@ -23,7 +23,7 @@ object RecMap { def main(args: 
Array[String]) = import Record._ - val foo: Any = Rec.empty.fetch("foo") // error + val foo: Any = Rec.empty.fetch("foo") // TODO // ^ // Match type reduction failed since selector EmptyTuple.type // matches none of the cases From 232923668274adf1b8902fa05dc5686a090463d4 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 20 Mar 2024 14:59:55 +0100 Subject: [PATCH 015/465] Only record in MatchTypeTrace if not previously attempted `recurseWith` can be called with the same scrutinee (even if match type reduction is cached) if it is an applied match alias For example, `Tuple.Head[Tuple.Tail[T]]` will attempt to reduce `Tuple.Tail[T]` twice: - once as an argument of the match alias `Head`, and - once as a scrutinee in body of `Head` (after the substitution). --- .../tools/dotc/core/MatchTypeTrace.scala | 2 +- tests/neg/i12049.check | 12 -------- tests/neg/i17944.check | 30 ------------------- 3 files changed, 1 insertion(+), 43 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index 145c7cf5e9eb..e16a950aa32a 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -78,7 +78,7 @@ object MatchTypeTrace: */ def recurseWith(scrut: Type)(op: => Type)(using Context): Type = ctx.property(MatchTrace) match - case Some(trace) => + case Some(trace) if !trace.entries.contains(TryReduce(scrut)) => val prev = trace.entries trace.entries = TryReduce(scrut) :: prev val res = op diff --git a/tests/neg/i12049.check b/tests/neg/i12049.check index b9d3a8434015..e0c2d498f119 100644 --- a/tests/neg/i12049.check +++ b/tests/neg/i12049.check @@ -46,12 +46,6 @@ | | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] | case EmptyTuple => EmptyTuple - | trying to reduce Reverse[A *: EmptyTuple.type] - | failed since selector A *: EmptyTuple.type - | matches none of the cases - | - | case t1 *: t2 *: ts => 
Tuple.Concat[Reverse[ts], (t2, t1)] - | case EmptyTuple => EmptyTuple | | longer explanation available when compiling with `-explain` -- [E172] Type Error: tests/neg/i12049.scala:24:20 --------------------------------------------------------------------- @@ -95,12 +89,6 @@ | | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] | case EmptyTuple => EmptyTuple - | trying to reduce Reverse[A *: EmptyTuple.type] - | failed since selector A *: EmptyTuple.type - | matches none of the cases - | - | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] - | case EmptyTuple => EmptyTuple -- [E008] Not Found Error: tests/neg/i12049.scala:28:21 ---------------------------------------------------------------- 28 |val _ = (??? : M[B]).length // error | ^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg/i17944.check b/tests/neg/i17944.check index 80dfaac8c4c8..c969edccb46b 100644 --- a/tests/neg/i17944.check +++ b/tests/neg/i17944.check @@ -14,33 +14,3 @@ | Therefore, reduction cannot advance to the remaining case | | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] - | trying to reduce test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)] - | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] - | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) - | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) - | and cannot be shown to be disjoint from it either. 
- | Therefore, reduction cannot advance to the remaining case - | - | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] - | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] - | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) - | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) - | and cannot be shown to be disjoint from it either. - | Therefore, reduction cannot advance to the remaining case - | - | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] - | trying to reduce test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)] - | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] - | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) - | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) - | and cannot be shown to be disjoint from it either. - | Therefore, reduction cannot advance to the remaining case - | - | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] - | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] - | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) - | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) - | and cannot be shown to be disjoint from it either. 
- | Therefore, reduction cannot advance to the remaining case - | - | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] From 2beb67e908941ff71aafce163c3c0df766cb1622 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 26 Mar 2024 12:45:23 +0100 Subject: [PATCH 016/465] Do match type reduction atPhaseNoLater than ElimOpaque If a match type pattern is an opaque type, we use its bounds when checking the validity of the pattern. Following the ElimOpaque phase however, the pattern is beta-reduced (as normal applied type aliases), which may result in an illegal pattern. --- compiler/src/dotty/tools/dotc/core/Phases.scala | 4 ++++ compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- tests/pos/i19434.scala | 11 +++++++++++ 3 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i19434.scala diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index c704846a82da..043c5beb9076 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -220,6 +220,7 @@ object Phases { private var myPatmatPhase: Phase = uninitialized private var myElimRepeatedPhase: Phase = uninitialized private var myElimByNamePhase: Phase = uninitialized + private var myElimOpaquePhase: Phase = uninitialized private var myExtensionMethodsPhase: Phase = uninitialized private var myExplicitOuterPhase: Phase = uninitialized private var myGettersPhase: Phase = uninitialized @@ -245,6 +246,7 @@ object Phases { final def patmatPhase: Phase = myPatmatPhase final def elimRepeatedPhase: Phase = myElimRepeatedPhase final def elimByNamePhase: Phase = myElimByNamePhase + final def elimOpaquePhase: Phase = myElimOpaquePhase final def extensionMethodsPhase: Phase = myExtensionMethodsPhase final def explicitOuterPhase: Phase = myExplicitOuterPhase final def gettersPhase: Phase = myGettersPhase @@ -272,6 +274,7 @@ object Phases { 
myRefChecksPhase = phaseOfClass(classOf[RefChecks]) myElimRepeatedPhase = phaseOfClass(classOf[ElimRepeated]) myElimByNamePhase = phaseOfClass(classOf[ElimByName]) + myElimOpaquePhase = phaseOfClass(classOf[ElimOpaque]) myExtensionMethodsPhase = phaseOfClass(classOf[ExtensionMethods]) myErasurePhase = phaseOfClass(classOf[Erasure]) myElimErasedValueTypePhase = phaseOfClass(classOf[ElimErasedValueType]) @@ -511,6 +514,7 @@ object Phases { def refchecksPhase(using Context): Phase = ctx.base.refchecksPhase def elimRepeatedPhase(using Context): Phase = ctx.base.elimRepeatedPhase def elimByNamePhase(using Context): Phase = ctx.base.elimByNamePhase + def elimOpaquePhase(using Context): Phase = ctx.base.elimOpaquePhase def extensionMethodsPhase(using Context): Phase = ctx.base.extensionMethodsPhase def explicitOuterPhase(using Context): Phase = ctx.base.explicitOuterPhase def gettersPhase(using Context): Phase = ctx.base.gettersPhase diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index e38fbbb4b355..f6210b3a9883 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5026,7 +5026,7 @@ object Types extends TypeUtils { private def thisMatchType = this - def reduced(using Context): Type = { + def reduced(using Context): Type = atPhaseNoLater(elimOpaquePhase) { def contextInfo(tp: Type): Type = tp match { case tp: TypeParamRef => diff --git a/tests/pos/i19434.scala b/tests/pos/i19434.scala new file mode 100644 index 000000000000..e8595fa252d0 --- /dev/null +++ b/tests/pos/i19434.scala @@ -0,0 +1,11 @@ + +object Test: + + object Named: + opaque type Named[name <: String & Singleton, A] >: A = A + + type DropNames[T <: Tuple] = T match + case Named.Named[_, x] *: xs => x *: DropNames[xs] + case _ => T + + def f[T <: Tuple]: DropNames[T] = ??? 
\ No newline at end of file From 49571faa98739d209f5c3c6ef88e9c7950c50242 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 19 Mar 2024 18:38:49 +0100 Subject: [PATCH 017/465] Fix validity period of derived SingleDenotations When running: val f: ( => Int) => Int = i => i ; f(1) twice in the REPL, the second time crashed with a ClassCastException. The difference is that in the second run, the denotation for `f.apply` is created from the SymDenotation for `Function1#apply` which already exists and is known to be valid in every phase, but that doesn't mean that the derived denotation for `f.apply` has the same validity: unlike the SymDenotation it needs to be transformed (in particular to run the `ElimByName` transformer). It turns out that there were multiple places in the compiler where we created a new denotation with a validity based on the existing one when this was not legitimate. I've gone through all these places and replaced them by `currentStablePeriod`. Fixes #18756. --- .../src/dotty/tools/dotc/core/Denotations.scala | 14 +++++++------- .../dotty/tools/dotc/core/SymDenotations.scala | 6 +++--- compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- .../dotty/tools/dotc/transform/Erasure.scala | 3 ++- compiler/test-resources/repl/i18756 | 17 +++++++++++++++++ 5 files changed, 30 insertions(+), 12 deletions(-) create mode 100644 compiler/test-resources/repl/i18756 diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index bd92fa814a6e..8610d2095655 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -580,7 +580,7 @@ object Denotations { /** A non-overloaded denotation */ abstract class SingleDenotation(symbol: Symbol, initInfo: Type, isType: Boolean) extends Denotation(symbol, initInfo, isType) { - protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation + 
protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation final def name(using Context): Name = symbol.name @@ -1162,11 +1162,11 @@ object Denotations { prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix) { validFor = initValidFor override def hasUniqueSym: Boolean = true - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation = if isRefinedMethod then - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + new JointRefDenotation(s, i, currentStablePeriod, pre, isRefinedMethod) else - new UniqueRefDenotation(s, i, validFor, pre) + new UniqueRefDenotation(s, i, currentStablePeriod, pre) } class JointRefDenotation( @@ -1177,15 +1177,15 @@ object Denotations { override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix) { validFor = initValidFor override def hasUniqueSym: Boolean = false - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation = + new JointRefDenotation(s, i, currentStablePeriod, pre, isRefinedMethod) } class ErrorDenotation(using Context) extends NonSymSingleDenotation(NoSymbol, NoType, NoType) { override def exists: Boolean = false override def hasUniqueSym: Boolean = false validFor = Period.allInRun(ctx.runId) - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation = this } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala 
b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 14ba05568735..5304c9efadc0 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1624,11 +1624,11 @@ object SymDenotations { // ----- copies and transforms ---------------------------------------- - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation = if isRefinedMethod then - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + new JointRefDenotation(s, i, currentStablePeriod, pre, isRefinedMethod) else - new UniqueRefDenotation(s, i, validFor, pre) + new UniqueRefDenotation(s, i, currentStablePeriod, pre) /** Copy this denotation, overriding selective fields */ final def copySymDenotation( diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 701a6360fd3d..b8bd081f20dc 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -905,7 +905,7 @@ object Types extends TypeUtils { def goSuper(tp: SuperType) = go(tp.underlying) match { case d: JointRefDenotation => typr.println(i"redirecting super.$name from $tp to ${d.symbol.showLocated}") - new UniqueRefDenotation(d.symbol, tp.memberInfo(d.symbol), d.validFor, pre) + new UniqueRefDenotation(d.symbol, tp.memberInfo(d.symbol), currentStablePeriod, pre) case d => d } diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 9fdffb0ed537..01fc423b0076 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -13,6 +13,7 @@ import core.Types.* import core.Names.* import core.StdNames.* import core.NameOps.* +import core.Periods.currentStablePeriod 
import core.NameKinds.{AdaptedClosureName, BodyRetainerName, DirectMethName} import core.Scopes.newScopeWith import core.Decorators.* @@ -132,7 +133,7 @@ class Erasure extends Phase with DenotTransformer { } case ref: JointRefDenotation => new UniqueRefDenotation( - ref.symbol, transformInfo(ref.symbol, ref.symbol.info), ref.validFor, ref.prefix) + ref.symbol, transformInfo(ref.symbol, ref.symbol.info), currentStablePeriod, ref.prefix) case _ => ref.derivedSingleDenotation(ref.symbol, transformInfo(ref.symbol, ref.symbol.info)) } diff --git a/compiler/test-resources/repl/i18756 b/compiler/test-resources/repl/i18756 new file mode 100644 index 000000000000..56be353e67f3 --- /dev/null +++ b/compiler/test-resources/repl/i18756 @@ -0,0 +1,17 @@ +scala> object A { val f: ( => Int) => Int = i => i ; f(1) } +// defined object A + +scala> A.f(1) +val res0: Int = 1 + +scala> A.f(1) +val res1: Int = 1 + +scala> object B { val f: ( => Int) => Int = i => i ; f(1) } +// defined object B + +scala> B.f(1) +val res2: Int = 1 + +scala> B.f(1) +val res3: Int = 1 From 32437191d8985a0fa1146b8ae9e477c3ef608cbe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Tue, 26 Mar 2024 17:21:58 +0100 Subject: [PATCH 018/465] Set base version 3.5.0-RC1 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index e3b4ed789a1f..d9c224790b12 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -85,7 +85,7 @@ object Build { val referenceVersion = "3.4.0" - val baseVersion = "3.4.2-RC1" + val baseVersion = "3.5.0-RC1" // LTS or Next val versionLine = "Next" From 01e715d9a8e33960291b78796dc989a23333ed6e Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 26 Mar 2024 18:48:37 +0100 Subject: [PATCH 019/465] Re-lub also hard union types in simplify Simplify had some elaborate condition that prevented hard union types to be recomputed with a lub. I am not sure why that was. 
In the concrete scenario of i10693.scala, we had an explicitly union result type `B | A` where `A` and `B` are type parameters. So that is a hard union type. Then `A` was instantiated by `Int | String` and `B` was instantiated by `String | Int`. Re-forming the lub of that union would have eliminated one pair, but since the union type was hard tyat was not done. On the other hand I see no reason why hard unions should not be re-lubbed. Hard unions are about preventing the widening of or types with a join. I don't see a connection with avoiding re-lubbing. Fixes #10693 --- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 11 ++--------- compiler/src/dotty/tools/dotc/typer/Namer.scala | 3 ++- tests/pos/i10693.scala | 8 ++++++++ 3 files changed, 12 insertions(+), 10 deletions(-) create mode 100644 tests/pos/i10693.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 587c52688456..1bec455c5495 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -157,15 +157,8 @@ object TypeOps: tp.derivedAlias(simplify(tp.alias, theMap)) case AndType(l, r) if !ctx.mode.is(Mode.Type) => simplify(l, theMap) & simplify(r, theMap) - case tp @ OrType(l, r) - if !ctx.mode.is(Mode.Type) - && (tp.isSoft || l.isBottomType || r.isBottomType) => - // Normalize A | Null and Null | A to A even if the union is hard (i.e. - // explicitly declared), but not if -Yexplicit-nulls is set. The reason is - // that in this case the normal asSeenFrom machinery is not prepared to deal - // with Nulls (which have no base classes). Under -Yexplicit-nulls, we take - // corrective steps, so no widening is wanted. 
- simplify(l, theMap) | simplify(r, theMap) + case tp @ OrType(l, r) if !ctx.mode.is(Mode.Type) => + TypeComparer.lub(simplify(l, theMap), simplify(r, theMap), isSoft = tp.isSoft) case tp @ CapturingType(parent, refs) => if !ctx.mode.is(Mode.Type) && refs.subCaptures(parent.captureSet, frozen = true).isOK diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 24721f1cd758..c1bde8d4fd3c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1961,7 +1961,8 @@ class Namer { typer: Typer => else // don't strip @uncheckedVariance annot for default getters TypeOps.simplify(tp.widenTermRefExpr, - if defaultTp.exists then TypeOps.SimplifyKeepUnchecked() else null) match + if defaultTp.exists then TypeOps.SimplifyKeepUnchecked() else null) + match case ctp: ConstantType if sym.isInlineVal => ctp case tp => TypeComparer.widenInferred(tp, pt, widenUnions = true) diff --git a/tests/pos/i10693.scala b/tests/pos/i10693.scala new file mode 100644 index 000000000000..122984484658 --- /dev/null +++ b/tests/pos/i10693.scala @@ -0,0 +1,8 @@ +def test[A, B](a: A, b: B): A | B = a +val v0 = test("string", 1) +val v1 = test(1, "string") +val v2 = test(v0, v1) +val v3 = test(v1, v0) +val v4 = test(v2, v3) +val v5 = test(v3, v2) +val v6 = test(v4, v5) From 5bf3227a7adc7d73e3a6fb25fad109987981ecd8 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 26 Mar 2024 18:57:41 +0100 Subject: [PATCH 020/465] Also re-lub unions in widenUnionWithoutNull --- compiler/src/dotty/tools/dotc/core/Types.scala | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 701a6360fd3d..6c47805997a7 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3598,12 +3598,11 @@ object Types extends TypeUtils { 
override def widenUnionWithoutNull(using Context): Type = if myUnionPeriod != ctx.period then - myUnion = - if isSoft then - TypeComparer.lub(tp1.widenUnionWithoutNull, tp2.widenUnionWithoutNull, canConstrain = true, isSoft = isSoft) match - case union: OrType => union.join - case res => res - else derivedOrType(tp1.widenUnionWithoutNull, tp2.widenUnionWithoutNull, soft = isSoft) + val union = TypeComparer.lub( + tp1.widenUnionWithoutNull, tp2.widenUnionWithoutNull, canConstrain = isSoft, isSoft = isSoft) + myUnion = union match + case union: OrType if isSoft => union.join + case _ => union if !isProvisional then myUnionPeriod = ctx.period myUnion From 9c80a7cfae4c98b797d2e7101eb551771e2896b1 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 27 Mar 2024 10:20:56 +0100 Subject: [PATCH 021/465] Replace mergeIfSuper Replace mergeIfSuper by a different algorithm that is more efficient. We drop or-summands in both arguments of a lub that are subsumed by the other. This avoids expesnive recusive calls to lub or expensive comparisons with union types on the right. 
--- .../dotty/tools/dotc/core/TypeComparer.scala | 132 +++++++++--------- tests/semanticdb/metac.expect | 4 +- 2 files changed, 69 insertions(+), 67 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index b677dae3a38b..39e9fcea4e0f 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2352,8 +2352,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } /** The greatest lower bound of two types */ - def glb(tp1: Type, tp2: Type): Type = /*>|>*/ trace(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true) /*<|<*/ { - if (tp1 eq tp2) tp1 + def glb(tp1: Type, tp2: Type): Type = // trace(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true): + if tp1 eq tp2 then tp1 else if !tp1.exists || (tp1 eq WildcardType) then tp2 else if !tp2.exists || (tp2 eq WildcardType) then tp1 else if tp1.isAny && !tp2.isLambdaSub || tp1.isAnyKind || isBottom(tp2) then tp2 @@ -2366,12 +2366,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val tp2a = dropIfSuper(tp2, tp1) if tp2a ne tp2 then glb(tp1, tp2a) else tp2 match // normalize to disjunctive normal form if possible. 
- case tp2 @ OrType(tp21, tp22) => - lub(tp1 & tp21, tp1 & tp22, isSoft = tp2.isSoft) + case tp2 @ OrType(tp2L, tp2R) => + lub(tp1 & tp2L, tp1 & tp2R, isSoft = tp2.isSoft) case _ => tp1 match - case tp1 @ OrType(tp11, tp12) => - lub(tp11 & tp2, tp12 & tp2, isSoft = tp1.isSoft) + case tp1 @ OrType(tp1L, tp1R) => + lub(tp1L & tp2, tp1R & tp2, isSoft = tp1.isSoft) case tp1: ConstantType => tp2 match case tp2: ConstantType => @@ -2386,8 +2386,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling NothingType case _ => andType(tp1, tp2) case _ => andType(tp1, tp2) + end mergedGlb + mergedGlb(dropExpr(tp1.stripLazyRef), dropExpr(tp2.stripLazyRef)) - } + end glb def widenInUnions(using Context): Boolean = migrateTo3 || ctx.erasedTypes @@ -2396,14 +2398,23 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * @param canConstrain If true, new constraints might be added to simplify the lub. * @param isSoft If the lub is a union, this determines whether it's a soft union. */ - def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true): Type = /*>|>*/ trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain, isSoft=$isSoft)", subtyping, show = true) /*<|<*/ { - if (tp1 eq tp2) tp1 + def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true): Type = // trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain, isSoft=$isSoft)", subtyping, show = true): + if tp1 eq tp2 then tp1 else if !tp1.exists || (tp2 eq WildcardType) then tp1 else if !tp2.exists || (tp1 eq WildcardType) then tp2 else if tp1.isAny && !tp2.isLambdaSub || tp1.isAnyKind || isBottom(tp2) then tp1 else if tp2.isAny && !tp1.isLambdaSub || tp2.isAnyKind || isBottom(tp1) then tp2 else - def mergedLub(tp1: Type, tp2: Type): Type = { + def mergedLub(tp1: Type, tp2: Type): Type = + // First, if tp1 and tp2 are the same singleton type, return one of them. 
+ if tp1.isSingleton && isSubType(tp1, tp2, whenFrozen = !canConstrain) then + return tp2 + if tp2.isSingleton && isSubType(tp2, tp1, whenFrozen = !canConstrain) then + return tp1 + + // Second, handle special cases when tp1 and tp2 are disjunctions of + // singleton types. This saves time otherwise spent in + // costly subtype comparisons performed in dropIfSub below. tp1.atoms match case Atoms.Range(lo1, hi1) if !widenInUnions => tp2.atoms match @@ -2413,20 +2424,24 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if (hi1 & hi2).isEmpty then return orType(tp1, tp2, isSoft = isSoft) case none => case none => - val t1 = mergeIfSuper(tp1, tp2, canConstrain) - if (t1.exists) return t1 - - val t2 = mergeIfSuper(tp2, tp1, canConstrain) - if (t2.exists) return t2 - def widen(tp: Type) = if (widenInUnions) tp.widen else tp.widenIfUnstable + // Third, try to simplify after widening as follows: + // 1. Drop all or-factors in tp2 that are subtypes of an or-factor + // in tp1, yielding tp2Final. + // 2. Drop all or-factors in tp1 that are subtypes of an or-factor + // in tp2Final, yielding tp1Final. + // 3. 
Combine the two final types in an OrType + def widen(tp: Type) = + if widenInUnions then tp.widen else tp.widenIfUnstable val tp1w = widen(tp1) val tp2w = widen(tp2) - if ((tp1 ne tp1w) || (tp2 ne tp2w)) lub(tp1w, tp2w, canConstrain = canConstrain, isSoft = isSoft) - else orType(tp1w, tp2w, isSoft = isSoft) // no need to check subtypes again - } + val tp2Final = dropIfSub(tp2w, tp1w, canConstrain) + val tp1Final = dropIfSub(tp1w, tp2Final, canConstrain) + recombine(tp1Final, tp2Final, orType(_, _, isSoft = isSoft)) + end mergedLub + mergedLub(dropExpr(tp1.stripLazyRef), dropExpr(tp2.stripLazyRef)) - } + end lub /** Try to produce joint arguments for a lub `A[T_1, ..., T_n] | A[T_1', ..., T_n']` using * the following strategies: @@ -2488,60 +2503,47 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling Nil } - private def recombineAnd(tp: AndType, tp1: Type, tp2: Type) = - if (!tp1.exists) tp2 - else if (!tp2.exists) tp1 - else tp.derivedAndType(tp1, tp2) + private def recombine(tp1: Type, tp2: Type, rebuild: (Type, Type) => Type): Type = + if !tp1.exists then tp2 + else if !tp2.exists then tp1 + else rebuild(tp1, tp2) + + private def recombine(tp1: Type, tp2: Type, tp: AndOrType): Type = + recombine(tp1, tp2, tp.derivedAndOrType) /** If some (&-operand of) `tp` is a supertype of `sub` replace it with `NoType`. 
*/ private def dropIfSuper(tp: Type, sub: Type): Type = - if (isSubTypeWhenFrozen(sub, tp)) NoType - else tp match { + + def isSuperOf(sub: Type): Boolean = sub match + case AndType(sub1, sub2) => isSuperOf(sub1) || isSuperOf(sub2) + case sub: TypeVar if sub.isInstantiated => isSuperOf(sub.inst) + case _ => isSubTypeWhenFrozen(sub, tp) + + tp match case tp @ AndType(tp1, tp2) => - recombineAnd(tp, dropIfSuper(tp1, sub), dropIfSuper(tp2, sub)) + recombine(dropIfSuper(tp1, sub), dropIfSuper(tp2, sub), tp) + case tp: TypeVar if tp.isInstantiated => + dropIfSuper(tp.inst, sub) case _ => - tp - } + if isSuperOf(sub) then NoType else tp + end dropIfSuper - /** Merge `t1` into `tp2` if t1 is a subtype of some &-summand of tp2. - */ - private def mergeIfSub(tp1: Type, tp2: Type): Type = - if (isSubTypeWhenFrozen(tp1, tp2)) tp1 - else tp2 match { - case tp2 @ AndType(tp21, tp22) => - val lower1 = mergeIfSub(tp1, tp21) - if (lower1 eq tp21) tp2 - else if (lower1.exists) lower1 & tp22 - else { - val lower2 = mergeIfSub(tp1, tp22) - if (lower2 eq tp22) tp2 - else if (lower2.exists) tp21 & lower2 - else NoType - } - case _ => - NoType - } + private def dropIfSub(tp: Type, sup: Type, canConstrain: Boolean): Type = - /** Merge `tp1` into `tp2` if tp1 is a supertype of some |-summand of tp2. - * @param canConstrain If true, new constraints might be added to make the merge possible. 
- */ - private def mergeIfSuper(tp1: Type, tp2: Type, canConstrain: Boolean): Type = - if (isSubType(tp2, tp1, whenFrozen = !canConstrain)) tp1 - else tp2 match { - case tp2 @ OrType(tp21, tp22) => - val higher1 = mergeIfSuper(tp1, tp21, canConstrain) - if (higher1 eq tp21) tp2 - else if (higher1.exists) lub(higher1, tp22, isSoft = tp2.isSoft) - else { - val higher2 = mergeIfSuper(tp1, tp22, canConstrain) - if (higher2 eq tp22) tp2 - else if (higher2.exists) lub(tp21, higher2, isSoft = tp2.isSoft) - else NoType - } + def isSubOf(sup: Type): Boolean = sup match + case OrType(sup1, sup2) => isSubOf(sup1) || isSubOf(sup2) + case sup: TypeVar if sup.isInstantiated => isSubOf(sup.inst) + case _ => isSubType(tp, sup, whenFrozen = !canConstrain) + + tp match + case tp @ OrType(tp1, tp2) => + recombine(dropIfSub(tp1, sup, canConstrain), dropIfSub(tp2, sup, canConstrain), tp) + case tp: TypeVar if tp.isInstantiated => + dropIfSub(tp.inst, sup, canConstrain) case _ => - NoType - } + if isSubOf(sup) then NoType else tp + end dropIfSub /** There's a window of vulnerability between ElimByName and Erasure where some * ExprTypes `=> T` that appear as parameters of function types are not yet converted diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index d1eabaa95bf7..2120cc633da8 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2020,7 +2020,7 @@ Symbols: example/InstrumentTyper# => class InstrumentTyper extends Object { self: AnyRef & InstrumentTyper => +5 decls } example/InstrumentTyper#AnnotatedType# => type AnnotatedType = Int @param example/InstrumentTyper#``(). => primary ctor (): InstrumentTyper -example/InstrumentTyper#all(). => method all => List[Float | Double | List[Nothing] | Boolean | Unit | Char | String | LinkOption | Int | Long | Class[Option[Int]]] +example/InstrumentTyper#all(). 
=> method all => List[Char | String | LinkOption | Int | Long | Class[Option[Int]] | Float | Double | Boolean | Unit | List[Nothing]] example/InstrumentTyper#clazzOf. => final val method clazzOf Option[Int] example/InstrumentTyper#singletonType(). => method singletonType (param x: Predef.type): Nothing example/InstrumentTyper#singletonType().(x) => param x: Predef.type @@ -2082,7 +2082,7 @@ Occurrences: [24:37..24:40): Int -> scala/Int# Synthetics: -[8:12..8:16):List => *.apply[Float | Double | List[Nothing] | Boolean | Unit | Char | String | LinkOption | Int | Long | Class[Option[Int]]] +[8:12..8:16):List => *.apply[Char | String | LinkOption | Int | Long | Class[Option[Int]] | Float | Double | Boolean | Unit | List[Nothing]] [20:4..20:8):List => *.apply[Nothing] expect/InventedNames.scala From ad871eea3398fa282ea7d6b6f44ce085ee05404b Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 13 Mar 2024 11:42:05 +0100 Subject: [PATCH 022/465] Remove experimental `MainAnnotation`/`newMain` (replaced with `MacroAnnotation`) `MainAnnotation` and its implementation `newMain` predate `MacroAnnotation`. The `MacroAnnotation` is subsumed feature and allows much more flexibility. `MainAnnotation` and `newMain` could be reimplemented as a macro annotation in an external library. 
See SIP-63: https://github.com/scala/improvement-proposals/pull/80 --- .../dotty/tools/dotc/ast/MainProxies.scala | 326 +-------------- .../dotty/tools/dotc/core/Definitions.scala | 6 - .../src/dotty/tools/dotc/typer/Checking.scala | 2 +- .../src/dotty/tools/dotc/typer/Namer.scala | 1 - .../reference/experimental/main-annotation.md | 4 +- .../src/scala/annotation/MainAnnotation.scala | 129 ------ library/src/scala/annotation/newMain.scala | 389 ------------------ .../completion/CompletionKeywordSuite.scala | 8 +- tests/neg/main-annotation-currying.scala | 8 - tests/neg/main-annotation-generic.scala | 8 - .../neg/main-annotation-implicit-given.scala | 13 - .../neg/main-annotation-mainannotation.scala | 3 - .../neg/main-annotation-multiple-annot.scala | 8 - tests/neg/main-annotation-nonmethod.scala | 11 - tests/neg/main-annotation-nonstatic.scala | 4 - .../main-annotation-unknown-parser-1.scala | 12 - .../main-annotation-unknown-parser-2.scala | 27 -- .../stdlibExperimentalDefinitions.scala | 22 - tests/run/main-annotation-birthday.scala | 35 -- tests/run/main-annotation-dash-dash.scala | 28 -- .../run/main-annotation-default-value-1.scala | 25 -- .../run/main-annotation-default-value-2.scala | 36 -- tests/run/main-annotation-example.scala | 65 --- tests/run/main-annotation-flags.scala | 44 -- tests/run/main-annotation-help-override.scala | 56 --- tests/run/main-annotation-help.scala | 178 -------- .../main-annotation-homemade-annot-1.scala | 49 --- .../main-annotation-homemade-annot-2.scala | 52 --- .../main-annotation-homemade-annot-3.scala | 26 -- .../main-annotation-homemade-annot-4.scala | 27 -- .../main-annotation-homemade-annot-5.scala | 28 -- .../main-annotation-homemade-annot-6.scala | 65 --- .../main-annotation-homemade-parser-1.scala | 30 -- .../main-annotation-homemade-parser-2.scala | 33 -- .../main-annotation-homemade-parser-3.scala | 29 -- .../main-annotation-homemade-parser-4.scala | 50 --- .../main-annotation-homemade-parser-5.scala | 29 -- 
tests/run/main-annotation-multiple.scala | 28 -- tests/run/main-annotation-named-params.scala | 30 -- tests/run/main-annotation-newMain.scala | 323 --------------- ...n-annotation-no-parameters-no-parens.scala | 23 -- tests/run/main-annotation-no-parameters.scala | 23 -- tests/run/main-annotation-overload.scala | 35 -- tests/run/main-annotation-param-annot-1.scala | 111 ----- tests/run/main-annotation-param-annot-2.scala | 63 --- ...nnotation-param-annot-invalid-params.scala | 48 --- tests/run/main-annotation-return-type-1.scala | 26 -- tests/run/main-annotation-return-type-2.scala | 29 -- tests/run/main-annotation-short-name.scala | 25 -- tests/run/main-annotation-simple.scala | 22 - tests/run/main-annotation-top-level.scala | 37 -- tests/run/main-annotation-types.scala | 35 -- tests/run/main-annotation-vararg-1.scala | 30 -- tests/run/main-annotation-vararg-2.scala | 33 -- tests/run/main-annotation-wrong-param-1.scala | 26 -- .../main-annotation-wrong-param-names.scala | 26 -- .../main-annotation-wrong-param-number.scala | 26 -- .../main-annotation-wrong-param-type.scala | 28 -- tests/run/main-calculator-example.scala | 67 --- 59 files changed, 7 insertions(+), 2953 deletions(-) delete mode 100644 library/src/scala/annotation/MainAnnotation.scala delete mode 100644 library/src/scala/annotation/newMain.scala delete mode 100644 tests/neg/main-annotation-currying.scala delete mode 100644 tests/neg/main-annotation-generic.scala delete mode 100644 tests/neg/main-annotation-implicit-given.scala delete mode 100644 tests/neg/main-annotation-mainannotation.scala delete mode 100644 tests/neg/main-annotation-multiple-annot.scala delete mode 100644 tests/neg/main-annotation-nonmethod.scala delete mode 100644 tests/neg/main-annotation-nonstatic.scala delete mode 100644 tests/neg/main-annotation-unknown-parser-1.scala delete mode 100644 tests/neg/main-annotation-unknown-parser-2.scala delete mode 100644 tests/run/main-annotation-birthday.scala delete mode 100644 
tests/run/main-annotation-dash-dash.scala delete mode 100644 tests/run/main-annotation-default-value-1.scala delete mode 100644 tests/run/main-annotation-default-value-2.scala delete mode 100644 tests/run/main-annotation-example.scala delete mode 100644 tests/run/main-annotation-flags.scala delete mode 100644 tests/run/main-annotation-help-override.scala delete mode 100644 tests/run/main-annotation-help.scala delete mode 100644 tests/run/main-annotation-homemade-annot-1.scala delete mode 100644 tests/run/main-annotation-homemade-annot-2.scala delete mode 100644 tests/run/main-annotation-homemade-annot-3.scala delete mode 100644 tests/run/main-annotation-homemade-annot-4.scala delete mode 100644 tests/run/main-annotation-homemade-annot-5.scala delete mode 100644 tests/run/main-annotation-homemade-annot-6.scala delete mode 100644 tests/run/main-annotation-homemade-parser-1.scala delete mode 100644 tests/run/main-annotation-homemade-parser-2.scala delete mode 100644 tests/run/main-annotation-homemade-parser-3.scala delete mode 100644 tests/run/main-annotation-homemade-parser-4.scala delete mode 100644 tests/run/main-annotation-homemade-parser-5.scala delete mode 100644 tests/run/main-annotation-multiple.scala delete mode 100644 tests/run/main-annotation-named-params.scala delete mode 100644 tests/run/main-annotation-newMain.scala delete mode 100644 tests/run/main-annotation-no-parameters-no-parens.scala delete mode 100644 tests/run/main-annotation-no-parameters.scala delete mode 100644 tests/run/main-annotation-overload.scala delete mode 100644 tests/run/main-annotation-param-annot-1.scala delete mode 100644 tests/run/main-annotation-param-annot-2.scala delete mode 100644 tests/run/main-annotation-param-annot-invalid-params.scala delete mode 100644 tests/run/main-annotation-return-type-1.scala delete mode 100644 tests/run/main-annotation-return-type-2.scala delete mode 100644 tests/run/main-annotation-short-name.scala delete mode 100644 
tests/run/main-annotation-simple.scala delete mode 100644 tests/run/main-annotation-top-level.scala delete mode 100644 tests/run/main-annotation-types.scala delete mode 100644 tests/run/main-annotation-vararg-1.scala delete mode 100644 tests/run/main-annotation-vararg-2.scala delete mode 100644 tests/run/main-annotation-wrong-param-1.scala delete mode 100644 tests/run/main-annotation-wrong-param-names.scala delete mode 100644 tests/run/main-annotation-wrong-param-number.scala delete mode 100644 tests/run/main-annotation-wrong-param-type.scala delete mode 100644 tests/run/main-calculator-example.scala diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala index 8ee75cbf364b..7bf83d548c97 100644 --- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala +++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala @@ -12,11 +12,6 @@ import Annotations.Annotation object MainProxies { - /** Generate proxy classes for @main functions and @myMain functions where myMain <:< MainAnnotation */ - def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - mainAnnotationProxies(stats) ++ mainProxies(stats) - } - /** Generate proxy classes for @main functions. * A function like * @@ -35,7 +30,7 @@ object MainProxies { * catch case err: ParseError => showError(err) * } */ - private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { import tpd.* def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => @@ -127,323 +122,4 @@ object MainProxies { result } - private type DefaultValueSymbols = Map[Int, Symbol] - private type ParameterAnnotationss = Seq[Seq[Annotation]] - - /** - * Generate proxy classes for main functions. - * A function like - * - * /** - * * Lorem ipsum dolor sit amet - * * consectetur adipiscing elit. 
- * * - * * @param x my param x - * * @param ys all my params y - * */ - * @myMain(80) def f( - * @myMain.Alias("myX") x: S, - * y: S, - * ys: T* - * ) = ... - * - * would be translated to something like - * - * final class f { - * static def main(args: Array[String]): Unit = { - * val annotation = new myMain(80) - * val info = new Info( - * name = "f", - * documentation = "Lorem ipsum dolor sit amet consectetur adipiscing elit.", - * parameters = Seq( - * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))), - * new scala.annotation.MainAnnotation.Parameter("y", "S", true, false, "", Seq()), - * new scala.annotation.MainAnnotation.Parameter("ys", "T", false, true, "all my params y", Seq()) - * ) - * ), - * val command = annotation.command(info, args) - * if command.isDefined then - * val cmd = command.get - * val args0: () => S = annotation.argGetter[S](info.parameters(0), cmd(0), None) - * val args1: () => S = annotation.argGetter[S](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) - * val args2: () => Seq[T] = annotation.varargGetter[T](info.parameters(2), cmd.drop(2)) - * annotation.run(() => f(args0(), args1(), args2()*)) - * } - * } - */ - private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - import tpd.* - - /** - * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this - * point of the compilation, they must be explicitly passed by [[mainProxy]]. 
- */ - def defaultValueSymbols(scope: Tree, funSymbol: Symbol): DefaultValueSymbols = - scope match { - case TypeDef(_, template: Template) => - template.body.flatMap((_: Tree) match { - case dd: DefDef if dd.name.is(DefaultGetterName) && dd.name.firstPart == funSymbol.name => - val DefaultGetterName.NumberedInfo(index) = dd.name.info: @unchecked - List(index -> dd.symbol) - case _ => Nil - }).toMap - case _ => Map.empty - } - - /** Computes the list of main methods present in the code. */ - def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap { - case stat: DefDef => - val sym = stat.symbol - sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match { - case Nil => - Nil - case _ :: Nil => - val paramAnnotations = stat.paramss.flatMap(_.map( - valdef => valdef.symbol.annotations.filter(_.matches(defn.MainAnnotationParameterAnnotation)) - )) - (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil - case mainAnnot :: others => - report.error(em"method cannot have multiple main annotations", mainAnnot.tree) - Nil - } - case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => - mainMethods(stat, impl.body) - case _ => - Nil - } - - // Assuming that the top-level object was already generated, all main methods will have a scope - mainMethods(EmptyTree, stats).flatMap(mainAnnotationProxy) - } - - private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = { - val mainAnnot = mainFun.getAnnotation(defn.MainAnnotationClass).get - def pos = mainFun.sourcePos - - val documentation = new Documentation(docComment) - - /** () => value */ - def unitToValue(value: Tree): Tree = - val defDef = DefDef(nme.ANON_FUN, List(Nil), TypeTree(), value) - Block(defDef, Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) - - 
/** Generate a list of trees containing the ParamInfo instantiations. - * - * A ParamInfo has the following shape - * ``` - * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))) - * ``` - */ - def parameterInfos(mt: MethodType): List[Tree] = - extension (tree: Tree) def withProperty(sym: Symbol, args: List[Tree]) = - Apply(Select(tree, sym.name), args) - - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val param = paramName.toString - val paramType0 = if formal.isRepeatedParam then formal.argTypes.head.dealias else formal.dealias - val paramType = paramType0.dealias - val paramTypeOwner = paramType.typeSymbol.owner - val paramTypeStr = - if paramTypeOwner == defn.EmptyPackageClass then paramType.show - else paramTypeOwner.showFullName + "." + paramType.show - val hasDefault = defaultValueSymbols.contains(idx) - val isRepeated = formal.isRepeatedParam - val paramDoc = documentation.argDocs.getOrElse(param, "") - val paramAnnots = - val annotationTrees = paramAnnotations(idx).map(instantiateAnnotation).toList - Apply(ref(defn.SeqModule.termRef), annotationTrees) - - val constructorArgs = List(param, paramTypeStr, hasDefault, isRepeated, paramDoc) - .map(value => Literal(Constant(value))) - - New(TypeTree(defn.MainAnnotationParameter.typeRef), List(constructorArgs :+ paramAnnots)) - - end parameterInfos - - /** - * Creates a list of references and definitions of arguments. - * The goal is to create the - * `val args0: () => S = annotation.argGetter[S](0, cmd(0), None)` - * part of the code. 
- */ - def argValDefs(mt: MethodType): List[ValDef] = - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val argName = nme.args ++ idx.toString - val isRepeated = formal.isRepeatedParam - val formalType = if isRepeated then formal.argTypes.head else formal - val getterName = if isRepeated then nme.varargGetter else nme.argGetter - val defaultValueGetterOpt = defaultValueSymbols.get(idx) match - case None => ref(defn.NoneModule.termRef) - case Some(dvSym) => - val value = unitToValue(ref(dvSym.termRef)) - Apply(ref(defn.SomeClass.companionModule.termRef), value) - val argGetter0 = TypeApply(Select(Ident(nme.annotation), getterName), TypeTree(formalType) :: Nil) - val index = Literal(Constant(idx)) - val paramInfo = Apply(Select(Ident(nme.info), nme.parameters), index) - val argGetter = - if isRepeated then Apply(argGetter0, List(paramInfo, Apply(Select(Ident(nme.cmd), nme.drop), List(index)))) - else Apply(argGetter0, List(paramInfo, Apply(Ident(nme.cmd), List(index)), defaultValueGetterOpt)) - ValDef(argName, TypeTree(), argGetter) - end argValDefs - - - /** Create a list of argument references that will be passed as argument to the main method. - * `args0`, ...`argn*` - */ - def argRefs(mt: MethodType): List[Tree] = - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val argRef = Apply(Ident(nme.args ++ idx.toString), Nil) - if formal.isRepeatedParam then repeated(argRef) else argRef - end argRefs - - - /** Turns an annotation (e.g. `@main(40)`) into an instance of the class (e.g. `new scala.main(40)`). 
*/ - def instantiateAnnotation(annot: Annotation): Tree = - val argss = { - def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match { - case Apply(t, args: List[tpd.Tree]) => recurse(t, extractArgs(args) :: acc) - case _ => acc - } - - def extractArgs(args: List[tpd.Tree]): List[Tree] = - args.flatMap { - case Typed(SeqLiteral(varargs, _), _) => varargs.map(arg => TypedSplice(arg)) - case arg: Select if arg.name.is(DefaultGetterName) => Nil // Ignore default values, they will be added later by the compiler - case arg => List(TypedSplice(arg)) - } - - recurse(annot.tree, Nil) - } - - New(TypeTree(annot.symbol.typeRef), argss) - end instantiateAnnotation - - def generateMainClass(mainCall: Tree, args: List[Tree], parameterInfos: List[Tree]): TypeDef = - val cmdInfo = - val nameTree = Literal(Constant(mainFun.showName)) - val docTree = Literal(Constant(documentation.mainDoc)) - val paramInfos = Apply(ref(defn.SeqModule.termRef), parameterInfos) - New(TypeTree(defn.MainAnnotationInfo.typeRef), List(List(nameTree, docTree, paramInfos))) - - val annotVal = ValDef( - nme.annotation, - TypeTree(), - instantiateAnnotation(mainAnnot) - ) - val infoVal = ValDef( - nme.info, - TypeTree(), - cmdInfo - ) - val command = ValDef( - nme.command, - TypeTree(), - Apply( - Select(Ident(nme.annotation), nme.command), - List(Ident(nme.info), Ident(nme.args)) - ) - ) - val argsVal = ValDef( - nme.cmd, - TypeTree(), - Select(Ident(nme.command), nme.get) - ) - val run = Apply(Select(Ident(nme.annotation), nme.run), mainCall) - val body0 = If( - Select(Ident(nme.command), nme.isDefined), - Block(argsVal :: args, run), - EmptyTree - ) - val body = Block(List(annotVal, infoVal, command), body0) // TODO add `if (cmd.nonEmpty)` - - val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) - .withFlags(Param) - /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. 
- * The annotations will be retype-checked in another scope that may not have the same imports. - */ - def insertTypeSplices = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match - case tree: tpd.Ident @unchecked => TypedSplice(tree) - case tree => super.transform(tree) - } - val annots = mainFun.annotations - .filterNot(_.matches(defn.MainAnnotationClass)) - .map(annot => insertTypeSplices.transform(annot.tree)) - val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) - .withFlags(JavaStatic) - .withAnnotations(annots) - val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) - val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) - .withFlags(Final | Invisible) - mainCls.withSpan(mainAnnot.tree.span.toSynthetic) - end generateMainClass - - if (!mainFun.owner.isStaticOwner) - report.error(em"main method is not statically accessible", pos) - None - else mainFun.info match { - case _: ExprType => - Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) - case mt: MethodType => - if (mt.isImplicitMethod) - report.error(em"main method cannot have implicit parameters", pos) - None - else mt.resType match - case restpe: MethodType => - report.error(em"main method cannot be curried", pos) - None - case _ => - Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) - case _: PolyType => - report.error(em"main method cannot have type parameters", pos) - None - case _ => - report.error(em"main can only annotate a method", pos) - None - } - } - - /** A class responsible for extracting the docstrings of a method. */ - private class Documentation(docComment: Option[Comment]): - import util.CommentParsing.* - - /** The main part of the documentation. */ - lazy val mainDoc: String = _mainDoc - /** The parameters identified by @param. Maps from parameter name to its documentation. 
*/ - lazy val argDocs: Map[String, String] = _argDocs - - private var _mainDoc: String = "" - private var _argDocs: Map[String, String] = Map() - - docComment match { - case Some(comment) => if comment.isDocComment then parseDocComment(comment.raw) else _mainDoc = comment.raw - case None => - } - - private def cleanComment(raw: String): String = - var lines: Seq[String] = raw.trim.nn.split('\n').nn.toSeq - lines = lines.map(l => l.substring(skipLineLead(l, -1), l.length).nn.trim.nn) - var s = lines.foldLeft("") { - case ("", s2) => s2 - case (s1, "") if s1.last == '\n' => s1 // Multiple newlines are kept as single newlines - case (s1, "") => s1 + '\n' - case (s1, s2) if s1.last == '\n' => s1 + s2 - case (s1, s2) => s1 + ' ' + s2 - } - s.replaceAll(raw"\[\[", "").nn.replaceAll(raw"\]\]", "").nn.trim.nn - - private def parseDocComment(raw: String): Unit = - // Positions of the sections (@) in the docstring - val tidx: List[(Int, Int)] = tagIndex(raw) - - // Parse main comment - var mainComment: String = raw.substring(skipLineLead(raw, 0), startTag(raw, tidx)).nn - _mainDoc = cleanComment(mainComment) - - // Parse arguments comments - val argsCommentsSpans: Map[String, (Int, Int)] = paramDocs(raw, "@param", tidx) - val argsCommentsTextSpans = argsCommentsSpans.view.mapValues(extractSectionText(raw, _)) - val argsCommentsTexts = argsCommentsTextSpans.mapValues({ case (beg, end) => raw.substring(beg, end).nn }) - _argDocs = argsCommentsTexts.mapValues(cleanComment(_)).toMap - end Documentation } diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 789e744fbfc9..f187498da1fb 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -926,12 +926,6 @@ class Definitions { @tu lazy val XMLTopScopeModule: Symbol = requiredModule("scala.xml.TopScope") - @tu lazy val MainAnnotationClass: ClassSymbol = 
requiredClass("scala.annotation.MainAnnotation") - @tu lazy val MainAnnotationInfo: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Info") - @tu lazy val MainAnnotationParameter: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Parameter") - @tu lazy val MainAnnotationParameterAnnotation: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.ParameterAnnotation") - @tu lazy val MainAnnotationCommand: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Command") - @tu lazy val CommandLineParserModule: Symbol = requiredModule("scala.util.CommandLineParser") @tu lazy val CLP_ParseError: ClassSymbol = CommandLineParserModule.requiredClass("ParseError").typeRef.symbol.asClass @tu lazy val CLP_parseArgument: Symbol = CommandLineParserModule.requiredMethod("parseArgument") diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 56f67574a72d..96e2e937927d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -1508,7 +1508,7 @@ trait Checking { val annotCls = Annotations.annotClass(annot) val concreteAnnot = Annotations.ConcreteAnnotation(annot) val pos = annot.srcPos - if (annotCls == defn.MainAnnot || concreteAnnot.matches(defn.MainAnnotationClass)) { + if (annotCls == defn.MainAnnot) { if (!sym.isRealMethod) report.error(em"main annotation cannot be applied to $sym", pos) if (!sym.owner.is(Module) || !sym.owner.isStatic) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 24721f1cd758..5a5a53590d62 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1260,7 +1260,6 @@ class Namer { typer: Typer => && annot.symbol != defn.TailrecAnnot && annot.symbol != defn.MainAnnot && !annot.symbol.derivesFrom(defn.MacroAnnotationClass) - && 
!annot.symbol.derivesFrom(defn.MainAnnotationClass) }) if forwarder.isType then diff --git a/docs/_docs/reference/experimental/main-annotation.md b/docs/_docs/reference/experimental/main-annotation.md index 7cc105be06f9..98a8479132da 100644 --- a/docs/_docs/reference/experimental/main-annotation.md +++ b/docs/_docs/reference/experimental/main-annotation.md @@ -4,6 +4,8 @@ title: "MainAnnotation" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/main-annotation.html --- +> This feature was removed in https://github.com/scala/scala3/pull/19937. It was subsumed by macro annotations. See SIP-63 https://github.com/scala/improvement-proposals/pull/80. + `MainAnnotation` provides a generic way to define main annotations such as `@main`. When a users annotates a method with an annotation that extends `MainAnnotation` a class with a `main` method will be generated. The main method will contain the code needed to parse the command line arguments and run the application. @@ -93,6 +95,6 @@ import scala.util.CommandLineParser.FromString[T] val result = program() println("result: " + result) println("executed program") - + end myMain ``` diff --git a/library/src/scala/annotation/MainAnnotation.scala b/library/src/scala/annotation/MainAnnotation.scala deleted file mode 100644 index 29e650e50b74..000000000000 --- a/library/src/scala/annotation/MainAnnotation.scala +++ /dev/null @@ -1,129 +0,0 @@ -package scala.annotation - -/** MainAnnotation provides the functionality for a compiler-generated main class. - * It links a compiler-generated main method (call it compiler-main) to a user - * written main method (user-main). - * The protocol of calls from compiler-main is as follows: - * - * - create a `command` with the command line arguments, - * - for each parameter of user-main, a call to `command.argGetter`, - * or `command.varargGetter` if is a final varargs parameter, - * - a call to `command.run` with the closure of user-main applied to all arguments. 
- * - * Example: - * ```scala sc:nocompile - * /** Sum all the numbers - * * - * * @param first Fist number to sum - * * @param rest The rest of the numbers to sum - * */ - * @myMain def sum(first: Int, second: Int = 0, rest: Int*): Int = first + second + rest.sum - * ``` - * generates - * ```scala sc:nocompile - * object foo { - * def main(args: Array[String]): Unit = { - * val mainAnnot = new myMain() - * val info = new Info( - * name = "foo.main", - * documentation = "Sum all the numbers", - * parameters = Seq( - * new Parameter("first", "scala.Int", hasDefault=false, isVarargs=false, "Fist number to sum"), - * new Parameter("rest", "scala.Int" , hasDefault=false, isVarargs=true, "The rest of the numbers to sum") - * ) - * ) - * val mainArgsOpt = mainAnnot.command(info, args) - * if mainArgsOpt.isDefined then - * val mainArgs = mainArgsOpt.get - * val args0 = mainAnnot.argGetter[Int](info.parameters(0), mainArgs(0), None) // using parser Int - * val args1 = mainAnnot.argGetter[Int](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) // using parser Int - * val args2 = mainAnnot.varargGetter[Int](info.parameters(2), mainArgs.drop(2)) // using parser Int - * mainAnnot.run(() => sum(args0(), args1(), args2()*)) - * } - * } - * ``` - * - * @param Parser The class used for argument string parsing and arguments into a `T` - * @param Result The required result type of the main method. - * If this type is Any or Unit, any type will be accepted. - */ -@experimental -trait MainAnnotation[Parser[_], Result] extends StaticAnnotation: - import MainAnnotation.{Info, Parameter} - - /** Process the command arguments before parsing them. - * - * Return `Some` of the sequence of arguments that will be parsed to be passed to the main method. - * This sequence needs to have the same length as the number of parameters of the main method (i.e. `info.parameters.size`). 
- * If there is a varags parameter, then the sequence must be at least of length `info.parameters.size - 1`. - * - * Returns `None` if the arguments are invalid and parsing and run should be stopped. - * - * @param info The information about the command (name, documentation and info about parameters) - * @param args The command line arguments - */ - def command(info: Info, args: Seq[String]): Option[Seq[String]] - - /** The getter for the `idx`th argument of type `T` - * - * @param idx The index of the argument - * @param defaultArgument Optional lambda to instantiate the default argument - */ - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using Parser[T]): () => T - - /** The getter for a final varargs argument of type `T*` */ - def varargGetter[T](param: Parameter, args: Seq[String])(using Parser[T]): () => Seq[T] - - /** Run `program` if all arguments are valid if all arguments are valid - * - * @param program A function containing the call to the main method and instantiation of its arguments - */ - def run(program: () => Result): Unit - -end MainAnnotation - -@experimental -object MainAnnotation: - - /** Information about the main method - * - * @param name The name of the main method - * @param documentation The documentation of the main method without the `@param` documentation (see Parameter.documentaion) - * @param parameters Information about the parameters of the main method - */ - @experimental // MiMa does not check scope inherited @experimental - final class Info( - val name: String, - val documentation: String, - val parameters: Seq[Parameter], - ): - - /** If the method ends with a varargs parameter */ - def hasVarargs: Boolean = parameters.nonEmpty && parameters.last.isVarargs - - end Info - - /** Information about a parameter of a main method - * - * @param name The name of the parameter - * @param typeName The name of the parameter's type - * @param hasDefault If the parameter has a default argument - * @param 
isVarargs If the parameter is a varargs parameter (can only be true for the last parameter) - * @param documentation The documentation of the parameter (from `@param` documentation in the main method) - * @param annotations The annotations of the parameter that extend `ParameterAnnotation` - */ - @experimental // MiMa does not check scope inherited @experimental - final class Parameter( - val name: String, - val typeName: String, - val hasDefault: Boolean, - val isVarargs: Boolean, - val documentation: String, - val annotations: Seq[ParameterAnnotation], - ) - - /** Marker trait for annotations that will be included in the Parameter annotations. */ - @experimental // MiMa does not check scope inherited @experimental - trait ParameterAnnotation extends StaticAnnotation - -end MainAnnotation diff --git a/library/src/scala/annotation/newMain.scala b/library/src/scala/annotation/newMain.scala deleted file mode 100644 index 552e4225a648..000000000000 --- a/library/src/scala/annotation/newMain.scala +++ /dev/null @@ -1,389 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -import scala.collection.mutable -import scala.util.CommandLineParser.FromString -import scala.annotation.meta.param - -/** - * The annotation that designates a main function. - * Main functions are entry points for Scala programs. They can be called through a command line interface by using - * the `scala` command, followed by their name and, optionally, their parameters. - * - * The parameters of a main function may have any type `T`, as long as there exists a - * `given util.CommandLineParser.FromString[T]` in the scope. It will be used for parsing the string given as input - * into the correct argument type. 
- * These types already have parsers defined: - * - String, - * - Boolean, - * - Byte, Short, Int, Long, Float, Double. - * - * The parameters of a main function may be passed either by position, or by name. Passing an argument positionally - * means that you give the arguments in the same order as the function's signature. Passing an argument by name means - * that you give the argument right after giving its name. Considering the function - * `@newMain def foo(i: Int, str: String)`, we may have arguments passed: - * - by position: `scala foo 1 abc`, - * - by name: `scala foo -i 1 --str abc` or `scala foo --str abc -i 1`. - * - * A mixture of both is also possible: `scala foo --str abc 1` is equivalent to all previous examples. - * - * Note that main function overloading is not currently supported, i.e. you cannot define two main methods that have - * the same name in the same project. - * - * Special arguments are used to display help regarding a main function: `--help` and `-h`. If used as argument, the program - * will display some useful information about the main function. This help directly uses the ScalaDoc comment - * associated with the function, more precisely its description and the description of the parameters documented with - * `@param`. Note that if a parameter is named `help` or `h`, or if one of the parameters has as alias one of those names, - * the help displaying will be disabled for that argument. - * For example, for `@newMain def foo(help: Boolean)`, `scala foo -h` will display the help, but `scala foo --help` will fail, - * as it will expect a Boolean value after `--help`. - * - * Parameters may be given annotations to add functionalities to the main function: - * - `main.alias` adds other names to a parameter. For example, if a parameter `node` has as aliases - * `otherNode` and `n`, it may be addressed using `--node`, `--otherNode` or `-n`. 
- * - * Here is an example of a main function with annotated parameters: - * `@newMain def foo(@newMain.alias("x") number: Int, @newMain.alias("explanation") s: String)`. The following commands are - * equivalent: - * - `scala foo --number 1 -s abc` - * - `scala foo -x 1 -s abc` - * - `scala foo --number 1 --explanation abc` - * - `scala foo -x 1 --explanation abc` - * - * Boolean parameters are considered flags that do not require the "true" or "false" value to be passed. - * For example, `@newMain def foo(i: Boolean)` can be called as `foo` (where `i=false`) or `foo -i` (where `i=true`). - * - * The special `--` marker can be used to indicate that all following arguments are passed verbatim as positional parameters. - * For example, `@newMain def foo(args: String*)` can be called as `scala foo a b -- -c -d` which implies that `args=Seq("a", "b", "-c", "-d")`. - */ -@experimental -final class newMain extends MainAnnotation[FromString, Any]: - import newMain.* - import MainAnnotation.* - - private val longArgRegex = "--[a-zA-Z][a-zA-Z0-9]+".r - private val shortArgRegex = "-[a-zA-Z]".r - // TODO: what should be considered as an invalid argument? 
- // Consider argument `-3.14`, `--i`, `-name` - private val illFormedName = "--[a-zA-Z]|-[a-zA-Z][a-zA-Z0-9]+".r - /** After this marker, all arguments are positional */ - private inline val positionArgsMarker = "--" - - extension (param: Parameter) - private def aliasNames: Seq[String] = - param.annotations.collect{ case alias: alias => getNameWithMarker(alias.name) } - private def isFlag: Boolean = - param.typeName == "scala.Boolean" - - private def getNameWithMarker(name: String): String = - if name.length > 1 then s"--$name" - else if name.length == 1 then s"-$name" - else assert(false, "invalid name") - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - val names = Names(info) - if Help.shouldPrintDefaultHelp(names, args) then - Help.printUsage(info) - Help.printExplain(info) - None - else - preProcessArgs(info, names, args).orElse { - Help.printUsage(info) - None - } - end command - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = { - if arg.nonEmpty then parse[T](param, arg) - else - assert(param.hasDefault) - - defaultArgument.get - } - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = { - val getters = args.map(arg => parse[T](param, arg)) - () => getters.map(_()) - } - - def run(execProgram: () => Any): Unit = - if !hasParseErrors then execProgram() - - private def preProcessArgs(info: Info, names: Names, args: Seq[String]): Option[Seq[String]] = - var hasError: Boolean = false - def error(msg: String): Unit = { - hasError = true - println(s"Error: $msg") - } - - val (positionalArgs, byNameArgsMap) = - val positionalArgs = List.newBuilder[String] - val byNameArgs = List.newBuilder[(String, String)] - val flagsAdded = mutable.Set.empty[String] - // TODO: once we settle on a spec, we should implement this in a more elegant way - var i = 0 - while i < args.length do - args(i) match - case name @ (longArgRegex() | 
shortArgRegex()) => - if names.isFlagName(name) then - val canonicalName = names.canonicalName(name).get - flagsAdded += canonicalName - byNameArgs += ((canonicalName, "true")) - else if i == args.length - 1 then // last argument -x ot --xyz - error(s"missing argument for ${name}") - else args(i + 1) match - case longArgRegex() | shortArgRegex() | `positionArgsMarker` => - error(s"missing argument for ${name}") - case value => - names.canonicalName(name) match - case Some(canonicalName) => - byNameArgs += ((canonicalName, value)) - case None => - error(s"unknown argument name: $name") - i += 1 // consume `value` - case name @ illFormedName() => - error(s"ill-formed argument name: $name") - case `positionArgsMarker` => - i += 1 // skip `--` - // all args after `--` are positional args - while i < args.length do - positionalArgs += args(i) - i += 1 - case value => - positionalArgs += value - i += 1 - end while - - // Add "false" for all flags not present in the arguments - for - param <- info.parameters - if param.isFlag - name = getNameWithMarker(param.name) - if !flagsAdded.contains(name) - do - byNameArgs += ((name, "false")) - - (positionalArgs.result(), byNameArgs.result().groupMap(_._1)(_._2)) - - // List of arguments in the order they should be passed to the main function - val orderedArgs: List[String] = - def rec(params: List[Parameter], acc: List[String], remainingArgs: List[String]): List[String] = - params match - case Nil => - for (remainingArg <- remainingArgs) error(s"unused argument: $remainingArg") - acc.reverse - case param :: tailParams => - if param.isVarargs then // also last arguments - byNameArgsMap.get(param.name) match - case Some(byNameVarargs) => acc.reverse ::: byNameVarargs.toList ::: remainingArgs - case None => acc.reverse ::: remainingArgs - else byNameArgsMap.get(getNameWithMarker(param.name)) match - case Some(argValues) => - assert(argValues.nonEmpty, s"${param.name} present in byNameArgsMap, but it has no argument value") - if 
argValues.length > 1 then - error(s"more than one value for ${param.name}: ${argValues.mkString(", ")}") - rec(tailParams, argValues.last :: acc, remainingArgs) - - case None => - remainingArgs match - case arg :: rest => - rec(tailParams, arg :: acc, rest) - case Nil => - if !param.hasDefault then - error(s"missing argument for ${param.name}") - rec(tailParams, "" :: acc, Nil) - rec(info.parameters.toList, Nil, positionalArgs) - - if hasError then None - else Some(orderedArgs) - end preProcessArgs - - private var hasParseErrors: Boolean = false - - private def parse[T](param: Parameter, arg: String)(using p: FromString[T]): () => T = - p.fromStringOption(arg) match - case Some(t) => - () => t - case None => - /** Issue an error, and return an uncallable getter */ - println(s"Error: could not parse argument for `${param.name}` of type ${param.typeName.split('.').last}: $arg") - hasParseErrors = true - () => throw new AssertionError("trying to get invalid argument") - - @experimental // MiMa does not check scope inherited @experimental - private object Help: - - /** The name of the special argument to display the method's help. - * If one of the method's parameters is called the same, will be ignored. - */ - private inline val helpArg = "--help" - - /** The short name of the special argument to display the method's help. - * If one of the method's parameters uses the same short name, will be ignored. 
- */ - private inline val shortHelpArg = "-h" - - private inline val maxUsageLineLength = 120 - - def printUsage(info: Info): Unit = - def argsUsage: Seq[String] = - for (param <- info.parameters) - yield { - val canonicalName = getNameWithMarker(param.name) - val namesPrint = (canonicalName +: param.aliasNames).mkString("[", " | ", "]") - val shortTypeName = param.typeName.split('.').last - if param.isVarargs then s"[<$shortTypeName> [<$shortTypeName> [...]]]" - else if param.hasDefault then s"[$namesPrint <$shortTypeName>]" - else if param.isFlag then s"$namesPrint" - else s"$namesPrint <$shortTypeName>" - } - - def wrapArgumentUsages(argsUsage: Seq[String], maxLength: Int): Seq[String] = { - def recurse(args: Seq[String], currentLine: String, acc: Vector[String]): Seq[String] = - (args, currentLine) match { - case (Nil, "") => acc - case (Nil, l) => (acc :+ l) - case (arg +: t, "") => recurse(t, arg, acc) - case (arg +: t, l) if l.length + 1 + arg.length <= maxLength => recurse(t, s"$l $arg", acc) - case (arg +: t, l) => recurse(t, arg, acc :+ l) - } - - recurse(argsUsage, "", Vector()).toList - } - - val printUsageBeginning = s"Usage: ${info.name} " - val argsOffset = printUsageBeginning.length - val printUsages = wrapArgumentUsages(argsUsage, maxUsageLineLength - argsOffset) - - println(printUsageBeginning + printUsages.mkString("\n" + " " * argsOffset)) - end printUsage - - def printExplain(info: Info): Unit = - def shiftLines(s: Seq[String], shift: Int): String = s.map(" " * shift + _).mkString("\n") - - def wrapLongLine(line: String, maxLength: Int): List[String] = { - def recurse(s: String, acc: Vector[String]): Seq[String] = - val lastSpace = s.trim.nn.lastIndexOf(' ', maxLength) - if ((s.length <= maxLength) || (lastSpace < 0)) - acc :+ s - else { - val (shortLine, rest) = s.splitAt(lastSpace) - recurse(rest.trim.nn, acc :+ shortLine) - } - - recurse(line, Vector()).toList - } - - println() - - if (info.documentation.nonEmpty) - 
println(wrapLongLine(info.documentation, maxUsageLineLength).mkString("\n")) - if (info.parameters.nonEmpty) { - val argNameShift = 2 - val argDocShift = argNameShift + 2 - - println("Arguments:") - for param <- info.parameters do - val canonicalName = getNameWithMarker(param.name) - val otherNames = param.aliasNames match { - case Seq() => "" - case names => names.mkString("(", ", ", ") ") - } - val argDoc = StringBuilder(" " * argNameShift) - argDoc.append(s"$canonicalName $otherNames- ${param.typeName.split('.').last}") - if param.isVarargs then argDoc.append(" (vararg)") - else if param.hasDefault then argDoc.append(" (optional)") - - if (param.documentation.nonEmpty) { - val shiftedDoc = - param.documentation.split("\n").nn - .map(line => shiftLines(wrapLongLine(line.nn, maxUsageLineLength - argDocShift), argDocShift)) - .mkString("\n") - argDoc.append("\n").append(shiftedDoc) - } - - println(argDoc) - } - end printExplain - - def shouldPrintDefaultHelp(names: Names, args: Seq[String]): Boolean = - val helpIsOverridden = names.canonicalName(helpArg).isDefined - val shortHelpIsOverridden = names.canonicalName(shortHelpArg).isDefined - (!helpIsOverridden && args.contains(helpArg)) || - (!shortHelpIsOverridden && args.contains(shortHelpArg)) - - end Help - - @experimental // MiMa does not check scope inherited @experimental - private class Names(info: Info): - - checkNames() - checkFlags() - - private lazy val namesToCanonicalName: Map[String, String] = - info.parameters.flatMap(param => - val canonicalName = getNameWithMarker(param.name) - (canonicalName -> canonicalName) +: param.aliasNames.map(_ -> canonicalName) - ).toMap - - private lazy val canonicalFlagsNames: Set[String] = - info.parameters.collect { - case param if param.isFlag => getNameWithMarker(param.name) - }.toSet - - def canonicalName(name: String): Option[String] = namesToCanonicalName.get(name) - - def isFlagName(name: String): Boolean = - 
namesToCanonicalName.get(name).map(canonicalFlagsNames.contains).contains(true) - - override def toString(): String = s"Names($namesToCanonicalName)" - - private def checkNames(): Unit = - def checkDuplicateNames() = - val nameAndCanonicalName = info.parameters.flatMap { paramInfo => - (getNameWithMarker(paramInfo.name) +: paramInfo.aliasNames).map(_ -> paramInfo.name) - } - val nameToNames = nameAndCanonicalName.groupMap(_._1)(_._2) - for (name, canonicalNames) <- nameToNames if canonicalNames.length > 1 do - throw IllegalArgumentException(s"$name is used for multiple parameters: ${canonicalNames.mkString(", ")}") - def checkValidNames() = - def isValidArgName(name: String): Boolean = - longArgRegex.matches(s"--$name") || shortArgRegex.matches(s"-$name") - for param <- info.parameters do - if !isValidArgName(param.name) then - throw IllegalArgumentException(s"The following argument name is invalid: ${param.name}") - for annot <- param.annotations do - annot match - case alias: alias if !isValidArgName(alias.name) => - throw IllegalArgumentException(s"The following alias is invalid: ${alias.name}") - case _ => - - checkValidNames() - checkDuplicateNames() - - private def checkFlags(): Unit = - for param <- info.parameters if param.isFlag && param.hasDefault do - throw IllegalArgumentException(s"@newMain flag parameters cannot have a default value. `${param.name}` has a default value.") - - end Names - -end newMain - -object newMain: - - /** Alias name for the parameter. - * - * If the name has one character, then it is a short name (e.g. `-i`). - * If the name has more than one characters, then it is a long name (e.g. `--input`). 
- */ - @experimental - final class alias(val name: String) extends MainAnnotation.ParameterAnnotation - -end newMain diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala index c828cd4e6e67..15c449904074 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala @@ -364,9 +364,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: | val x: Map[Int, new@@] |} """.stripMargin, - "", - // to avoid newMain annotation - filter = str => !str.contains("newMain") + "" ) // TODO: Should provide empty completions // The issue is that the tree looks the same as for `case @@` (it doesn't see `new`) @@ -382,9 +380,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: | } |} """.stripMargin, - "", - // to avoid newMain annotation - filter = str => !str.contains("newMain") + "" ) @Test def `super-typeapply` = diff --git a/tests/neg/main-annotation-currying.scala b/tests/neg/main-annotation-currying.scala deleted file mode 100644 index fa8e9593849c..000000000000 --- a/tests/neg/main-annotation-currying.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - - @newMain def add(num: Int)(inc: Int): Unit = // error - println(s"$num + $inc = ${num + inc}") - -end myProgram diff --git a/tests/neg/main-annotation-generic.scala b/tests/neg/main-annotation-generic.scala deleted file mode 100644 index 6f951056f1b2..000000000000 --- a/tests/neg/main-annotation-generic.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - - @newMain def nop[T](t: T): T = // error - t - -end myProgram diff --git a/tests/neg/main-annotation-implicit-given.scala b/tests/neg/main-annotation-implicit-given.scala deleted file mode 100644 index 
2a7d8202acf5..000000000000 --- a/tests/neg/main-annotation-implicit-given.scala +++ /dev/null @@ -1,13 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - implicit val x: Int = 2 - given Int = 3 - - @newMain def showImplicit(implicit num: Int): Unit = // error - println(num) - - @newMain def showUsing(using num: Int): Unit = // error - println(num) - -end myProgram diff --git a/tests/neg/main-annotation-mainannotation.scala b/tests/neg/main-annotation-mainannotation.scala deleted file mode 100644 index 21e37d1779af..000000000000 --- a/tests/neg/main-annotation-mainannotation.scala +++ /dev/null @@ -1,3 +0,0 @@ -import scala.annotation.MainAnnotation - -@MainAnnotation def f(i: Int, n: Int) = () // error diff --git a/tests/neg/main-annotation-multiple-annot.scala b/tests/neg/main-annotation-multiple-annot.scala deleted file mode 100644 index faec8162e9c4..000000000000 --- a/tests/neg/main-annotation-multiple-annot.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - - @newMain @newMain def add1(num: Int, inc: Int): Unit = // error - println(s"$num + $inc = ${num + inc}") - -end myProgram diff --git a/tests/neg/main-annotation-nonmethod.scala b/tests/neg/main-annotation-nonmethod.scala deleted file mode 100644 index 2e46098a9ac5..000000000000 --- a/tests/neg/main-annotation-nonmethod.scala +++ /dev/null @@ -1,11 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - - @newMain val n = 2 // error - - @newMain class A // error - - @newMain val f = ((s: String) => println(s)) // error - -end myProgram diff --git a/tests/neg/main-annotation-nonstatic.scala b/tests/neg/main-annotation-nonstatic.scala deleted file mode 100644 index 68d3ba2b3569..000000000000 --- a/tests/neg/main-annotation-nonstatic.scala +++ /dev/null @@ -1,4 +0,0 @@ -import scala.annotation.newMain - -class A: - @newMain def foo(bar: Int) = () // error diff --git a/tests/neg/main-annotation-unknown-parser-1.scala 
b/tests/neg/main-annotation-unknown-parser-1.scala deleted file mode 100644 index 75ff2ceac444..000000000000 --- a/tests/neg/main-annotation-unknown-parser-1.scala +++ /dev/null @@ -1,12 +0,0 @@ -import scala.annotation.newMain - -class MyNumber(val value: Int) { - def +(other: MyNumber): MyNumber = MyNumber(value + other.value) -} - -object myProgram: - - @newMain def add(num: MyNumber, inc: MyNumber): Unit = // error - println(s"$num + $inc = ${num + inc}") - -end myProgram diff --git a/tests/neg/main-annotation-unknown-parser-2.scala b/tests/neg/main-annotation-unknown-parser-2.scala deleted file mode 100644 index a5681c39419b..000000000000 --- a/tests/neg/main-annotation-unknown-parser-2.scala +++ /dev/null @@ -1,27 +0,0 @@ -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -object myProgram: - - @newMain def add(num: Test.MyNumber, inc: Test.MyNumber): Unit = // error - val numV = Test.value(num) - val incV = Test.value(inc) - println(s"$numV + $incV = ${numV + incV}") - -end myProgram - - -object Test: - opaque type MyNumber = Int - - def create(n: Int): MyNumber = n - def value(n: MyNumber): Int = n - - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index ca48dd2d8a5f..bc44cbfec18f 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -25,28 +25,6 @@ val experimentalDefinitionInLibrary = Set( "scala.util.TupledFunction", "scala.util.TupledFunction$", - //// New feature: main annotation generalization - // Can be stabilized when language feature is stabilized. - // Needs user feedback. 
- // Should argGetter/varargGetter be simplified? - // Should we have better support for main annotation macros? - "scala.annotation.MainAnnotation", - "scala.annotation.MainAnnotation$", - "scala.annotation.MainAnnotation$.Info", - "scala.annotation.MainAnnotation$.Parameter", - "scala.annotation.MainAnnotation$.ParameterAnnotation", - - - //// New feature: prototype of new version of @main - // This will never be stabilized. When it is ready it should replace the old @main annotation (requires scala.annotation.MainAnnotation). - // Needs user feedback. - "scala.annotation.newMain", - "scala.annotation.newMain$", - "scala.annotation.newMain$.alias", - "scala.annotation.newMain.Help", - "scala.annotation.newMain.Help$", - "scala.annotation.newMain.Names", - //// New feature: capture checking "scala.annotation.capability", "scala.annotation.retains", diff --git a/tests/run/main-annotation-birthday.scala b/tests/run/main-annotation-birthday.scala deleted file mode 100644 index 32cf28784ced..000000000000 --- a/tests/run/main-annotation-birthday.scala +++ /dev/null @@ -1,35 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -/** - * Wishes a happy birthday to lucky people! - * - * @param age the age of the people whose birthday it is - * @param name the name of the luckiest person! 
- * @param others all the other lucky people - */ -@newMain def happyBirthday(age: Int, name: String, others: String*) = - val suffix = - age % 100 match - case 11 | 12 | 13 => "th" - case _ => - age % 10 match - case 1 => "st" - case 2 => "nd" - case 3 => "rd" - case _ => "th" - val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name") - for other <- others do bldr.append(" and ").append(other) - println(bldr) - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("happyBirthday") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("23", "Lisa", "Peter")) -end Test diff --git a/tests/run/main-annotation-dash-dash.scala b/tests/run/main-annotation-dash-dash.scala deleted file mode 100644 index 3fe0f47983d5..000000000000 --- a/tests/run/main-annotation-dash-dash.scala +++ /dev/null @@ -1,28 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -object myProgram: - - @newMain def foo(str: String, rest: String*): Unit = - println(s"str = $str") - println(s"rest = ${rest.mkString(",")}") - println() - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("foo") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("x", "y", "z")) - callMain(Array("--", "x", "y", "z")) - callMain(Array("--", "-a", "x", "y", "z")) - callMain(Array("x", "--", "y", "z")) - callMain(Array("--str", "y", "--", "z")) - callMain(Array("--str", "--", "y", "z")) // missing argument for `--str` -end Test diff --git a/tests/run/main-annotation-default-value-1.scala b/tests/run/main-annotation-default-value-1.scala deleted file mode 100644 index cf4ba79e1aff..000000000000 --- a/tests/run/main-annotation-default-value-1.scala +++ /dev/null @@ -1,25 +0,0 @@ 
-//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int = 0, inc: Int = 1): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) - callMain(Array("2")) - callMain(Array()) -end Test diff --git a/tests/run/main-annotation-default-value-2.scala b/tests/run/main-annotation-default-value-2.scala deleted file mode 100644 index 8b60e6197405..000000000000 --- a/tests/run/main-annotation-default-value-2.scala +++ /dev/null @@ -1,36 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - @newMain def alwaysPassParam(forbiddenParam: Int = throw new IllegalStateException("This should not be evaluated!")): Unit = - println(forbiddenParam) - -end myProgram - -object Test: - def hasCauseIllegalStateException(e: Throwable): Boolean = - e.getCause match { - case null => false - case _: IllegalStateException => true - case e: Throwable => hasCauseIllegalStateException(e) - } - - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("alwaysPassParam") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("42")) - try { - callMain(Array()) - println("This should not be printed") - } - catch { - case e: Exception if hasCauseIllegalStateException(e) => println("OK") - } -end Test diff --git a/tests/run/main-annotation-example.scala b/tests/run/main-annotation-example.scala deleted file mode 100644 index 926496e595e7..000000000000 --- 
a/tests/run/main-annotation-example.scala +++ /dev/null @@ -1,65 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import collection.mutable -import scala.util.CommandLineParser.FromString - -/** Sum all the numbers - * - * @param first Fist number to sum - * @param rest The rest of the numbers to sum - */ -@myMain def sum(first: Int, rest: Int*): Int = first + rest.sum - - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("sum") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("23", "2", "3")) -end Test - -@experimental -class myMain extends MainAnnotation[FromString, Int]: - import MainAnnotation.{ Info, Parameter } - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - if args.contains("--help") then - println(info.documentation) - None // do not parse or run the program - else if info.parameters.exists(_.hasDefault) then - println("Default arguments are not supported") - None - else if info.hasVarargs then - val numPlainArgs = info.parameters.length - 1 - if numPlainArgs > args.length then - println("Not enough arguments") - None - else - Some(args) - else - if info.parameters.length > args.length then - println("Not enough arguments") - None - else if info.parameters.length < args.length then - println("Too many arguments") - None - else - Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using parser: FromString[T]): () => T = - () => parser.fromString(arg) - - def varargGetter[T](param: Parameter, args: Seq[String])(using parser: FromString[T]): () => Seq[T] = - () => args.map(arg => parser.fromString(arg)) - - def run(program: () => Int): Unit = - println("executing program") - val result = program() - println("result: " + result) - println("executed program") -end myMain diff --git 
a/tests/run/main-annotation-flags.scala b/tests/run/main-annotation-flags.scala deleted file mode 100644 index 8a579e6e2d00..000000000000 --- a/tests/run/main-annotation-flags.scala +++ /dev/null @@ -1,44 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - @newMain def shortFlags(a: Boolean, b: Boolean): Unit = - println(s"shortFlags: a = $a, b = $b") - - @newMain def longFlags(flag1: Boolean, flag2: Boolean): Unit = - println(s"longFlags: flag1 = $flag1, flag2 = $flag2") - - @newMain def mixedFlags(a: Boolean, flag: Boolean): Unit = - println(s"mixedFlags: a = $a, flag = $flag") - -end myProgram - -object Test: - def callMain(name: String, args: String*): Unit = - val clazz = Class.forName(name) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args.toArray) - - def main(args: Array[String]): Unit = - callMain("shortFlags") - callMain("shortFlags", "-a") - callMain("shortFlags", "-a", "-b") - callMain("shortFlags", "true", "false") - callMain("shortFlags", "-a", "true") - callMain("shortFlags", "-b", "true") - - - callMain("longFlags") - callMain("longFlags", "--flag1") - callMain("longFlags", "--flag1", "--flag2") - - callMain("mixedFlags") - callMain("mixedFlags", "-a") - callMain("mixedFlags", "-a", "--flag") - - -end Test diff --git a/tests/run/main-annotation-help-override.scala b/tests/run/main-annotation-help-override.scala deleted file mode 100644 index bfff85c5a353..000000000000 --- a/tests/run/main-annotation-help-override.scala +++ /dev/null @@ -1,56 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.annotation.newMain.alias -import scala.util.Try - -object myProgram: - - /** A method that should let --help and -h display help. */ - @newMain def helpOverride1(notHelp: Int) = ??? 
- - /** A method that should let -h display help, but not --help. */ - @newMain def helpOverride2(help: Int) = ??? - - /** A method that should let --help display help, but not -h. */ - @newMain def helpOverride3(h: Int) = ??? - - /** A method that should not let --help and -h display help. */ - @newMain def helpOverride4(help: Int, h: Int) = ??? - - - /** A method that should let -h display help, but not --help. */ - @newMain def helpOverride5(@alias("help") notHelp: Int) = ??? - - /** A method that should let --help display help, but not -h. */ - @newMain def helpOverride6(@alias("h") notHelp: Int) = ??? - - /** A method that should not let --help and -h display help. */ - @newMain def helpOverride7(@alias("help") notHelp: Int, @alias("h") notH: Int) = ??? - - /** A method that should not let --help and -h display help. */ - @newMain def helpOverride8(@alias("help") @alias("h") notHelp: Int) = ??? - - /** A method that should not let --help and -h display help. */ - // Probably the correct way to override help flags. 
- @newMain def helpOverride9(@alias("h") help: Boolean) = println(s"helpOverride9: $help") - -end myProgram - -object Test: - val allClazzes: Seq[Class[?]] = - LazyList.from(1).map(i => Try(Class.forName("helpOverride" + i.toString))).takeWhile(_.isSuccess).map(_.get) - - def callAllMains(args: Array[String]): Unit = - for (clazz <- allClazzes) { - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - } - - def main(args: Array[String]): Unit = - println("##### --help") - callAllMains(Array("--help")) - println("##### -h") - callAllMains(Array("-h")) -end Test diff --git a/tests/run/main-annotation-help.scala b/tests/run/main-annotation-help.scala deleted file mode 100644 index d68bb0d7e874..000000000000 --- a/tests/run/main-annotation-help.scala +++ /dev/null @@ -1,178 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -import scala.util.CommandLineParser.FromString -import scala.util.Try - -class MyNumber(val value: Int): - def +(other: MyNumber): MyNumber = MyNumber(value + other.value) - -class MyGeneric[T](val value: T) - -given FromString[MyNumber] with - override def fromString(s: String): MyNumber = MyNumber(summon[FromString[Int]].fromString(s)) - -given FromString[MyGeneric[Int]] with - override def fromString(s: String): MyGeneric[Int] = MyGeneric(summon[FromString[Int]].fromString(s)) - -object myProgram: - - /** - * Adds two numbers. - */ - @newMain def doc1(num: Int, inc: Int): Unit = () - - /** Adds two numbers. */ - @newMain def doc2(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - */ - @newMain def doc3(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num the first number - * @param inc the second number - */ - @newMain def doc4(num: Int, inc: Int = 1): Unit = () - - /** - * Adds two numbers. 
- * - * @param num the first number - */ - @newMain def doc5(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num - * @param inc - */ - @newMain def doc6(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num the first number - * @param inc the second number - * @return the sum of the two numbers (not really) - */ - @newMain def doc7(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num the first number - * @param inc the second number - * @return the sum of the two numbers (not really) - */ - @newMain def doc8(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. Same as [[doc1]]. - * - * @param num the first number - * @param inc the second number - * @return the sum of the two numbers (not really) - * @see [[doc1]] - */ - @newMain def doc9(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * This should be on another line. - * - * - * - * - * And this also. - * - * - * @param num I might have to write this - * on two lines - * @param inc I might even - * have to write this one - * on three lines - */ - @newMain def doc10(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num the first number - * - * Oh, a new line! - * - * @param inc the second number - * - * And another one! - */ - @newMain def doc11(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. It seems that I have a very long line of documentation and therefore might need to be cut at some point to fit a small terminal screen. - */ - @newMain def doc12(num: Int, inc: Int): Unit = () - - /** - * Addstwonumbers.ItseemsthatIhaveaverylonglineofdocumentationandthereforemightneedtobecutatsomepointtofitasmallterminalscreen. - */ - @newMain def doc13(num: Int, inc: Int): Unit = () - - /** - * Loudly judges the number of argument you gave to this poor function. 
- */ - @newMain def doc14( - arg1: String, arg2: Int, arg3: String, arg4: Int, - arg5: String, arg6: Int, arg7: String, arg8: Int, - arg9: String = "I", arg10: Int = 42, arg11: String = "used", arg12: Int = 0, - arg13: String = "to", arg14: Int = 34, arg15: String = "wonder", arg16: Int* - ): Unit = () - - /** - * Adds two instances of [[MyNumber]]. - * @param myNum my first number to add - * @param myInc my second number to add - */ - @newMain def doc15(myNum: MyNumber, myInc: MyNumber): Unit = () - - /** - * Compares two instances of [[MyGeneric]]. - * @param first my first element - * @param second my second element - */ - @newMain def doc16(first: MyGeneric[Int], second: MyGeneric[Int]): Unit = () - - // This should not be printed in explain! - @newMain def doc17(a: Int, b: Int, c: String): Unit = () - -end myProgram - -object Test: - def callMain1(args: Array[String]): Unit = - val clazz = Class.forName("doc1") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - val allClazzes: Seq[Class[?]] = - LazyList.from(1).map(i => Try(Class.forName("doc" + i.toString))).takeWhile(_.isSuccess).map(_.get) - - def callAllMains(args: Array[String]): Unit = - for (clazz <- allClazzes) { - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - } - - def main(args: Array[String]): Unit = - callMain1(Array("--help")) - callMain1(Array("Some", "garbage", "before", "--help")) - callMain1(Array("--help", "and", "some", "stuff", "after")) - - callAllMains(Array("--help")) -end Test diff --git a/tests/run/main-annotation-homemade-annot-1.scala b/tests/run/main-annotation-homemade-annot-1.scala deleted file mode 100644 index 3106dae4006f..000000000000 --- a/tests/run/main-annotation-homemade-annot-1.scala +++ /dev/null @@ -1,49 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.concurrent._ -import scala.annotation.* -import scala.collection.mutable -import 
ExecutionContext.Implicits.global -import duration._ -import util.CommandLineParser.FromString - -@mainAwait def get(wait: Int): Future[Int] = Future{ - Thread.sleep(1000 * wait) - 42 -} - -@mainAwait def getMany(wait: Int*): Future[Int] = Future{ - Thread.sleep(1000 * wait.sum) - wait.length -} - -object Test: - def callMain(cls: String, args: Array[String]): Unit = - val clazz = Class.forName(cls) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - println(Await.result(get(1), Duration(2, SECONDS))) - callMain("get", Array("1")) - callMain("getMany", Array("1")) - callMain("getMany", Array("0", "1")) -end Test - -@experimental -class mainAwait(timeout: Int = 2) extends MainAnnotation[FromString, Future[Any]]: - import MainAnnotation.* - - // This is a toy example, it only works with positional args - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = - () => p.fromString(arg) - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = - () => for arg <- args yield p.fromString(arg) - - def run(f: () => Future[Any]): Unit = println(Await.result(f(), Duration(timeout, SECONDS))) - -end mainAwait diff --git a/tests/run/main-annotation-homemade-annot-2.scala b/tests/run/main-annotation-homemade-annot-2.scala deleted file mode 100644 index 980241ff93d3..000000000000 --- a/tests/run/main-annotation-homemade-annot-2.scala +++ /dev/null @@ -1,52 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.collection.mutable -import scala.annotation.* -import util.CommandLineParser.FromString - -@myMain()("A") -def foo1(): Unit = println("I was run!") - -@myMain(0)("This should not be printed") -def foo2() = throw new Exception("This should not be run") - 
-@myMain(1)("Purple smart", "Blue fast", "White fashion", "Yellow quiet", "Orange honest", "Pink loud") -def foo3() = println("Here are some colors:") - -@myMain()() -def foo4() = println("This will be printed, but nothing more.") - -object Test: - val allClazzes: Seq[Class[?]] = - LazyList.from(1).map(i => scala.util.Try(Class.forName("foo" + i.toString))).takeWhile(_.isSuccess).map(_.get) - - def callMains(): Unit = - for (clazz <- allClazzes) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]()) - - def main(args: Array[String]) = - callMains() -end Test - -// This is a toy example, it only works with positional args -@experimental -class myMain(runs: Int = 3)(after: String*) extends MainAnnotation[FromString, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = - () => p.fromString(arg) - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = - () => for arg <- args yield p.fromString(arg) - - def run(f: () => Any): Unit = - for (_ <- 1 to runs) - f() - if after.length > 0 then println(after.mkString(", ")) - end run - -end myMain diff --git a/tests/run/main-annotation-homemade-annot-3.scala b/tests/run/main-annotation-homemade-annot-3.scala deleted file mode 100644 index 4a894777c562..000000000000 --- a/tests/run/main-annotation-homemade-annot-3.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import scala.util.CommandLineParser.FromString - -@mainNoArgs def foo() = println("Hello world!") - -object Test: - def main(args: Array[String]) = - val clazz = Class.forName("foo") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]()) -end Test - -@experimental 
-class mainNoArgs extends MainAnnotation[FromString, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = ??? - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = ??? - - def run(program: () => Any): Unit = program() diff --git a/tests/run/main-annotation-homemade-annot-4.scala b/tests/run/main-annotation-homemade-annot-4.scala deleted file mode 100644 index b50e89523475..000000000000 --- a/tests/run/main-annotation-homemade-annot-4.scala +++ /dev/null @@ -1,27 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import scala.util.CommandLineParser.FromString - -@mainManyArgs(1, "B", 3) def foo() = println("Hello world!") - -object Test: - def main(args: Array[String]) = - val clazz = Class.forName("foo") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]()) -end Test - -@experimental -class mainManyArgs(i1: Int, s2: String, i3: Int) extends MainAnnotation[FromString, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = ??? - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = ??? 
- - - def run(program: () => Any): Unit = program() diff --git a/tests/run/main-annotation-homemade-annot-5.scala b/tests/run/main-annotation-homemade-annot-5.scala deleted file mode 100644 index a129a51da7eb..000000000000 --- a/tests/run/main-annotation-homemade-annot-5.scala +++ /dev/null @@ -1,28 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import scala.util.CommandLineParser.FromString - -@mainManyArgs(Some(1)) def foo() = println("Hello world!") -@mainManyArgs(None) def bar() = println("Hello world!") - -object Test: - def main(args: Array[String]) = - for (methodName <- List("foo", "bar")) - val clazz = Class.forName(methodName) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]()) -end Test - -@experimental -class mainManyArgs(o: Option[Int]) extends MainAnnotation[FromString, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = ??? - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = ??? 
- - def run(program: () => Any): Unit = program() diff --git a/tests/run/main-annotation-homemade-annot-6.scala b/tests/run/main-annotation-homemade-annot-6.scala deleted file mode 100644 index 5a92e6382d3d..000000000000 --- a/tests/run/main-annotation-homemade-annot-6.scala +++ /dev/null @@ -1,65 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* - -/** Foo docs */ -@myMain def foo(i: Int, j: String = "2") = println(s"foo($i, $j)") -/** Bar docs - * - * @param i the first parameter - */ -@myMain def bar(@MyParamAnnot(3) i: List[Int], rest: Int*) = println(s"bar($i, ${rest.mkString(", ")})") - -object Test: - def main(args: Array[String]) = - for (methodName <- List("foo", "bar")) - val clazz = Class.forName(methodName) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]("1", "2")) -end Test - -@experimental -class myMain extends MainAnnotation[Make, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - def paramInfoString(paramInfo: Parameter) = - import paramInfo.* - s" Parameter(name=\"$name\", typeName=\"$typeName\", hasDefault=$hasDefault, isVarargs=$isVarargs, documentation=\"$documentation\", annotations=$annotations)" - println( - s"""command( - | ${args.mkString("Array(", ", ", ")")}, - | ${info.name}, - | "${info.documentation}", - | ${info.parameters.map(paramInfoString).mkString("Seq(\n", ",\n", "\n )*")} - |)""".stripMargin) - Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: Make[T]): () => T = - () => p.make - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: Make[T]): () => Seq[T] = - println("varargGetter()") - () => Seq(p.make, p.make) - - def run(f: () => Any): Unit = - println("run()") - f() - println() - -@experimental -case class MyParamAnnot(n: Int) extends MainAnnotation.ParameterAnnotation - -trait Make[T]: - def 
make: T - -given Make[Int] with - def make: Int = 42 - - -given Make[String] with - def make: String = "abc" - -given [T: Make]: Make[List[T]] with - def make: List[T] = List(summon[Make[T]].make) diff --git a/tests/run/main-annotation-homemade-parser-1.scala b/tests/run/main-annotation-homemade-parser-1.scala deleted file mode 100644 index 94d43bf19cc5..000000000000 --- a/tests/run/main-annotation-homemade-parser-1.scala +++ /dev/null @@ -1,30 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -class MyNumber(val value: Int) { - def +(other: MyNumber): MyNumber = MyNumber(value + other.value) -} - -given FromString[MyNumber] with - override def fromString(s: String): MyNumber = MyNumber(summon[FromString[Int]].fromString(s)) - -object myProgram: - - @newMain def add(num: MyNumber, inc: MyNumber): Unit = - println(s"${num.value} + ${inc.value} = ${num.value + inc.value}") - -end myProgram - - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-homemade-parser-2.scala b/tests/run/main-annotation-homemade-parser-2.scala deleted file mode 100644 index 4f40f9b42b27..000000000000 --- a/tests/run/main-annotation-homemade-parser-2.scala +++ /dev/null @@ -1,33 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -given FromString[Test.MyNumber] with - override def fromString(s: String) = Test.create(summon[FromString[Int]].fromString(s)) - -object myProgram: - - @newMain def add(num: Test.MyNumber, inc: Test.MyNumber): Unit = - val numV = Test.value(num) - val incV = Test.value(inc) - println(s"$numV + 
$incV = ${numV + incV}") - -end myProgram - - -object Test: - opaque type MyNumber = Int - - def create(n: Int): MyNumber = n - def value(n: MyNumber): Int = n - - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-homemade-parser-3.scala b/tests/run/main-annotation-homemade-parser-3.scala deleted file mode 100644 index 066e40f1b3a0..000000000000 --- a/tests/run/main-annotation-homemade-parser-3.scala +++ /dev/null @@ -1,29 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -given FromString[Int] with - override def fromString(s: String) = s.toInt + 42 - -object myProgram: - - given FromString[Int] with - override def fromString(s: String) = -1 * s.toInt // Should be ignored, because not top-level - - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-homemade-parser-4.scala b/tests/run/main-annotation-homemade-parser-4.scala deleted file mode 100644 index 668aa040380c..000000000000 --- a/tests/run/main-annotation-homemade-parser-4.scala +++ /dev/null @@ -1,50 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -given [T : FromString]: FromString[Option[T]] with - override def fromString(s: String) = Some(summon[FromString[T]].fromString(s)) - override def 
fromStringOption(s: String) = - try { - Some(fromString(s)) - } - catch { - case _: IllegalArgumentException => Some(None) - } - -given [T : FromString]: FromString[Either[T, String]] with - override def fromString(s: String) = Left(summon[FromString[T]].fromString(s)) - override def fromStringOption(s: String) = - try { - Some(fromString(s)) - } - catch { - case _: IllegalArgumentException => Some(Right(s"Unable to parse argument $s")) - } - -object myProgram: - - @newMain def getOption(o: Option[Int] = Some(42)) = println(o) - - @newMain def getEither(e: Either[Int, String] = Right("No argument given")) = println(e) - -end myProgram - - -object Test: - def call(className: String, args: Array[String]): Unit = - val clazz = Class.forName(className) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - call("getOption", Array("7")) - call("getOption", Array()) - call("getOption", Array("abc")) - println - call("getEither", Array("7")) - call("getEither", Array()) - call("getEither", Array("abc")) -end Test diff --git a/tests/run/main-annotation-homemade-parser-5.scala b/tests/run/main-annotation-homemade-parser-5.scala deleted file mode 100644 index 123631312ef7..000000000000 --- a/tests/run/main-annotation-homemade-parser-5.scala +++ /dev/null @@ -1,29 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -given intParser: FromString[Int => Int] with - override def fromString(s: String) = n => summon[FromString[Int]].fromString(s) + n - -given stringParser: FromString[String => String] with - override def fromString(s: String) = s1 => summon[FromString[String]].fromString(s) + s1 - -object myProgram: - - @newMain def show(getI: Int => Int, getS: String => String) = - println(getI(3)) - println(getS(" world!")) - -end myProgram - -object Test: - def callMain(args: 
Array[String]): Unit = - val clazz = Class.forName("show") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("39", "Hello")) -end Test diff --git a/tests/run/main-annotation-multiple.scala b/tests/run/main-annotation-multiple.scala deleted file mode 100644 index dbc66d0df9ca..000000000000 --- a/tests/run/main-annotation-multiple.scala +++ /dev/null @@ -1,28 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - - /** Subtracts two numbers */ - @newMain def sub(num: Int, inc: Int): Unit = - println(s"$num - $inc = ${num - inc}") - -end myProgram - -object Test: - def callMain(mainMeth: String, args: Array[String]): Unit = - val clazz = Class.forName(mainMeth) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain("add", Array("2", "3")) - callMain("sub", Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-named-params.scala b/tests/run/main-annotation-named-params.scala deleted file mode 100644 index 4cfa2c8049b4..000000000000 --- a/tests/run/main-annotation-named-params.scala +++ /dev/null @@ -1,30 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("--num", "2", "--inc", 
"3")) - callMain(Array("--inc", "3", "--num", "2")) - - callMain(Array("2", "--inc", "3")) - callMain(Array("--num", "2", "3")) - - callMain(Array("--num", "2", "--num", "1", "--inc", "3")) - callMain(Array("--inc", "1", "--num", "2", "--num", "1", "--inc", "3")) -end Test diff --git a/tests/run/main-annotation-newMain.scala b/tests/run/main-annotation-newMain.scala deleted file mode 100644 index 5b00a46ce7e9..000000000000 --- a/tests/run/main-annotation-newMain.scala +++ /dev/null @@ -1,323 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import collection.mutable -import scala.util.CommandLineParser.FromString - -@newMain def happyBirthday(age: Int, name: String, others: String*) = - val suffix = - age % 100 match - case 11 | 12 | 13 => "th" - case _ => - age % 10 match - case 1 => "st" - case 2 => "nd" - case 3 => "rd" - case _ => "th" - val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name") - for other <- others do bldr.append(" and ").append(other) - println(bldr) - - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("happyBirthday") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("23", "Lisa", "Peter")) -end Test - - - -@experimental -final class newMain extends MainAnnotation[FromString, Any]: - import newMain._ - import MainAnnotation._ - - private inline val argMarker = "--" - private inline val shortArgMarker = "-" - - /** The name of the special argument to display the method's help. - * If one of the method's parameters is called the same, will be ignored. - */ - private inline val helpArg = "help" - - /** The short name of the special argument to display the method's help. - * If one of the method's parameters uses the same short name, will be ignored. 
- */ - private inline val shortHelpArg = 'h' - - private inline val maxUsageLineLength = 120 - - private var info: Info = _ // TODO remove this var - - - /** A buffer for all errors */ - private val errors = new mutable.ArrayBuffer[String] - - /** Issue an error, and return an uncallable getter */ - private def error(msg: String): () => Nothing = - errors += msg - () => throw new AssertionError("trying to get invalid argument") - - private def getAliases(param: Parameter): Seq[String] = - param.annotations.collect{ case a: Alias => a }.flatMap(_.aliases) - - private def getAlternativeNames(param: Parameter): Seq[String] = - getAliases(param).filter(nameIsValid(_)) - - private def getShortNames(param: Parameter): Seq[Char] = - getAliases(param).filter(shortNameIsValid(_)).map(_(0)) - - private inline def nameIsValid(name: String): Boolean = - name.length > 1 // TODO add more checks for illegal characters - - private inline def shortNameIsValid(name: String): Boolean = - name.length == 1 && shortNameIsValidChar(name(0)) - - private inline def shortNameIsValidChar(shortName: Char): Boolean = - ('A' <= shortName && shortName <= 'Z') || ('a' <= shortName && shortName <= 'z') - - private def getNameWithMarker(name: String | Char): String = name match { - case c: Char => shortArgMarker + c - case s: String if shortNameIsValid(s) => shortArgMarker + s - case s => argMarker + s - } - - private def getInvalidNames(param: Parameter): Seq[String | Char] = - getAliases(param).filter(name => !nameIsValid(name) && !shortNameIsValid(name)) - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - this.info = info - - val namesToCanonicalName: Map[String, String] = info.parameters.flatMap( - infos => - val names = getAlternativeNames(infos) - val canonicalName = infos.name - if nameIsValid(canonicalName) then (canonicalName +: names).map(_ -> canonicalName) - else names.map(_ -> canonicalName) - ).toMap - val shortNamesToCanonicalName: Map[Char, String] = 
info.parameters.flatMap( - infos => - val names = getShortNames(infos) - val canonicalName = infos.name - if shortNameIsValid(canonicalName) then (canonicalName(0) +: names).map(_ -> canonicalName) - else names.map(_ -> canonicalName) - ).toMap - - val helpIsOverridden = namesToCanonicalName.exists((name, _) => name == helpArg) - val shortHelpIsOverridden = shortNamesToCanonicalName.exists((name, _) => name == shortHelpArg) - - val (positionalArgs, byNameArgs, invalidByNameArgs) = { - def getCanonicalArgName(arg: String): Option[String] = - if arg.startsWith(argMarker) && arg.length > argMarker.length then - namesToCanonicalName.get(arg.drop(argMarker.length)) - else if arg.startsWith(shortArgMarker) && arg.length == shortArgMarker.length + 1 then - shortNamesToCanonicalName.get(arg(shortArgMarker.length)) - else - None - - def isArgName(arg: String): Boolean = - val isFullName = arg.startsWith(argMarker) - val isShortName = arg.startsWith(shortArgMarker) && arg.length == shortArgMarker.length + 1 && shortNameIsValidChar(arg(shortArgMarker.length)) - isFullName || isShortName - - def recurse(remainingArgs: Seq[String], pa: mutable.Queue[String], bna: Seq[(String, String)], ia: Seq[String]): (mutable.Queue[String], Seq[(String, String)], Seq[String]) = - remainingArgs match { - case Seq() => - (pa, bna, ia) - case argName +: argValue +: rest if isArgName(argName) => - getCanonicalArgName(argName) match { - case Some(canonicalName) => recurse(rest, pa, bna :+ (canonicalName -> argValue), ia) - case None => recurse(rest, pa, bna, ia :+ argName) - } - case arg +: rest => - recurse(rest, pa :+ arg, bna, ia) - } - - val (pa, bna, ia) = recurse(args.toSeq, mutable.Queue.empty, Vector(), Vector()) - val nameToArgValues: Map[String, Seq[String]] = if bna.isEmpty then Map.empty else bna.groupMapReduce(_._1)(p => List(p._2))(_ ++ _) - (pa, nameToArgValues, ia) - } - - val argStrings: Seq[Seq[String]] = - for paramInfo <- info.parameters yield { - if (paramInfo.isVarargs) { - 
val byNameGetters = byNameArgs.getOrElse(paramInfo.name, Seq()) - val positionalGetters = positionalArgs.removeAll() - // First take arguments passed by name, then those passed by position - byNameGetters ++ positionalGetters - } else { - byNameArgs.get(paramInfo.name) match - case Some(Nil) => - throw AssertionError(s"${paramInfo.name} present in byNameArgs, but it has no argument value") - case Some(argValues) => - if argValues.length > 1 then - // Do not accept multiple values - // Remove this test to take last given argument - error(s"more than one value for ${paramInfo.name}: ${argValues.mkString(", ")}") - Nil - else - List(argValues.last) - case None => - if positionalArgs.length > 0 then - List(positionalArgs.dequeue()) - else if paramInfo.hasDefault then - Nil - else - error(s"missing argument for ${paramInfo.name}") - Nil - } - } - - // Check aliases unicity - val nameAndCanonicalName = info.parameters.flatMap { - case paramInfo => (paramInfo.name +: getAlternativeNames(paramInfo) ++: getShortNames(paramInfo)).map(_ -> paramInfo.name) - } - val nameToCanonicalNames = nameAndCanonicalName.groupMap(_._1)(_._2) - - for (name, canonicalNames) <- nameToCanonicalNames if canonicalNames.length > 1 do - throw IllegalArgumentException(s"$name is used for multiple parameters: ${canonicalNames.mkString(", ")}") - - // Check aliases validity - val problematicNames = info.parameters.flatMap(getInvalidNames) - if problematicNames.length > 0 then - throw IllegalArgumentException(s"The following aliases are invalid: ${problematicNames.mkString(", ")}") - - // Handle unused and invalid args - for (remainingArg <- positionalArgs) error(s"unused argument: $remainingArg") - for (invalidArg <- invalidByNameArgs) error(s"unknown argument name: $invalidArg") - - val displayHelp = - (!helpIsOverridden && args.contains(getNameWithMarker(helpArg))) || - (!shortHelpIsOverridden && args.contains(getNameWithMarker(shortHelpArg))) - - if displayHelp then - usage() - println() - 
explain() - None - else if errors.nonEmpty then - for msg <- errors do println(s"Error: $msg") - usage() - None - else - Some(argStrings.flatten) - end command - - private def usage(): Unit = - def argsUsage: Seq[String] = - for (infos <- info.parameters) - yield { - val canonicalName = getNameWithMarker(infos.name) - val shortNames = getShortNames(infos).map(getNameWithMarker) - val alternativeNames = getAlternativeNames(infos).map(getNameWithMarker) - val namesPrint = (canonicalName +: alternativeNames ++: shortNames).mkString("[", " | ", "]") - val shortTypeName = infos.typeName.split('.').last - if infos.isVarargs then s"[<$shortTypeName> [<$shortTypeName> [...]]]" - else if infos.hasDefault then s"[$namesPrint <$shortTypeName>]" - else s"$namesPrint <$shortTypeName>" - } - - def wrapArgumentUsages(argsUsage: Seq[String], maxLength: Int): Seq[String] = { - def recurse(args: Seq[String], currentLine: String, acc: Vector[String]): Seq[String] = - (args, currentLine) match { - case (Nil, "") => acc - case (Nil, l) => (acc :+ l) - case (arg +: t, "") => recurse(t, arg, acc) - case (arg +: t, l) if l.length + 1 + arg.length <= maxLength => recurse(t, s"$l $arg", acc) - case (arg +: t, l) => recurse(t, arg, acc :+ l) - } - - recurse(argsUsage, "", Vector()).toList - } - - val usageBeginning = s"Usage: ${info.name} " - val argsOffset = usageBeginning.length - val usages = wrapArgumentUsages(argsUsage, maxUsageLineLength - argsOffset) - - println(usageBeginning + usages.mkString("\n" + " " * argsOffset)) - end usage - - private def explain(): Unit = - inline def shiftLines(s: Seq[String], shift: Int): String = s.map(" " * shift + _).mkString("\n") - - def wrapLongLine(line: String, maxLength: Int): List[String] = { - def recurse(s: String, acc: Vector[String]): Seq[String] = - val lastSpace = s.trim.nn.lastIndexOf(' ', maxLength) - if ((s.length <= maxLength) || (lastSpace < 0)) - acc :+ s - else { - val (shortLine, rest) = s.splitAt(lastSpace) - recurse(rest.trim.nn, 
acc :+ shortLine) - } - - recurse(line, Vector()).toList - } - - if (info.documentation.nonEmpty) - println(wrapLongLine(info.documentation, maxUsageLineLength).mkString("\n")) - if (info.parameters.nonEmpty) { - val argNameShift = 2 - val argDocShift = argNameShift + 2 - - println("Arguments:") - for infos <- info.parameters do - val canonicalName = getNameWithMarker(infos.name) - val shortNames = getShortNames(infos).map(getNameWithMarker) - val alternativeNames = getAlternativeNames(infos).map(getNameWithMarker) - val otherNames = (alternativeNames ++: shortNames) match { - case Seq() => "" - case names => names.mkString("(", ", ", ") ") - } - val argDoc = StringBuilder(" " * argNameShift) - argDoc.append(s"$canonicalName $otherNames- ${infos.typeName.split('.').last}") - if infos.isVarargs then argDoc.append(" (vararg)") - else if infos.hasDefault then argDoc.append(" (optional)") - - if (infos.documentation.nonEmpty) { - val shiftedDoc = - infos.documentation.split("\n").nn - .map(line => shiftLines(wrapLongLine(line.nn, maxUsageLineLength - argDocShift), argDocShift)) - .mkString("\n") - argDoc.append("\n").append(shiftedDoc) - } - - println(argDoc) - } - end explain - - private def convert[T](argName: String, arg: String, p: FromString[T]): () => T = - p.fromStringOption(arg) match - case Some(t) => () => t - case None => error(s"invalid argument for $argName: $arg") - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = { - if arg.nonEmpty then convert(param.name, arg, p) - else defaultArgument match - case Some(defaultGetter) => defaultGetter - case None => error(s"missing argument for ${param.name}") - } - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = { - val getters = args.map(arg => convert(param.name, arg, p)) - () => getters.map(_()) - } - - def run(execProgram: () => Any): Unit = { - if errors.nonEmpty then - for msg <- errors do 
println(s"Error: $msg") - usage() - else - execProgram() - } - -end newMain - -object newMain: - @experimental - final class Alias(val aliases: String*) extends MainAnnotation.ParameterAnnotation -end newMain diff --git a/tests/run/main-annotation-no-parameters-no-parens.scala b/tests/run/main-annotation-no-parameters-no-parens.scala deleted file mode 100644 index b62fd55538de..000000000000 --- a/tests/run/main-annotation-no-parameters-no-parens.scala +++ /dev/null @@ -1,23 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Does nothing, except confirming that it runs */ - @newMain def run: Unit = - println("I run properly!") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("run") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array()) -end Test diff --git a/tests/run/main-annotation-no-parameters.scala b/tests/run/main-annotation-no-parameters.scala deleted file mode 100644 index fc92a5680e07..000000000000 --- a/tests/run/main-annotation-no-parameters.scala +++ /dev/null @@ -1,23 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Does nothing, except confirming that it runs */ - @newMain def run(): Unit = - println("I run properly!") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("run") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array()) -end Test diff --git a/tests/run/main-annotation-overload.scala b/tests/run/main-annotation-overload.scala deleted file mode 100644 index 60f9b68a58a2..000000000000 --- 
a/tests/run/main-annotation-overload.scala +++ /dev/null @@ -1,35 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds three numbers (malformed, doesn't work) */ - def add(num1: Int, num2: Int, num3: Int): Unit = - ??? - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - - /** Adds one number (malformed, doesn't work) */ - def add(num: Int): Unit = - ??? - - /** Adds zero numbers (malformed, doesn't work) */ - def add(): Int = - ??? - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-param-annot-1.scala b/tests/run/main-annotation-param-annot-1.scala deleted file mode 100644 index 5cf29b9f4efb..000000000000 --- a/tests/run/main-annotation-param-annot-1.scala +++ /dev/null @@ -1,111 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.annotation.newMain.alias - -object myProgram: - @newMain def altName1( - @alias("myNum") num: Int, - inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def altName2( - @alias("myNum") num: Int, - @alias("myInc") inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def shortName1( - @alias("n") num: Int, - inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def shortName2( - @alias("n") num: Int, - @alias("i") inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def mix1( - @alias("myNum") @alias("n") num: Int, - @alias("i") @alias("myInc") inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - def myNum: String = 
"myNum" - def myShortNum = { - var short = 'a' - for i <- 0 until 'n' - 'a' - do - short = (short.toInt + 1).toChar - short.toString - } - def myInc = {new Exception("myInc")}.getMessage - def myShortInc = () => "i" - - @newMain def mix2( - @alias(myNum) @alias(myShortNum) num: Int, - @alias(myShortInc()) @alias(myInc) inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def multiple( - @alias("myNum") @alias("n") num: Int, - @alias("i") @alias("myInc") inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") -end myProgram - - -object Test: - def callMain(className: String, args: Array[String]) = - val clazz = Class.forName(className) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain("altName1", Array("--num", "2", "--inc", "3")) - callMain("altName1", Array("--myNum", "2", "--inc", "3")) - - callMain("altName2", Array("--num", "2", "--inc", "3")) - callMain("altName2", Array("--myNum", "2", "--inc", "3")) - callMain("altName2", Array("--num", "2", "--myInc", "3")) - callMain("altName2", Array("--myNum", "2", "--myInc", "3")) - - callMain("shortName1", Array("--num", "2", "--inc", "3")) - callMain("shortName1", Array("-n", "2", "--inc", "3")) - - callMain("shortName2", Array("--num", "2", "--inc", "3")) - callMain("shortName2", Array("-n", "2", "--inc", "3")) - callMain("shortName2", Array("--num", "2", "-i", "3")) - callMain("shortName2", Array("-n", "2", "-i", "3")) - - callMain("mix1", Array("--num", "2", "--inc", "3")) - callMain("mix1", Array("-n", "2", "--inc", "3")) - callMain("mix1", Array("--num", "2", "-i", "3")) - callMain("mix1", Array("-n", "2", "-i", "3")) - callMain("mix1", Array("--myNum", "2", "--myInc", "3")) - callMain("mix1", Array("-n", "2", "--myInc", "3")) - callMain("mix1", Array("--myNum", "2", "-i", "3")) - callMain("mix1", Array("-n", "2", "-i", "3")) - callMain("mix2", Array("--num", "2", "--inc", "3")) - 
callMain("mix2", Array("-n", "2", "--inc", "3")) - callMain("mix2", Array("--num", "2", "-i", "3")) - callMain("mix2", Array("-n", "2", "-i", "3")) - callMain("mix2", Array("--myNum", "2", "--myInc", "3")) - callMain("mix2", Array("-n", "2", "--myInc", "3")) - callMain("mix2", Array("--myNum", "2", "-i", "3")) - callMain("mix2", Array("-n", "2", "-i", "3")) - - callMain("multiple", Array("--num", "2", "--inc", "3")) - callMain("multiple", Array("-n", "2", "--inc", "3")) - callMain("multiple", Array("--num", "2", "-i", "3")) - callMain("multiple", Array("-n", "2", "-i", "3")) - callMain("multiple", Array("--myNum", "2", "--myInc", "3")) - callMain("multiple", Array("-n", "2", "--myInc", "3")) - callMain("multiple", Array("--myNum", "2", "-i", "3")) - callMain("multiple", Array("-n", "2", "-i", "3")) -end Test diff --git a/tests/run/main-annotation-param-annot-2.scala b/tests/run/main-annotation-param-annot-2.scala deleted file mode 100644 index 76033f24e614..000000000000 --- a/tests/run/main-annotation-param-annot-2.scala +++ /dev/null @@ -1,63 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.annotation.newMain.alias - -object myProgram: - @newMain def multipleSameShortNames1( - @alias("n") num: Int, - @alias("n") inc: Int - ): Unit = () - - @newMain def multipleSameShortNames2( - @alias("n") @alias("n") num: Int, - inc: Int - ): Unit = () - - @newMain def multipleSameNames1( - @alias("arg") num: Int, - @alias("arg") inc: Int - ): Unit = () - - @newMain def multipleSameNames2( - @alias("arg") @alias("arg") num: Int, - inc: Int - ): Unit = () - - @newMain def multipleSameNames3( - num: Int, - @alias("num") inc: Int - ): Unit = () -end myProgram - - -object Test: - def hasCauseIllegalArgumentException(e: Throwable): Boolean = - e.getCause match { - case null => false - case _: IllegalArgumentException => true - case e: Throwable => hasCauseIllegalArgumentException(e) - } - - def 
callMain(className: String, args: Array[String]) = - val clazz = Class.forName(className) - val method = clazz.getMethod("main", classOf[Array[String]]) - - try { method.invoke(null, args) } - catch { - case e: Exception if hasCauseIllegalArgumentException(e) => println("OK") - } - - def main(args: Array[String]): Unit = - callMain("multipleSameShortNames1", Array("--num", "2", "--inc", "3")) - callMain("multipleSameShortNames1", Array("-n", "2", "--inc", "3")) - callMain("multipleSameShortNames2", Array("--num", "2", "--inc", "3")) - callMain("multipleSameShortNames2", Array("-n", "2", "--inc", "3")) - - callMain("multipleSameNames1", Array("--num", "2", "--inc", "3")) - callMain("multipleSameNames1", Array("--arg", "2", "--inc", "3")) - callMain("multipleSameNames2", Array("--num", "2", "--inc", "3")) - callMain("multipleSameNames2", Array("--arg", "2", "--inc", "3")) - callMain("multipleSameNames3", Array("--num", "2", "--inc", "3")) -end Test diff --git a/tests/run/main-annotation-param-annot-invalid-params.scala b/tests/run/main-annotation-param-annot-invalid-params.scala deleted file mode 100644 index 46bc812863b1..000000000000 --- a/tests/run/main-annotation-param-annot-invalid-params.scala +++ /dev/null @@ -1,48 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.annotation.newMain.alias - -import java.lang.reflect.InvocationTargetException - -object myProgram: - - @newMain def empty( - @alias("") i: Int, - ): Unit = () - - @newMain def space( - @alias(" ") i: Int, - ): Unit = () - - @newMain def nonLetter( - @alias("1") i: Int, - ): Unit = () - -end myProgram - -object Test: - def hasCauseIllegalArgumentException(e: Throwable): Boolean = - e.getCause match { - case null => false - case _: IllegalArgumentException => true - case e: Throwable => hasCauseIllegalArgumentException(e) - } - - def callMain(className: String, args: Array[String]) = - val clazz = Class.forName(className) - 
val method = clazz.getMethod("main", classOf[Array[String]]) - try { - method.invoke(null, args) - println(s"Calling $className should result in an IllegalArgumentException being thrown") - } - catch { - case e: InvocationTargetException if hasCauseIllegalArgumentException(e) => println("OK") - } - - def main(args: Array[String]): Unit = - callMain("empty", Array("3")) - callMain("space", Array("3")) - callMain("nonLetter", Array("3")) -end Test diff --git a/tests/run/main-annotation-return-type-1.scala b/tests/run/main-annotation-return-type-1.scala deleted file mode 100644 index 1366cceeba8a..000000000000 --- a/tests/run/main-annotation-return-type-1.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers and returns them */ - @newMain def add(num: Int, inc: Int) = - println(num + inc) - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - println("Direct call") - myProgram.add(2, 3) - println("Main call") - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-return-type-2.scala b/tests/run/main-annotation-return-type-2.scala deleted file mode 100644 index e2dc6b8ae4e6..000000000000 --- a/tests/run/main-annotation-return-type-2.scala +++ /dev/null @@ -1,29 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -class MyResult(val result: Int): - override def toString: String = result.toString - -// Sample main method -object myProgram: - - /** Adds two numbers and returns them */ - @newMain def add(num: Int, inc: Int) = - println(MyResult(num + inc)) - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = 
Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - println("Direct call") - myProgram.add(2, 3) - println("Main call") - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-short-name.scala b/tests/run/main-annotation-short-name.scala deleted file mode 100644 index 4a179fb793e1..000000000000 --- a/tests/run/main-annotation-short-name.scala +++ /dev/null @@ -1,25 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -object myProgram: - - /** Adds two numbers */ - @newMain def add(n: Int, i: Int): Unit = - println(s"$n + $i = ${n + i}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("-n", "2", "-i", "3")) - callMain(Array("-i", "3", "-n", "2")) - - callMain(Array("--n", "2", "--i", "3")) -end Test diff --git a/tests/run/main-annotation-simple.scala b/tests/run/main-annotation-simple.scala deleted file mode 100644 index 7d2fd501849b..000000000000 --- a/tests/run/main-annotation-simple.scala +++ /dev/null @@ -1,22 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-top-level.scala b/tests/run/main-annotation-top-level.scala deleted file mode 100644 index 
3e2bb7bb2fb4..000000000000 --- a/tests/run/main-annotation-top-level.scala +++ /dev/null @@ -1,37 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -/** Adds two numbers */ -@newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -/** Adds any amount of numbers */ -@newMain def addAll(num: Int = 0, incs: Int*): Unit = - print(num) - if (incs.length > 0) { - print(" + ") - print(incs.mkString(" + ")) - } - println(s" = ${num + incs.sum}") - -object Test: - def callMainAdd(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def callMainAddAll(args: Array[String]): Unit = - val clazz = Class.forName("addAll") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMainAdd(Array("2", "3")) - - callMainAddAll(Array("2", "3")) - callMainAddAll(Array("2")) - callMainAddAll(Array()) - callMainAddAll(Array("1", "2", "3", "4")) -end Test diff --git a/tests/run/main-annotation-types.scala b/tests/run/main-annotation-types.scala deleted file mode 100644 index 0ee6220a1196..000000000000 --- a/tests/run/main-annotation-types.scala +++ /dev/null @@ -1,35 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Displays some parameters */ - @newMain def show( - int: Int, - double: Double, - string: String, - byte: Byte - ): Unit = - println("Here's what I got:") - println(s"int - $int") - println(s"double - $double") - println(s"string - $string") - println(s"byte - $byte") - println() - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("show") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - 
def main(args: Array[String]): Unit = - callMain(Array("2", "3", "4", "1")) - callMain(Array("-1", "3456789098765445678", "false", "127")) - callMain(Array("2147483647", "3.1415926535", "Hello world!", "0")) -end Test diff --git a/tests/run/main-annotation-vararg-1.scala b/tests/run/main-annotation-vararg-1.scala deleted file mode 100644 index 0227054e0189..000000000000 --- a/tests/run/main-annotation-vararg-1.scala +++ /dev/null @@ -1,30 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds any amount of numbers */ - @newMain def add(nums: Int*): Unit = - if (nums.isEmpty) - println("No number input") - else - println(s"${nums.mkString(" + ")} = ${nums.sum}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) - callMain(Array("2", "3", "-4")) - callMain((1 to 10).toArray.map(_.toString)) - callMain(Array("0")) - callMain(Array()) -end Test diff --git a/tests/run/main-annotation-vararg-2.scala b/tests/run/main-annotation-vararg-2.scala deleted file mode 100644 index 8521795388b2..000000000000 --- a/tests/run/main-annotation-vararg-2.scala +++ /dev/null @@ -1,33 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Checks that the correct amount of parameters were passed */ - @newMain def count(count: Int, elems: String*): Unit = - if (elems.length == count) - println("Correct") - else - println(s"Expected $count argument${if (count != 1) "s" else ""}, but got ${elems.length}") - println(s" ${elems.mkString(", ")}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = 
Class.forName("count") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("1", "Hello")) - callMain(Array("2", "Hello", "world!")) - callMain(Array("3", "No 3 elements")) - callMain(Array("0")) - callMain(Array("0", "I", "shouldn't", "be", "here")) - callMain(Array("-2", "How does that make sense?")) - callMain(Array("26") ++ ('a' to 'z').toArray.map(_.toString)) -end Test diff --git a/tests/run/main-annotation-wrong-param-1.scala b/tests/run/main-annotation-wrong-param-1.scala deleted file mode 100644 index 6c9e9e991136..000000000000 --- a/tests/run/main-annotation-wrong-param-1.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "true", "SPAAAAACE")) - callMain(Array("add", "2", "3")) - callMain(Array("true", "false", "10")) - callMain(Array("binary", "10", "01")) -end Test diff --git a/tests/run/main-annotation-wrong-param-names.scala b/tests/run/main-annotation-wrong-param-names.scala deleted file mode 100644 index 90622d543bf1..000000000000 --- a/tests/run/main-annotation-wrong-param-names.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): 
Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("--n", "1", "--i", "10")) - callMain(Array("num", "1", "inc", "10")) - callMain(Array("--something", "1", "10")) - callMain(Array("1", "--else", "10")) -end Test diff --git a/tests/run/main-annotation-wrong-param-number.scala b/tests/run/main-annotation-wrong-param-number.scala deleted file mode 100644 index b8ef8c0ea9e7..000000000000 --- a/tests/run/main-annotation-wrong-param-number.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array()) - callMain(Array("1")) - callMain(Array("1", "2", "3")) - callMain(Array((1 to 10).toArray.map(_.toString): _*)) -end Test diff --git a/tests/run/main-annotation-wrong-param-type.scala b/tests/run/main-annotation-wrong-param-type.scala deleted file mode 100644 index 0fbae70a48a5..000000000000 --- a/tests/run/main-annotation-wrong-param-type.scala +++ /dev/null @@ -1,28 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - 
- def main(args: Array[String]): Unit = - callMain(Array("2", "true")) - callMain(Array("2.1", "3")) - callMain(Array("2", "3.1415921535")) - callMain(Array("192.168.1.1", "3")) - callMain(Array("false", "true")) - callMain(Array("Hello", "world!")) -end Test diff --git a/tests/run/main-calculator-example.scala b/tests/run/main-calculator-example.scala deleted file mode 100644 index fc2e1397009b..000000000000 --- a/tests/run/main-calculator-example.scala +++ /dev/null @@ -1,67 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -sealed trait Expression: - def eval(): Int -case class Number(n: Int) extends Expression: - def eval(): Int = n -case class Plus(e1: Expression, e2: Expression) extends Expression: - def eval(): Int = e1.eval() + e2.eval() - -//// - -@main def sum(n1: Int, n2: Int) = - val x1 = Number(n1) - val x2 = Number(n2) - val expr = Plus(x1, x2) - println(s"Expression: $expr") - val result = expr.eval() - println(s"Calculated: $result") - -//// - -import scala.annotation.{ MainAnnotation, experimental } -import scala.annotation.MainAnnotation.{ Info, Parameter } -import scala.util.CommandLineParser.FromString - -class showAndEval extends MainAnnotation[FromString, Expression]: - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - assert(info.parameters.forall(param => param.typeName == "Number"), "Only Number parameters allowed") - println(s"executing ${info.name} with inputs: ${args.mkString(" ")}") - Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using parser: FromString[T]): () => T = - () => parser.fromString(arg) - - def varargGetter[T](param: Parameter, args: Seq[String])(using parser: FromString[T]): () => Seq[T] = - () => args.map(arg => parser.fromString(arg)) - - def run(program: () => Expression): Unit = - val expr = program() - println(s"Expression: $expr") - val result = expr.eval() - println(s"Calculated: $result") -end showAndEval - -given 
FromString[Number] = (x: String) => Number(x.toInt) - -//// - -@showAndEval def sum2(x1: Number, x2: Number): Expression = - sumAll(x1, x2) - -@showAndEval def sumAll(xs: Number*): Expression = - if xs.isEmpty then Number(0) - else xs.tail.fold[Expression](xs.head)(Plus) - -//// - -@main def Test: Unit = - def callMain(name: String, args: String*): Unit = - val clazz = Class.forName(name) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args.toArray) - callMain("sum", "1", "2") - callMain("sum2", "2", "3") - callMain("sumAll", "1", "2", "3") -end Test From 5a158924f3b4277064928a0acf0d6b1d1fda3773 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 27 Mar 2024 17:36:33 +0100 Subject: [PATCH 023/465] Add test case supplied by @LucySMartin --- compiler/test-resources/repl/10693 | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 compiler/test-resources/repl/10693 diff --git a/compiler/test-resources/repl/10693 b/compiler/test-resources/repl/10693 new file mode 100644 index 000000000000..ab4d175c1a6f --- /dev/null +++ b/compiler/test-resources/repl/10693 @@ -0,0 +1,16 @@ +scala> def test[A, B](a: A, b: B): A | B = a +def test[A, B](a: A, b: B): A | B +scala> def d0 = test("string", 1) +def d0: String | Int +scala> def d1 = test(1, "string") +def d1: Int | String +scala> def d2 = test(d0, d1) +def d2: String | Int +scala> def d3 = test(d1, d0) +def d3: Int | String +scala> def d4 = test(d2, d3) +def d4: String | Int +scala> def d5 = test(d3, d2) +def d5: Int | String +scala> def d6 = test(d4, d5) +def d6: String | Int \ No newline at end of file From 401f1264d87f09406210b1702c47519b2829c397 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 27 Mar 2024 20:35:14 +0100 Subject: [PATCH 024/465] Also reduce references with prefixes that alias class types --- compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala 
b/compiler/src/dotty/tools/dotc/core/Types.scala index b70f286b70ed..6e88ee9d18b5 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2721,7 +2721,7 @@ object Types extends TypeUtils { if reduced.exists then reduced else prefix.stripTypeVar match case pre: (AppliedType | TypeRef) - if prefix.typeSymbol.isClass && this.symbol.isAliasType => dealias + if prefix.dealias.typeSymbol.isClass && this.symbol.isAliasType => dealias case _ => this /** Guard against cycles that can arise if given `op` From 6a40dd502352ea0f3fabce9198000b726df3082d Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 26 Mar 2024 12:45:23 +0100 Subject: [PATCH 025/465] Do match type reduction atPhaseNoLater than ElimOpaque --- compiler/src/dotty/tools/dotc/core/Phases.scala | 4 ++++ compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- tests/pos/i19434.scala | 11 +++++++++++ 3 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i19434.scala diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index c704846a82da..043c5beb9076 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -220,6 +220,7 @@ object Phases { private var myPatmatPhase: Phase = uninitialized private var myElimRepeatedPhase: Phase = uninitialized private var myElimByNamePhase: Phase = uninitialized + private var myElimOpaquePhase: Phase = uninitialized private var myExtensionMethodsPhase: Phase = uninitialized private var myExplicitOuterPhase: Phase = uninitialized private var myGettersPhase: Phase = uninitialized @@ -245,6 +246,7 @@ object Phases { final def patmatPhase: Phase = myPatmatPhase final def elimRepeatedPhase: Phase = myElimRepeatedPhase final def elimByNamePhase: Phase = myElimByNamePhase + final def elimOpaquePhase: Phase = myElimOpaquePhase final def extensionMethodsPhase: Phase = 
myExtensionMethodsPhase final def explicitOuterPhase: Phase = myExplicitOuterPhase final def gettersPhase: Phase = myGettersPhase @@ -272,6 +274,7 @@ object Phases { myRefChecksPhase = phaseOfClass(classOf[RefChecks]) myElimRepeatedPhase = phaseOfClass(classOf[ElimRepeated]) myElimByNamePhase = phaseOfClass(classOf[ElimByName]) + myElimOpaquePhase = phaseOfClass(classOf[ElimOpaque]) myExtensionMethodsPhase = phaseOfClass(classOf[ExtensionMethods]) myErasurePhase = phaseOfClass(classOf[Erasure]) myElimErasedValueTypePhase = phaseOfClass(classOf[ElimErasedValueType]) @@ -511,6 +514,7 @@ object Phases { def refchecksPhase(using Context): Phase = ctx.base.refchecksPhase def elimRepeatedPhase(using Context): Phase = ctx.base.elimRepeatedPhase def elimByNamePhase(using Context): Phase = ctx.base.elimByNamePhase + def elimOpaquePhase(using Context): Phase = ctx.base.elimOpaquePhase def extensionMethodsPhase(using Context): Phase = ctx.base.extensionMethodsPhase def explicitOuterPhase(using Context): Phase = ctx.base.explicitOuterPhase def gettersPhase(using Context): Phase = ctx.base.gettersPhase diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 701a6360fd3d..4f760b6f1b14 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5028,7 +5028,7 @@ object Types extends TypeUtils { private def thisMatchType = this - def reduced(using Context): Type = { + def reduced(using Context): Type = atPhaseNoLater(elimOpaquePhase) { def contextInfo(tp: Type): Type = tp match { case tp: TypeParamRef => diff --git a/tests/pos/i19434.scala b/tests/pos/i19434.scala new file mode 100644 index 000000000000..e8595fa252d0 --- /dev/null +++ b/tests/pos/i19434.scala @@ -0,0 +1,11 @@ + +object Test: + + object Named: + opaque type Named[name <: String & Singleton, A] >: A = A + + type DropNames[T <: Tuple] = T match + case Named.Named[_, x] *: xs => x *: DropNames[xs] + case 
_ => T + + def f[T <: Tuple]: DropNames[T] = ??? \ No newline at end of file From b9430cb927214b3302b44abcddb429827cc9b85e Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 28 Mar 2024 15:26:53 +0100 Subject: [PATCH 026/465] Update compiler/src/dotty/tools/dotc/core/TypeComparer.scala --- compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 39e9fcea4e0f..159b2f01dbf4 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2529,6 +2529,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if isSuperOf(sub) then NoType else tp end dropIfSuper + /** If some (|-operand of) `tp` is a subtype of `sup` replace it with `NoType`. */ private def dropIfSub(tp: Type, sup: Type, canConstrain: Boolean): Type = def isSubOf(sup: Type): Boolean = sup match From 477b47d9a626dc96ad716ca35e89b9fbc557456c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Fri, 29 Mar 2024 11:33:28 +0100 Subject: [PATCH 027/465] Setup versions for 3.5.0 --- project/Build.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index d9c224790b12..336d576c7207 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -83,7 +83,7 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.4.0" + val referenceVersion = "3.4.2-RC1" val baseVersion = "3.5.0-RC1" @@ -104,7 +104,7 @@ object Build { * set to 3.1.3. If it is going to be 3.1.0, it must be set to the latest * 3.0.x release. */ - val previousDottyVersion = "3.4.0" + val previousDottyVersion = "3.4.1" /** Version against which we check binary compatibility. 
*/ val ltsDottyVersion = "3.3.0" From c8c3bde0270db83c2950c79cbe68267af7cfbc69 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20Fornal?= <24961583+Florian3k@users.noreply.github.com> Date: Fri, 29 Mar 2024 11:48:51 +0100 Subject: [PATCH 028/465] Fix Closure span assignment in makeClosure (#15841) Fixes #15098 --- .../src/dotty/tools/dotc/ast/Desugar.scala | 2 +- .../dotty/tools/dotc/ast/NavigateAST.scala | 4 +- .../backend/jvm/DottyBytecodeTests.scala | 58 +++++++++++++++++++ tests/neg/i15741.scala | 8 +-- tests/neg/i19351a.check | 10 +--- tests/neg/i19351a/Test.scala | 2 +- tests/neg/i9299.scala | 2 +- 7 files changed, 68 insertions(+), 18 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 2d99cf201375..6ed05976a19e 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1523,7 +1523,7 @@ object desugar { DefDef(nme.ANON_FUN, paramss, if (tpt == null) TypeTree() else tpt, body) .withSpan(span) .withMods(synthetic | Artifact), - Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) + Closure(Nil, Ident(nme.ANON_FUN), EmptyTree).withSpan(span)) /** If `nparams` == 1, expand partial function * diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index 2960af8fcdec..f83f12e1c027 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -4,7 +4,7 @@ package ast import core.Contexts.* import core.Decorators.* import util.Spans.* -import Trees.{MemberDef, DefTree, WithLazyFields} +import Trees.{Closure, MemberDef, DefTree, WithLazyFields} import dotty.tools.dotc.core.Types.AnnotatedType import dotty.tools.dotc.core.Types.ImportType import dotty.tools.dotc.core.Types.Type @@ -76,7 +76,7 @@ object NavigateAST { var bestFit: List[Positioned] = path while (it.hasNext) { val path1 = it.next() match { - 
case p: Positioned => singlePath(p, path) + case p: Positioned if !p.isInstanceOf[Closure[?]] => singlePath(p, path) case m: untpd.Modifiers => childPath(m.productIterator, path) case xs: List[?] => childPath(xs.iterator, path) case _ => path diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index 51390e35b527..f446913d7964 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -1785,6 +1785,64 @@ class DottyBytecodeTests extends DottyBytecodeTest { } } + + @Test def i15098 = { + val source = + """object Main { + | def main(args: Array[String]): Unit = { + | Array(1).foreach { n => + | val x = 123 + | println(n) + | } + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Main$.class", directory = false).input + val clsNode = loadClassNode(clsIn, skipDebugInfo = false) + val method = getMethod(clsNode, "main") + val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) + + val expected = List( + LineNumber(3, Label(0)), + ) + + assertSameCode(instructions, expected) + } + } + + @Test def i15098_2 = { + val source = + """object Main { + | def main(args: Array[String]): Unit = { + | Array(1).map { n => + | val x = 123 + | x + n + | }.foreach { n => + | println(n) + | println(n) + | } + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Main$.class", directory = false).input + val clsNode = loadClassNode(clsIn, skipDebugInfo = false) + val method = getMethod(clsNode, "main") + val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) + + val expected = List( + LineNumber(3, Label(0)), + LineNumber(6, Label(15)), + LineNumber(3, Label(24)), + LineNumber(6, Label(27)), + ) + + assertSameCode(instructions, expected) + } + } } object invocationReceiversTestCode { 
diff --git a/tests/neg/i15741.scala b/tests/neg/i15741.scala index 2d536c515f76..45d6c3bed16d 100644 --- a/tests/neg/i15741.scala +++ b/tests/neg/i15741.scala @@ -1,15 +1,15 @@ def get(using Int): String = summon[Int].toString - def pf2: PartialFunction[String, Int ?=> String] = { + def pf2: PartialFunction[String, Int ?=> String] = { // error case "hoge" => get case "huga" => get - } // error + } type IS = Int ?=> String - def pf3: PartialFunction[String, IS] = { + def pf3: PartialFunction[String, IS] = { // error case "hoge" => get case "huga" => get - } // error + } diff --git a/tests/neg/i19351a.check b/tests/neg/i19351a.check index 3c1353811f3d..10789c2db5aa 100644 --- a/tests/neg/i19351a.check +++ b/tests/neg/i19351a.check @@ -1,12 +1,4 @@ -- Error: tests/neg/i19351a/Test.scala:8:34 ---------------------------------------------------------------------------- -8 |inline def not(b: Bool): Bool = ${notMacro('b)} // error // error +8 |inline def not(b: Bool): Bool = ${notMacro('b)} // error | ^ |Cyclic macro dependency; macro refers to a toplevel symbol in tests/neg/i19351a/Test.scala from which the macro is called --- [E046] Cyclic Error: tests/neg/i19351a/Test.scala:8:46 -------------------------------------------------------------- -8 |inline def not(b: Bool): Bool = ${notMacro('b)} // error // error - | ^ - | Cyclic reference involving method $anonfun - | - | Run with -explain-cyclic for more details. 
- | - | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i19351a/Test.scala b/tests/neg/i19351a/Test.scala index 51f608aa46ea..84fb6ca4ae78 100644 --- a/tests/neg/i19351a/Test.scala +++ b/tests/neg/i19351a/Test.scala @@ -5,7 +5,7 @@ type Bool = [R] => (R, R) => R val True: Bool = [R] => (t: R, _: R) => t val False: Bool = [R] => (_: R, f: R) => f -inline def not(b: Bool): Bool = ${notMacro('b)} // error // error +inline def not(b: Bool): Bool = ${notMacro('b)} // error inline def show(b: Bool): String = ${showMacro('b)} //inline def not(b: Bool): Bool = ${foldMacro('b, 'False, 'True)} //inline def show(b: Bool): String = ${foldMacro('b, '{"TRUE"}, '{"FALSE"})} diff --git a/tests/neg/i9299.scala b/tests/neg/i9299.scala index 6c23d11553ff..c3ae55ab9d18 100644 --- a/tests/neg/i9299.scala +++ b/tests/neg/i9299.scala @@ -1,4 +1,4 @@ type F <: F = 1 match { // error - case _ => foo.foo // error // error + case _ => foo.foo // error } def foo(a: Int): Unit = ??? From f2b7c12e6941b472086debef420e4eb3578c178a Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 31 Mar 2024 11:21:32 +0200 Subject: [PATCH 029/465] Plug unsoundness for reach capabilities Enforce an analogous restriction to the one for creating reach capabilities for all values. The type of a value cannot both have a reach capability with variance >= 0 and at the same time a universal capability with variance <= 0. 
Plug soundness hole for reach capabilities --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 44 ++++++++++++++++++- tests/neg/unsound-reach-2.scala | 25 +++++++++++ tests/neg/unsound-reach-3.scala | 21 +++++++++ tests/neg/unsound-reach.check | 5 +++ tests/neg/unsound-reach.scala | 20 +++++++++ 5 files changed, 113 insertions(+), 2 deletions(-) create mode 100644 tests/neg/unsound-reach-2.scala create mode 100644 tests/neg/unsound-reach-3.scala create mode 100644 tests/neg/unsound-reach.check create mode 100644 tests/neg/unsound-reach.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index de584797f154..675cd012e801 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -249,6 +249,44 @@ class CheckCaptures extends Recheck, SymTransformer: else i"references $cs1$cs1description are not all", pos, provenance) + def showRef(ref: CaptureRef)(using Context): String = + ctx.printer.toTextCaptureRef(ref).show + + // Uses 4-space indent as a trial + def checkReachCapsIsolated(tpe: Type, pos: SrcPos)(using Context): Unit = + + object checker extends TypeTraverser: + var refVariances: Map[Boolean, Int] = Map.empty + var seenReach: CaptureRef | Null = null + def traverse(tp: Type) = + tp match + case CapturingType(parent, refs) => + traverse(parent) + for ref <- refs.elems do + if ref.isReach && !ref.stripReach.isInstanceOf[TermParamRef] + || ref.isRootCapability + then + val isReach = ref.isReach + def register() = + refVariances = refVariances.updated(isReach, variance) + seenReach = ref + refVariances.get(isReach) match + case None => register() + case Some(v) => if v != 0 && variance == 0 then register() + case _ => + traverseChildren(tp) + + checker.traverse(tpe) + if checker.refVariances.size == 2 + && checker.refVariances(true) >= 0 + && checker.refVariances(false) <= 0 + then + report.error( + em"""Reach capability 
${showRef(checker.seenReach.nn)} and universal capability cap cannot both + |appear in the type $tpe of this expression""", + pos) + end checkReachCapsIsolated + /** The current environment */ private val rootEnv: Env = inContext(ictx): Env(defn.RootClass, EnvKind.Regular, CaptureSet.empty, null) @@ -779,8 +817,10 @@ class CheckCaptures extends Recheck, SymTransformer: report.error(ex.getMessage.nn) tree.tpe finally curEnv = saved - if tree.isTerm && !pt.isBoxedCapturing then - markFree(res.boxedCaptureSet, tree.srcPos) + if tree.isTerm then + checkReachCapsIsolated(res.widen, tree.srcPos) + if !pt.isBoxedCapturing then + markFree(res.boxedCaptureSet, tree.srcPos) res override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = diff --git a/tests/neg/unsound-reach-2.scala b/tests/neg/unsound-reach-2.scala new file mode 100644 index 000000000000..27742d72557b --- /dev/null +++ b/tests/neg/unsound-reach-2.scala @@ -0,0 +1,25 @@ +import language.experimental.captureChecking +trait Consumer[-T]: + def apply(x: T): Unit + +trait File: + def close(): Unit + +def withFile[R](path: String)(op: Consumer[File]): R = ??? + +trait Foo[+X]: + def use(x: File^)(op: Consumer[X]): Unit +class Bar extends Foo[File^]: + def use(x: File^)(op: Consumer[File^]): Unit = op.apply(x) + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + boom.use(f): // error + new Consumer[File^{backdoor*}]: + def apply(f1: File^{backdoor*}) = + escaped = f1 + diff --git a/tests/neg/unsound-reach-3.scala b/tests/neg/unsound-reach-3.scala new file mode 100644 index 000000000000..71c27fe5007d --- /dev/null +++ b/tests/neg/unsound-reach-3.scala @@ -0,0 +1,21 @@ +import language.experimental.captureChecking +trait File: + def close(): Unit + +def withFile[R](path: String)(op: File^ => R): R = ??? 
+ +trait Foo[+X]: + def use(x: File^): X +class Bar extends Foo[File^]: + def use(x: File^): File^ = x + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + escaped = boom.use(f) // error + // boom.use: (x: File^) -> File^{backdoor*}, it is a selection so reach capabilities are allowed + // f: File^, so there is no reach capabilities + diff --git a/tests/neg/unsound-reach.check b/tests/neg/unsound-reach.check new file mode 100644 index 000000000000..fd5c401416d1 --- /dev/null +++ b/tests/neg/unsound-reach.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/unsound-reach.scala:18:9 --------------------------------------------------------------------------- +18 | boom.use(f): (f1: File^{backdoor*}) => // error + | ^^^^^^^^ + | Reach capability backdoor* and universal capability cap cannot both + | appear in the type (x: File^)(op: box File^{backdoor*} => Unit): Unit of this expression diff --git a/tests/neg/unsound-reach.scala b/tests/neg/unsound-reach.scala new file mode 100644 index 000000000000..468730168019 --- /dev/null +++ b/tests/neg/unsound-reach.scala @@ -0,0 +1,20 @@ +import language.experimental.captureChecking +trait File: + def close(): Unit + +def withFile[R](path: String)(op: File^ => R): R = ??? 
+ +trait Foo[+X]: + def use(x: File^)(op: X => Unit): Unit +class Bar extends Foo[File^]: + def use(x: File^)(op: File^ => Unit): Unit = op(x) + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + boom.use(f): (f1: File^{backdoor*}) => // error + escaped = f1 + From 3c06b77d690d166f8fe7a6c50d5c5b88d739bbdb Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 31 Mar 2024 18:09:42 -0700 Subject: [PATCH 030/465] Detabulate files --- compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 2 +- compiler/src/dotty/tools/dotc/core/Flags.scala | 2 +- .../src/dotty/tools/dotc/core/SymDenotations.scala | 6 +++--- .../src/dotty/tools/dotc/core/SymbolLoaders.scala | 4 ++-- compiler/src/dotty/tools/dotc/core/Types.scala | 6 +++--- .../dotty/tools/dotc/printing/RefinedPrinter.scala | 6 +++--- .../src/dotty/tools/dotc/transform/Bridges.scala | 4 ++-- .../src/dotty/tools/dotc/transform/Erasure.scala | 8 ++++---- .../dotty/tools/dotc/transform/ExplicitOuter.scala | 12 ++++++------ .../src/dotty/tools/dotc/transform/PostTyper.scala | 4 ++-- .../src/dotty/tools/dotc/typer/Applications.scala | 4 ++-- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 6 +++--- compiler/src/dotty/tools/dotc/typer/Typer.scala | 10 +++++----- 13 files changed, 37 insertions(+), 37 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 28d3ef6daaef..a1bba544cc06 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -1105,7 +1105,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case AndType(ref, nn1) if qual.tpe eq ref => qual.tpe.widen match case OrNull(nn2) if nn1 eq nn2 => - Some(qual) + Some(qual) case _ => None case _ => None case _ => None diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala 
b/compiler/src/dotty/tools/dotc/core/Flags.scala index 249940d8ff99..1f38289161cd 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -137,7 +137,7 @@ object Flags { def flagStrings(privateWithin: String = ""): Seq[String] = { var rawStrings = (2 to MaxFlag).flatMap(x.flagString(_)) // DOTTY problem: cannot drop with (_) if (!privateWithin.isEmpty && !x.is(Protected)) - rawStrings = rawStrings :+ "private" + rawStrings :+= "private" val scopeStr = if (x.is(Local)) "this" else privateWithin if (scopeStr != "") rawStrings.filter(_ != "").map { diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 5304c9efadc0..5578028a82c2 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -772,7 +772,7 @@ object SymDenotations { * This can mean one of two things: * - the method and class are defined in a structural given instance, or * - the class is an implicit class and the method is its implicit conversion. 
- */ + */ final def isCoDefinedGiven(cls: Symbol)(using Context): Boolean = is(Method) && isOneOf(GivenOrImplicit) && ( is(Synthetic) // previous scheme used in 3.0 @@ -1071,8 +1071,8 @@ object SymDenotations { */ final def moduleClass(using Context): Symbol = { def notFound = { - if (Config.showCompletions) println(s"missing module class for $name: $myInfo") - NoSymbol + if (Config.showCompletions) println(s"missing module class for $name: $myInfo") + NoSymbol } if (this.is(ModuleVal)) myInfo match { diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 75c610b29140..abdcdd610042 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -221,8 +221,8 @@ object SymbolLoaders { Stats.record("package scopes") /** The scope of a package. This is different from a normal scope - * in that names of scope entries are kept in mangled form. - */ + * in that names of scope entries are kept in mangled form. + */ final class PackageScope extends MutableScope(0) { override def newScopeEntry(name: Name, sym: Symbol)(using Context): ScopeEntry = super.newScopeEntry(name.mangled, sym) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 62844a54bf48..513e00347251 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1340,7 +1340,7 @@ object Types extends TypeUtils { * * For instance, if `A` is an unconstrained type variable, then * - * ArrayBuffer[Int] | ArrayBuffer[A] + * ArrayBuffer[Int] | ArrayBuffer[A] * * is approximated by constraining `A` to be =:= to `Int` and returning `ArrayBuffer[Int]` * instead of `ArrayBuffer[? 
>: Int | A <: Int & A]` @@ -3488,7 +3488,7 @@ object Types extends TypeUtils { private var myFactorCount = 0 override def andFactorCount = if myFactorCount == 0 then - myFactorCount = tp1.andFactorCount + tp2.andFactorCount + myFactorCount = tp1.andFactorCount + tp2.andFactorCount myFactorCount def derivedAndType(tp1: Type, tp2: Type)(using Context): Type = @@ -3766,7 +3766,7 @@ object Types extends TypeUtils { * LambdaType | TermLambda | TypeLambda * -------------+-------------------+------------------ * HKLambda | HKTermLambda | HKTypeLambda - * MethodOrPoly | MethodType | PolyType + * MethodOrPoly | MethodType | PolyType */ trait LambdaType extends BindingType with TermType { self => type ThisName <: Name diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 93e280f8a13c..e84cbc7c50d5 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -478,9 +478,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if isWildcardStarArg(tree) then expr match case Ident(nme.WILDCARD_STAR) => - // `_*` is used as a wildcard name to indicate a vararg splice pattern; - // avoid the double `*` in this case. - toText(expr) + // `_*` is used as a wildcard name to indicate a vararg splice pattern; + // avoid the double `*` in this case. 
+ toText(expr) case _ => toText(expr) ~ "*" else diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index 2110ac1464c2..482e5056fad0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -26,8 +26,8 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { override def isSubParent(parent: Symbol, bc: Symbol)(using Context) = true - // Never consider a bridge if there is a superclass that would contain it - // See run/t2857.scala for a test that would break with a VerifyError otherwise. + // Never consider a bridge if there is a superclass that would contain it + // See run/t2857.scala for a test that would break with a VerifyError otherwise. /** Only use the superclass of `root` as a parent class. This means * overriding pairs that have a common implementation in a trait parent diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 01fc423b0076..8bfbb90a0700 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -594,9 +594,9 @@ object Erasure { def erasedDef(sym: Symbol)(using Context): Tree = if sym.isClass then - // We cannot simply drop erased classes, since then they would not generate classfiles - // and would not be visible under separate compilation. So we transform them to - // empty interfaces instead. + // We cannot simply drop erased classes, since then they would not generate classfiles + // and would not be visible under separate compilation. So we transform them to + // empty interfaces instead. 
tpd.ClassDef(sym.asClass, DefDef(sym.primaryConstructor.asTerm), Nil) else if sym.owner.isClass then sym.dropAfter(erasurePhase) @@ -667,7 +667,7 @@ object Erasure { */ override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { if tree.name == nme.apply && integrateSelect(tree) then - return typed(tree.qualifier, pt) + return typed(tree.qualifier, pt) val qual1 = typed(tree.qualifier, AnySelectionProto) diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index b976dfaa2f9f..55a692780b85 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -352,12 +352,12 @@ object ExplicitOuter { case _ => // Need to be careful to dealias before erasure, otherwise we lose prefixes. atPhaseNoLater(erasurePhase)(outerPrefix(tpe.underlying)) - // underlying is fine here and below since we are calling this after erasure. - // However, there is some weird stuff going on with parboiled2 where an - // AppliedType with a type alias as constructor is fed to outerPrefix. - // For some other unknown reason this works with underlying but not with superType. - // I was not able to minimize the problem and parboiled2 spits out way too much - // macro generated code to be able to pinpoint the root problem. + // underlying is fine here and below since we are calling this after erasure. + // However, there is some weird stuff going on with parboiled2 where an + // AppliedType with a type alias as constructor is fed to outerPrefix. + // For some other unknown reason this works with underlying but not with superType. + // I was not able to minimize the problem and parboiled2 spits out way too much + // macro generated code to be able to pinpoint the root problem. 
} case tpe: TypeProxy => outerPrefix(tpe.underlying) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 3bcec80b5b10..46f5774e671a 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -328,8 +328,8 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => else tree def app1 = - // reverse order of transforming args and fun. This way, we get a chance to see other - // well-formedness errors before reporting errors in possible inferred type args of fun. + // reverse order of transforming args and fun. This way, we get a chance to see other + // well-formedness errors before reporting errors in possible inferred type args of fun. val args1 = transform(app.args) cpy.Apply(app)(transform(app.fun), args1) methPart(app) match diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 82f4c89ae203..3974dab75461 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1359,8 +1359,8 @@ trait Applications extends Compatibility { || ctx.reporter.hasErrors then result else notAnExtractor(result) - // It might be that the result of typedExpr is an `apply` selection or implicit conversion. - // Reject in this case. + // It might be that the result of typedExpr is an `apply` selection or implicit conversion. + // Reject in this case. 
def tryWithTypeArgs(qual: untpd.Tree, targs: List[Tree])(fallBack: (Tree, TyperState) => Tree): Tree = tryEither { diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5162b3fed1b9..1a13ee396d19 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -411,9 +411,9 @@ object Implicits: /** Search mode to use for possibly avoiding looping givens */ enum SearchMode: case Old, // up to 3.3, old mode w/o protection - CompareWarn, // from 3.4, old mode, warn if new mode would change result - CompareErr, // from 3.5, old mode, error if new mode would change result - New // from future, new mode where looping givens are avoided + CompareWarn, // from 3.4, old mode, warn if new mode would change result + CompareErr, // from 3.5, old mode, error if new mode would change result + New // from future, new mode where looping givens are avoided /** The result of an implicit search */ sealed abstract class SearchResult extends Showable { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 0b05bcd078ff..8b9c144f68e9 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -922,9 +922,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer import untpd.* var templ1 = templ def isEligible(tp: Type) = - tp.exists - && !tp.typeSymbol.is(Final) - && (!tp.isTopType || tp.isAnyRef) // Object is the only toplevel class that can be instantiated + tp.exists + && !tp.typeSymbol.is(Final) + && (!tp.isTopType || tp.isAnyRef) // Object is the only toplevel class that can be instantiated if (templ1.parents.isEmpty && isFullyDefined(pt, ForceDegree.flipBottom) && isSkolemFree(pt) && @@ -3006,7 +3006,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Translate infix operation expression `l op r` 
to * - * l.op(r) if `op` is left-associative + * l.op(r) if `op` is left-associative * { val x = l; r.op(x) } if `op` is right-associative call-by-value and `l` is impure, and not in a quote pattern * r.op(l) if `op` is right-associative call-by-name, or `l` is pure, or in a quote pattern * @@ -4381,7 +4381,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case poly: PolyType if !(ctx.mode is Mode.Type) && dummyTreeOfType.unapply(tree).isEmpty => // If we are in a conversion from a TermRef with polymorphic underlying - // type, give up. In this case the typed `null` literal cannot be instantiated. + // type, give up. In this case the typed `null` literal cannot be instantiated. // Test case was but i18695.scala, but it got fixed by a different tweak in #18719. // We leave test for this condition in as a defensive measure in case // it arises somewhere else. From 91fd6c20bdddaed0f21739f620e67516c999292a Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Mon, 1 Apr 2024 12:12:00 +0100 Subject: [PATCH 031/465] Dealias type before checking reach refinements --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- tests/neg/unsound-reach-4.check | 5 +++++ tests/neg/unsound-reach-4.scala | 20 +++++++++++++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 tests/neg/unsound-reach-4.check create mode 100644 tests/neg/unsound-reach-4.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 675cd012e801..9b6217033ede 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -259,7 +259,7 @@ class CheckCaptures extends Recheck, SymTransformer: var refVariances: Map[Boolean, Int] = Map.empty var seenReach: CaptureRef | Null = null def traverse(tp: Type) = - tp match + tp.dealias match case CapturingType(parent, refs) => traverse(parent) for ref <- refs.elems do diff --git 
a/tests/neg/unsound-reach-4.check b/tests/neg/unsound-reach-4.check new file mode 100644 index 000000000000..47256baf408a --- /dev/null +++ b/tests/neg/unsound-reach-4.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/unsound-reach-4.scala:20:19 ------------------------------------------------------------------------ +20 | escaped = boom.use(f) // error + | ^^^^^^^^ + | Reach capability backdoor* and universal capability cap cannot both + | appear in the type (x: F): box File^{backdoor*} of this expression diff --git a/tests/neg/unsound-reach-4.scala b/tests/neg/unsound-reach-4.scala new file mode 100644 index 000000000000..fa395fa117ca --- /dev/null +++ b/tests/neg/unsound-reach-4.scala @@ -0,0 +1,20 @@ +import language.experimental.captureChecking +trait File: + def close(): Unit + +def withFile[R](path: String)(op: File^ => R): R = ??? + +type F = File^ + +trait Foo[+X]: + def use(x: F): X +class Bar extends Foo[File^]: + def use(x: F): File^ = x + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + escaped = boom.use(f) // error From f34ff5d16319420edba7297e12ddb525487b84d8 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Mon, 1 Apr 2024 13:07:13 +0100 Subject: [PATCH 032/465] Drop retains annotations in inferred type trees --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 10 ++++++++++ compiler/src/dotty/tools/dotc/typer/Typer.scala | 9 +++++++-- tests/pos-custom-args/captures/tablediff.scala | 11 +++++++++++ 3 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 tests/pos-custom-args/captures/tablediff.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 7c75ed833945..5c0dbd8508bf 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -445,6 +445,16 @@ extension (tp: AnnotatedType) case 
ann: CaptureAnnotation => ann.boxed case _ => false +class CleanupRetains(using Context) extends TypeMap: + def apply(tp: Type): Type = cleanupRetains(tp, this) + +/** Drop retains annotations in the type. */ +def cleanupRetains(tp: Type, theMap: CleanupRetains | Null = null)(using Context): Type = + def mapOver = (if theMap != null then theMap else new CleanupRetains).mapOver(tp) + tp match + case RetainingType(tp, _) => tp + case _ => mapOver + /** An extractor for `caps.reachCapability(ref)`, which is used to express a reach * capability as a tree in a @retains annotation. */ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 0b05bcd078ff..7a82ac78b75c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -48,7 +48,7 @@ import staging.StagingLevel import reporting.* import Nullables.* import NullOpsDecorator.* -import cc.{CheckCaptures, isRetainsLike} +import cc.{CheckCaptures, isRetainsLike, cleanupRetains} import config.Config import config.MigrationVersion @@ -2187,7 +2187,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer errorTree(tree, em"Something's wrong: missing original symbol for type tree") } case _ => - completeTypeTree(InferredTypeTree(), pt, tree) + val pt1 = cleanupRetains(pt) + // Cleans up retains annotations in inferred type trees. This is needed because + // during the typer, it is infeasible to correctly infer the capture sets in most + // cases, resulting ill-formed capture sets that could crash the pickler later on. + // See #20035. 
+ completeTypeTree(InferredTypeTree(), pt1, tree) def typedInLambdaTypeTree(tree: untpd.InLambdaTypeTree, pt: Type)(using Context): Tree = val tp = diff --git a/tests/pos-custom-args/captures/tablediff.scala b/tests/pos-custom-args/captures/tablediff.scala new file mode 100644 index 000000000000..244ee1a46a23 --- /dev/null +++ b/tests/pos-custom-args/captures/tablediff.scala @@ -0,0 +1,11 @@ +import language.experimental.captureChecking + +trait Seq[+A]: + def zipAll[A1 >: A, B](that: Seq[B]^, thisElem: A1, thatElem: B): Seq[(A1, B)]^{this, that} + def map[B](f: A => B): Seq[B]^{this, f} + +def zipAllOption[X](left: Seq[X], right: Seq[X]) = + left.map(Option(_)).zipAll(right.map(Option(_)), None, None) + +def fillRow[T](headRow: Seq[T], tailRow: Seq[T]) = + val paddedZip = zipAllOption(headRow, tailRow) From 5b37700616f15f19a9310b7cdfa2967088939fcb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 13:21:28 +0000 Subject: [PATCH 033/465] Bump burnett01/rsync-deployments from 7.0.0 to 7.0.1 Bumps [burnett01/rsync-deployments](https://github.com/burnett01/rsync-deployments) from 7.0.0 to 7.0.1. - [Release notes](https://github.com/burnett01/rsync-deployments/releases) - [Commits](https://github.com/burnett01/rsync-deployments/compare/7.0.0...7.0.1) --- updated-dependencies: - dependency-name: burnett01/rsync-deployments dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/spec.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/spec.yml b/.github/workflows/spec.yml index 94b99e81e044..a639c80bbda9 100644 --- a/.github/workflows/spec.yml +++ b/.github/workflows/spec.yml @@ -45,7 +45,7 @@ jobs: env: USER_FOR_TEST: ${{ secrets.SPEC_DEPLOY_USER }} if: ${{ env.USER_FOR_TEST != '' }} - uses: burnett01/rsync-deployments@7.0.0 + uses: burnett01/rsync-deployments@7.0.1 with: switches: -rzv path: docs/_spec/_site/ From 7dc40716b6c62cc10bfbab76cec6a72cbdccbfb2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 13:21:33 +0000 Subject: [PATCH 034/465] Bump VirtusLab/scala-cli-setup from 1.2.0 to 1.2.1 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.2.0 to 1.2.1. - [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.2.0...v1.2.1) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 7e8564a634c4..7136d6bb2f7d 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.2.0 + - uses: VirtusLab/scala-cli-setup@v1.2.1 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From 87c2b14c75bfe856bad024bcca007587e4dec4b6 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 19 Mar 2024 18:50:20 +0100 Subject: [PATCH 035/465] Add support for `var` in refinements Co-authored-by: Anna Herlihy Co-authored-by: Nicolas Stucki --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 17 ++++++++++++++++- .../src/dotty/tools/dotc/parsing/Parsers.scala | 6 ++---- compiler/src/dotty/tools/dotc/typer/Typer.scala | 1 - docs/_docs/internals/syntax.md | 1 + docs/_docs/reference/syntax.md | 1 + tests/neg/i13703.check | 9 ++------- tests/neg/i13703.scala | 2 -- tests/neg/i19809.check | 10 ++++++++++ tests/neg/i19809.scala | 4 ++++ tests/neg/i4496b.scala | 2 +- tests/neg/illegal-refinements.scala | 3 +-- tests/neg/structural.scala | 2 +- tests/pos/i19809.scala | 7 +++++++ 13 files changed, 46 insertions(+), 19 deletions(-) create mode 100644 tests/neg/i19809.check create mode 100644 tests/neg/i19809.scala create mode 100644 tests/pos/i19809.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 2d99cf201375..9c8b2566ed7e 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1943,12 +1943,27 @@ object desugar { case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) case _ => defn.AnyType 
:: Nil } + + val refinements1 = Trees.flatten: + refinements.mapConserve { + case tree: ValDef if tree.mods.is(Mutable) => + val getter = + cpy.DefDef(tree)(name = tree.name, paramss = Nil, tpt = tree.tpt, rhs = tree.rhs) + .withFlags(tree.mods.flags & (AccessFlags | Synthetic)) + val setterParam = makeSyntheticParameter(tpt = tree.tpt) + val setter = + cpy.DefDef(tree)(name = tree.name.setterName, paramss = List(List(setterParam)), tpt = untpd.scalaUnit, rhs = EmptyTree) + .withFlags(tree.mods.flags & (AccessFlags | Synthetic)) + Thicket(getter, setter) + case tree => tree + } + val parentCores = stripToCore(parent.tpe) val untpdParent = TypedSplice(parent) val (classParents, self) = if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) - val impl = Template(emptyConstructor, classParents, Nil, self, refinements) + val impl = Template(emptyConstructor, classParents, Nil, self, refinements1) TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) } diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index b1c71f34e2dc..5fdd01a61079 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -4380,6 +4380,7 @@ object Parsers { /** RefineStatSeq ::= RefineStat {semi RefineStat} * RefineStat ::= ‘val’ VarDef + * | ‘var’ VarDef * | ‘def’ DefDef * | ‘type’ {nl} TypeDef * (in reality we admit class defs and vars and filter them out afterwards in `checkLegal`) @@ -4392,10 +4393,7 @@ object Parsers { syntaxError(msg, tree.span) Nil tree match - case tree: ValDef if tree.mods.is(Mutable) => - fail(em"""refinement cannot be a mutable var. 
- |You can use an explicit getter ${tree.name} and setter ${tree.name}_= instead""") - case tree: MemberDef if !(tree.mods.flags & ModifierFlags).isEmpty => + case tree: MemberDef if !(tree.mods.flags & (ModifierFlags &~ Mutable)).isEmpty => fail(em"refinement cannot be ${(tree.mods.flags & ModifierFlags).flagStrings().mkString("`", "`, `", "`")}") case tree: DefDef if tree.termParamss.nestedExists(!_.rhs.isEmpty) => fail(em"refinement cannot have default arguments") diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ade395f835ca..2c6c2f19a78a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2211,7 +2211,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val refineCls = createSymbol(refineClsDef).asClass val TypeDef(_, impl: Template) = typed(refineClsDef): @unchecked val refinements1 = impl.body - assert(tree.refinements.hasSameLengthAs(refinements1), i"${tree.refinements}%, % > $refinements1%, %") val seen = mutable.Set[Symbol]() for (refinement <- refinements1) { // TODO: get clarity whether we want to enforce these conditions typr.println(s"adding refinement $refinement") diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 10f068e53c7f..4207a13ea66d 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -431,6 +431,7 @@ EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ### Definitions ```ebnf RefineDcl ::= ‘val’ ValDcl + | ‘var’ ValDcl | ‘def’ DefDcl | ‘type’ {nl} TypeDef ValDcl ::= ids ‘:’ Type diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index 1980bc4e0ab2..f8e7ba6a5cbc 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -415,6 +415,7 @@ EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ``` RefineDcl ::= ‘val’ ValDcl | ‘def’ DefDcl + | ‘var’ ValDcl | ‘type’ {nl} TypeDef 
ValDcl ::= ids ‘:’ Type DefDcl ::= DefSig ‘:’ Type diff --git a/tests/neg/i13703.check b/tests/neg/i13703.check index eb782c982295..a02bbdf407f7 100644 --- a/tests/neg/i13703.check +++ b/tests/neg/i13703.check @@ -1,10 +1,5 @@ --- Error: tests/neg/i13703.scala:3:17 ---------------------------------------------------------------------------------- -3 |val f: Foo { var i: Int } = new Foo { var i: Int = 0 } // error - | ^^^^^^^^^^ - | refinement cannot be a mutable var. - | You can use an explicit getter i and setter i_= instead --- [E007] Type Mismatch Error: tests/neg/i13703.scala:5:78 ------------------------------------------------------------- -5 |val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // error +-- [E007] Type Mismatch Error: tests/neg/i13703.scala:3:78 ------------------------------------------------------------- +3 |val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // error | ^ | Found: Object with Foo {...} | Required: Foo{val i: Int; def i_=(x: Int): Unit} diff --git a/tests/neg/i13703.scala b/tests/neg/i13703.scala index e8e54db8807d..6616b4f2e11c 100644 --- a/tests/neg/i13703.scala +++ b/tests/neg/i13703.scala @@ -1,7 +1,5 @@ trait Foo extends reflect.Selectable -val f: Foo { var i: Int } = new Foo { var i: Int = 0 } // error - val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // error val f3: Foo { def i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // OK diff --git a/tests/neg/i19809.check b/tests/neg/i19809.check new file mode 100644 index 000000000000..269eacd18fd2 --- /dev/null +++ b/tests/neg/i19809.check @@ -0,0 +1,10 @@ +-- [E120] Naming Error: tests/neg/i19809.scala:3:6 --------------------------------------------------------------------- +3 | def x_=(x: Int): Unit // error + | ^ + | Double definition: + | def x_=(x$1: Int): Unit in trait at line 2 and + | def x_=(x: Int): Unit in trait at line 3 + | have the same type after erasure. 
+ | + | Consider adding a @targetName annotation to one of the conflicting definitions + | for disambiguation. diff --git a/tests/neg/i19809.scala b/tests/neg/i19809.scala new file mode 100644 index 000000000000..02eb9b08faf5 --- /dev/null +++ b/tests/neg/i19809.scala @@ -0,0 +1,4 @@ +type A = Any { + var x : Int + def x_=(x: Int): Unit // error +} diff --git a/tests/neg/i4496b.scala b/tests/neg/i4496b.scala index e84c29fd9347..b19d4915474f 100644 --- a/tests/neg/i4496b.scala +++ b/tests/neg/i4496b.scala @@ -5,7 +5,7 @@ trait Foo2 { def a: Int } trait Foo3 { var a: Int } object TestStructuralVar { - type T0 = {var a: Int} // error + type T0 = {var a: Int} object TestStructuralVar { type T = {val a: Int; def a_=(x: Int): Unit} def upcast1(v: Foo1): T = v // error diff --git a/tests/neg/illegal-refinements.scala b/tests/neg/illegal-refinements.scala index 4a170bc345d8..374d3dca7a84 100644 --- a/tests/neg/illegal-refinements.scala +++ b/tests/neg/illegal-refinements.scala @@ -2,6 +2,5 @@ trait x0 { type T = String { val x: Int = 1 } // error: illegal refinement type U = String { def x(): Int = 1 } // error: illegal refinement - type V = String { var x: Int } // error: illegal refinement - + type V = String { var x: Int = 1 } // error: illegal refinement } diff --git a/tests/neg/structural.scala b/tests/neg/structural.scala index de70092c0396..e8fad254a801 100644 --- a/tests/neg/structural.scala +++ b/tests/neg/structural.scala @@ -11,7 +11,7 @@ object Test3 { type A = { def foo(x: Int): Unit; def foo(x: String): Unit } // error: overloaded definition // error: overloaded definition type B = { val foo: Int; def foo: Int } // error: duplicate foo - type C = { var foo: Int } // error: refinements cannot have vars + type C = { var foo: Int } trait Entry { type Key; val key: Key } type D = { def foo(e: Entry, k: e.Key): Unit } diff --git a/tests/pos/i19809.scala b/tests/pos/i19809.scala new file mode 100644 index 000000000000..4c1b55d01eeb --- /dev/null +++ 
b/tests/pos/i19809.scala @@ -0,0 +1,7 @@ +type A = Any { var x: Int } + +val f: Any { var i: Int } = new AnyRef { var i: Int = 0 } + +def Test = + summon[Any { def x: Int; def x_=(x: Int): Unit } <:< Any { var x: Int }] + summon[Any { var x: Int } <:< Any { def x: Int; def x_=(x: Int): Unit }] From 70afe4737a93ca36cbe35443e0c2c7cedf9a6e33 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Thu, 15 Feb 2024 11:32:25 +0100 Subject: [PATCH 036/465] Add Dockerfile from lampepfl/dotty-ci --- .github/Dockerfile | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .github/Dockerfile diff --git a/.github/Dockerfile b/.github/Dockerfile new file mode 100644 index 000000000000..d56ec6a59f2d --- /dev/null +++ b/.github/Dockerfile @@ -0,0 +1,25 @@ +FROM ubuntu:22.04 + +# The default locale is "POSIX" which is just ASCII. +ENV LANG C.UTF-8 +ENV DEBIAN_FRONTEND noninteractive +ENV TZ Europe/Zurich + +# Add packages to image, set default JDK version +RUN apt-get update && \ + apt-get upgrade -y && \ + # Use a PPA to get Java 17 + apt-get install -y software-properties-common && add-apt-repository ppa:openjdk-r/ppa && \ + apt-get install -y bash curl git ssh htop nano vim-tiny zile \ + openjdk-8-jdk-headless \ + openjdk-17-jdk-headless \ + openjdk-21-jdk-headless && \ + (curl -fsSL https://deb.nodesource.com/setup_18.x | bash -) && \ + apt-get install -y nodejs + + +# Install sbt +ENV SBT_HOME /usr/local/sbt +ENV PATH ${SBT_HOME}/bin:${PATH} +ENV SBT_VERSION 1.9.0 +RUN curl -sL "https://github.com/sbt/sbt/releases/download/v$SBT_VERSION/sbt-$SBT_VERSION.tgz" | gunzip | tar -x -C /usr/local \ No newline at end of file From 9a5b9b4598c567c60d6d339ae28bb7fbff4080db Mon Sep 17 00:00:00 2001 From: Hamza Remmal <56235032+hamzaremmal@users.noreply.github.com> Date: Wed, 3 Apr 2024 07:35:10 +0000 Subject: [PATCH 037/465] Automatically assign dependabot PRs (#20069) Automatically assign all the dependabot PRs to me and ask my reviews. 
[skip ci] --- .github/dependabot.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 99602049f5ec..f9cb18a0ad00 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,7 +1,10 @@ version: 2 updates: - - - package-ecosystem: "github-actions" + - package-ecosystem: github-actions directory: "/" schedule: - interval: "weekly" + interval: weekly + assignees: + - hamzaremmal + reviewers: + - hamzaremmal From 90c3fbde37a70fdd366ed25d2082e05ddf24ac3e Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 3 Apr 2024 13:29:38 +0200 Subject: [PATCH 038/465] Normalize types before collecting parts determining implicit scope This is necessary to ensure the implicit scope is consistent when involving match types, since they may or may not have been reduced before implicit search. We can for example get different results when loading from tasty than when in the same run. Fixes #20071 --- .../dotty/tools/dotc/typer/Implicits.scala | 6 ++-- tests/neg/i20071.scala | 28 +++++++++++++++++++ tests/pos/i15183/test_2.scala | 4 +++ 3 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 tests/neg/i20071.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5162b3fed1b9..949e791d0496 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -636,7 +636,7 @@ trait ImplicitRunInfo: else if implicitScopeCache.contains(t) then parts += t else partSeen += t - t.dealias match + t.dealias.normalized match case t: TypeRef => if isAnchor(t.symbol) then parts += t @@ -663,7 +663,6 @@ trait ImplicitRunInfo: traverseChildren(t) case t => traverseChildren(t) - traverse(t.normalized) catch case ex: Throwable => handleRecursive("collectParts of", t.show, ex) def apply(tp: Type): collection.Set[Type] = @@ -775,6 +774,7 @@ trait ImplicitRunInfo: * if 
`T` is of the form `(P#x).type`, the anchors of `P`. * - If `T` is the this-type of a static object, the anchors of a term reference to that object. * - If `T` is some other this-type `P.this.type`, the anchors of `P`. + * - If `T` is match type or an applied match alias, the anchors of the normalization of `T`. * - If `T` is some other type, the union of the anchors of each constituent type of `T`. * * The _implicit scope_ of a type `tp` is the smallest set S of term references (i.e. TermRefs) @@ -787,7 +787,7 @@ trait ImplicitRunInfo: * - If `T` is a reference to an opaque type alias named `A`, S includes * a reference to an object `A` defined in the same scope as the type, if it exists, * as well as the implicit scope of `T`'s underlying type or bounds. - * - If `T` is a reference to an an abstract type or match type alias named `A`, + * - If `T` is a reference to an an abstract type or unreducible match type alias named `A`, * S includes a reference to an object `A` defined in the same scope as the type, * if it exists, as well as the implicit scopes of `T`'s lower and upper bound, * if present. diff --git a/tests/neg/i20071.scala b/tests/neg/i20071.scala new file mode 100644 index 000000000000..2d3dd5fe17d1 --- /dev/null +++ b/tests/neg/i20071.scala @@ -0,0 +1,28 @@ + +trait Scope +object Scope: + given i: Int = ??? + +type ReferencesScope[S] >: Int <: Int + +type ScopeToInt[Why] = Why match + case Scope => Int + +def foo[T](using d: ReferencesScope[T]): Any = ??? + +def bar[T](using d: ScopeToInt[T]): Any = ??? + +def test: Unit = + foo[Scope] // ok + bar[Scope] // error + + import Scope.i + bar[Scope] // ok + + /* + Before the changes: + `ScopeToInt[Scope]` may or may not be reduced before implicit search, + thereby impacting the scope considered for the search. `Scope.i` is included + iff `Scope` still appears in the type, which is the case only before reduction. + In contrast, `ReferencesScope[Scope]` is ok since it will never lose the anchor. 
+ */ diff --git a/tests/pos/i15183/test_2.scala b/tests/pos/i15183/test_2.scala index 2069d5637734..eeb3848449be 100644 --- a/tests/pos/i15183/test_2.scala +++ b/tests/pos/i15183/test_2.scala @@ -1,4 +1,8 @@ // Fails in each cases below +import Decoder.{derived as _, given} +// NOTE Decoder.derived is already in the implicit scope +// but the others require an import as they depend on match type reduction + enum Env derives Decoder: case Local,Sit,Prod From 205272ce6b47e890a6f19639847bd1510a130871 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 3 Apr 2024 20:39:07 +0200 Subject: [PATCH 039/465] Refine overloading and implicit disambiguation We sometimes have two alternatives a.m and b.m with the same symbol but different prefixes. Previously these would always be ambiguous. We now try to disambiguate this so that the alternative with the more specific prefix wins. To determine this, we widen prefixes also going from module classes to their parents and then compare the resulting types. This might fix a problem in ScalaTest that popped up after #20054. --- .../dotty/tools/dotc/typer/Applications.scala | 32 ++++++++++++++++++- .../pos/implicit-prefix-disambiguation.scala | 14 ++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 tests/pos/implicit-prefix-disambiguation.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 82f4c89ae203..cb119b92431b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1807,8 +1807,38 @@ trait Applications extends Compatibility { else tp } + def widenPrefix(alt: TermRef): Type = alt.prefix.widen match + case pre: (TypeRef | ThisType) if pre.typeSymbol.is(Module) => + pre.parents.reduceLeft(TypeComparer.andType(_, _)) + case wpre => wpre + + /** If two alternatives have the same symbol, we pick the one with the most + * specific prefix. 
To determine that, we widen the prefix types and also + * widen module classes to the intersection of their parent classes. Then + * if one of the resulting types is a more specific value type than the other, + * it wins. Example: + * + * trait A { given M = ... } + * trait B extends A + * object a extends A + * object b extends B + * + * In this case `b.M` would be regarded as more specific than `a.M`. + */ + def comparePrefixes(pre1: Type, pre2: Type) = + val winsPrefix1 = isAsSpecificValueType(pre1, pre2) + val winsPrefix2 = isAsSpecificValueType(pre2, pre1) + if winsPrefix1 == winsPrefix2 then 0 + else if winsPrefix1 then 1 + else -1 + def compareWithTypes(tp1: Type, tp2: Type) = { - val ownerScore = compareOwner(alt1.symbol.maybeOwner, alt2.symbol.maybeOwner) + val ownerScore = + val sym1 = alt1.symbol + val sym2 = alt2.symbol + if sym1 == sym2 then comparePrefixes(widenPrefix(alt1), widenPrefix(alt2)) + else compareOwner(sym1.maybeOwner, sym2.maybeOwner) + def winsType1 = isAsSpecific(alt1, tp1, alt2, tp2) def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) diff --git a/tests/pos/implicit-prefix-disambiguation.scala b/tests/pos/implicit-prefix-disambiguation.scala new file mode 100644 index 000000000000..5059aa2db4eb --- /dev/null +++ b/tests/pos/implicit-prefix-disambiguation.scala @@ -0,0 +1,14 @@ +class I[X] + +trait A: + given I[B] = ??? +object A extends A + +trait B extends A +object B extends B + +//import B.given, A.given + +def Test = summon[I[B]] + + From 2e640c2b8624f99468cda2a69a30a008aa5c1ac4 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 3 Apr 2024 20:45:43 +0200 Subject: [PATCH 040/465] Make compareOwner symmetric compareOwner did certain tests for one side but not for the other, which made its outcome dependent on the order in which alternatives were presented. 
--- .../dotty/tools/dotc/typer/Applications.scala | 32 +++++++++++-------- .../dotty/tools/dotc/util/Signatures.scala | 4 +-- 2 files changed, 21 insertions(+), 15 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index cb119b92431b..ceef89f8bfff 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1614,11 +1614,12 @@ trait Applications extends Compatibility { * Module classes also inherit the relationship from their companions. This means, * if no direct derivation exists between `sym1` and `sym2` also perform the following * tests: - * - If both sym1 and sym1 are module classes that have companion classes, - * and sym2 does not inherit implicit members from a base class (#), - * compare the companion classes. - * - If sym1 is a module class with a companion, and sym2 is a normal class or trait, - * compare the companion with sym2. + * - If both sym1 and sym2 are module classes that have companion classes, + * compare the companion classes. Return the result of that comparison, + * provided the module class with the larger companion class does not itself + * inherit implicit members from a base class (#), + * - If one sym is a module class with a companion, and the other is a normal class or trait, + * compare the companion with the other class or trait. * * Condition (#) is necessary to make `compareOwner(_, _) > 0` a transitive relation. * For instance: @@ -1642,17 +1643,22 @@ trait Applications extends Compatibility { * This means we get an ambiguity between `a` and `b` in all cases. 
*/ def compareOwner(sym1: Symbol, sym2: Symbol)(using Context): Int = + def cls1 = sym1.companionClass + def cls2 = sym2.companionClass if sym1 == sym2 then 0 else if sym1.isSubClass(sym2) then 1 else if sym2.isSubClass(sym1) then -1 - else if sym1.is(Module) then - val cls1 = sym1.companionClass - if sym2.is(Module) then - if sym2.thisType.implicitMembers.forall(_.symbol.owner == sym2) then // test for (#) - compareOwner(cls1, sym2.companionClass) - else 0 - else compareOwner(cls1, sym2) - else 0 + else + if sym1.is(Module) && sym2.is(Module) then + val r = compareOwner(cls1, cls2) + if r == 0 then 0 + else + val larger = if r < 0 then sym1 else sym2 + if larger.thisType.implicitMembers.forall(_.symbol.owner == larger) then r + else 0 + else if sym1.is(Module) then compareOwner(cls1, sym2) + else if sym2.is(Module) then compareOwner(sym1, cls2) + else 0 /** Compare two alternatives of an overloaded call or an implicit search. * diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 0bd407261125..736633e0f6a7 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -495,8 +495,8 @@ object Signatures { case res => List(tpe) def isSyntheticEvidence(name: String) = - if !name.startsWith(NameKinds.ContextBoundParamName.separator) then false else - symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) + name.startsWith(NameKinds.ContextBoundParamName.separator) + && symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) def toTypeParam(tpe: PolyType): List[Param] = val evidenceParams = (tpe.paramNamess.flatten zip tpe.paramInfoss.flatten).flatMap: From 8eeee0bb4dbe156ee8b2384ab9a222aadecb9308 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 3 Apr 2024 21:45:53 +0200 Subject: [PATCH 041/465] Do prefix comparison only as a final tie breaker The alternatives with the same 
symbol could have nevertheless different types. We first want to disambiguate based on these types before we turn to prefixes as a final tie breaker. --- .../dotty/tools/dotc/typer/Applications.scala | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index ceef89f8bfff..10886f676732 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1839,11 +1839,7 @@ trait Applications extends Compatibility { else -1 def compareWithTypes(tp1: Type, tp2: Type) = { - val ownerScore = - val sym1 = alt1.symbol - val sym2 = alt2.symbol - if sym1 == sym2 then comparePrefixes(widenPrefix(alt1), widenPrefix(alt2)) - else compareOwner(sym1.maybeOwner, sym2.maybeOwner) + val ownerScore = compareOwner(alt1.symbol.maybeOwner, alt2.symbol.maybeOwner) def winsType1 = isAsSpecific(alt1, tp1, alt2, tp2) def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) @@ -1874,11 +1870,14 @@ trait Applications extends Compatibility { val strippedType2 = stripImplicit(fullType2) val result = compareWithTypes(strippedType1, strippedType2) - if (result != 0) result - else if (strippedType1 eq fullType1) - if (strippedType2 eq fullType2) 0 // no implicits either side: its' a draw + if result != 0 then result + else if strippedType1 eq fullType1 then + if strippedType2 eq fullType2 then + if alt1.symbol != alt2.symbol then 0 // no implicits either side: it's a draw ... + else comparePrefixes( // ... 
unless the symbol is the same, in which case + widenPrefix(alt1), widenPrefix(alt2)) // we compare prefixes else 1 // prefer 1st alternative with no implicits - else if (strippedType2 eq fullType2) -1 // prefer 2nd alternative with no implicits + else if strippedType2 eq fullType2 then -1 // prefer 2nd alternative with no implicits else compareWithTypes(fullType1, fullType2) // continue by comparing implicits parameters } end compare From 804294c2de2dfb1a87bed6611d350286baebdf56 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Fri, 5 Jan 2024 14:47:32 +0100 Subject: [PATCH 042/465] Inline `unapply`s in the inlining phase These currently got inlined while typing. Therefore they used to generate code that should not be pickled. --- .../src/dotty/tools/dotc/transform/Inlining.scala | 13 +++++++++---- compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 --- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 907fe948ac30..94df114290e4 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -90,10 +90,6 @@ class Inlining extends MacroTransform, IdentityDenotTransformer { else super.transform(tree) case _: Typed | _: Block => super.transform(tree) - case _ if Inlines.needsInlining(tree) => - val tree1 = super.transform(tree) - if tree1.tpe.isError then tree1 - else Inlines.inlineCall(tree1) case _: PackageDef => super.transform(tree) match case tree1: PackageDef => @@ -106,6 +102,15 @@ class Inlining extends MacroTransform, IdentityDenotTransformer { case tree1 => tree1 case _ => if tree.isType then tree + else if Inlines.needsInlining(tree) then + tree match + case tree: UnApply => + val fun1 = Inlines.inlinedUnapplyFun(tree.fun) + super.transform(cpy.UnApply(tree)(fun = fun1)) + case _ => + val tree1 = super.transform(tree) + if tree1.tpe.isError then tree1 + else 
Inlines.inlineCall(tree1) else super.transform(tree) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 0b05bcd078ff..17329dd57f4b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1956,9 +1956,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (bounds != null) sym.info = bounds } b - case t: UnApply if t.symbol.is(Inline) => - assert(!t.symbol.is(Transparent)) - cpy.UnApply(t)(fun = Inlines.inlinedUnapplyFun(t.fun)) // TODO inline these in the inlining phase (see #19382) case t => t } } From f055ceef7c8dc96fbed17ffa69d37b2bb5846b98 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Thu, 4 Apr 2024 10:28:27 +0200 Subject: [PATCH 043/465] Elide unit binding when beta-reducing See https://github.com/scala/scala3/discussions/20082#discussioncomment-9006501 --- .../tools/dotc/transform/BetaReduce.scala | 4 +++- .../backend/jvm/InlineBytecodeTests.scala | 20 +++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index 653a5e17990f..720ed45bbe6c 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -8,6 +8,7 @@ import MegaPhase.* import Symbols.*, Contexts.*, Types.*, Decorators.* import StdNames.nme import ast.TreeTypeMap +import Constants.Constant import scala.collection.mutable.ListBuffer @@ -133,7 +134,7 @@ object BetaReduce: else if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias else arg.tpe.widen val binding = ValDef(newSymbol(ctx.owner, param.name, flags, tpe, coord = arg.span), arg).withSpan(arg.span) - if !(tpe.isInstanceOf[ConstantType] && isPureExpr(arg)) then + if !((tpe.isInstanceOf[ConstantType] || tpe.derivesFrom(defn.UnitClass)) && isPureExpr(arg)) then 
bindings += binding binding.symbol @@ -147,6 +148,7 @@ object BetaReduce: val expansion1 = new TreeMap { override def transform(tree: Tree)(using Context) = tree.tpe.widenTermRefExpr match case ConstantType(const) if isPureExpr(tree) => cpy.Literal(tree)(const) + case tpe: TypeRef if tpe.derivesFrom(defn.UnitClass) && isPureExpr(tree) => cpy.Literal(tree)(Constant(())) case _ => super.transform(tree) }.transform(expansion) diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala index 6173842e9ad1..fcbc738f2934 100644 --- a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala @@ -765,4 +765,24 @@ class InlineBytecodeTests extends DottyBytecodeTest { diffInstructions(instructions1, instructions2)) } } + + @Test def beta_reduce_elide_unit_binding = { + val source = """class Test: + | def test = ((u: Unit) => u).apply(()) + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = List(Op(RETURN)) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + } From fa2f7bf1081714e3bf19129c50f2eec8e782eb1b Mon Sep 17 00:00:00 2001 From: Aviv Keller <38299977+RedYetiDev@users.noreply.github.com> Date: Thu, 4 Apr 2024 05:02:33 -0400 Subject: [PATCH 044/465] Optimize the documentation JS files (#20075) This PR optimizes the three files in the `/docs/_assets/js` directory for performance. In `toolbar.js`, jQuery selectors have been cached as variables to prevent constant calls to the DOM. 
In `sidebar.js`, the multiple `toggleClass(...).toggleClass(...)` have been merged into one In `api-search.js`: - Defined functions have been switched to newer ES format (`const myFunc = () => {}`) - JSON has been switched to **JS**ON (`{"key":"value"}` -> `{key:"value"}`) --- docs/_assets/js/api-search.js | 70 +++++++++++++---------------------- docs/_assets/js/sidebar.js | 2 +- docs/_assets/js/toolbar.js | 44 ++++++++++++---------- 3 files changed, 52 insertions(+), 64 deletions(-) diff --git a/docs/_assets/js/api-search.js b/docs/_assets/js/api-search.js index 4950d2067ffe..03981350e81b 100644 --- a/docs/_assets/js/api-search.js +++ b/docs/_assets/js/api-search.js @@ -28,66 +28,48 @@ * } * ``` */ -onmessage = function(e) { - var docs = e.data.docs; - var searchTerm = e.data.search; - - var regexForTerm = function(query) { - var escaped = query.replace(/([\.\*\+\?\|\(\)\[\]\\])/g, '\\$1'); +onmessage = function({ data: { docs, search } }) { + const regexForTerm = (query) => { + const escaped = query.replace(/([\.\*\+\?\|\(\)\[\]\\])/g, '\\$1'); if (query.toLowerCase() != query) { // Regexp that matches CamelCase subbits: "BiSe" is // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ... 
return new RegExp(escaped.replace(/([A-Z])/g,"[a-z]*$1")); } - else { // if query is all lower case make a normal case insensitive search - return new RegExp(escaped, "i"); - } + // if query is all lower case make a normal case insensitive search + return new RegExp(escaped, "i"); }; - var searchRegex = regexForTerm(searchTerm); + const searchRegex = regexForTerm(search); - var filterPackages = function(entity) { - switch(entity.kind) { - case "val": - case "def": - case "type": - case "package": - return false; - default: - return true; - } - }; + const filterPackages = (entity) => !["val", "def", "type", "package"].includes(entity.kind); - // look at this higher order function, such syntax: - var messageParentIfMatches = function(parent) { - return function(entity) { - var fullName = entity.path.join('.'); + const messageParentIfMatches = (parent) => (entity) => { + const fullName = entity.path.join('.'); - if (searchRegex.test(fullName)) { + if (searchRegex.test(fullName)) { + postMessage({ + type: "entityResult", + package: parent, + entity + }); + } + + entity.members.forEach((member) => { + if (searchRegex.test(member.name)) { postMessage({ - "type": "entityResult", - "package": parent, - "entity": entity + type: "memberResult", + package: parent, + parent: entity, + member }); } - - var searchChild = function(member) { - if (searchRegex.test(member.name)) { - postMessage({ - "type": "memberResult", - "package": parent, - "parent": entity, - "member": member, - }); - } - }; - entity.members.forEach(searchChild); - }; + }); }; - docs.forEach(function(pack) { + docs.forEach((pack) => { pack.members .filter(filterPackages) .forEach(messageParentIfMatches(pack)); }); -} +}; diff --git a/docs/_assets/js/sidebar.js b/docs/_assets/js/sidebar.js index aa377ed8aa0e..2832486c1d6a 100644 --- a/docs/_assets/js/sidebar.js +++ b/docs/_assets/js/sidebar.js @@ -2,5 +2,5 @@ function toggleSection(titleElement) { const title = $(titleElement); 
title.siblings("ul").toggleClass("toggled"); - title.children("i.fas").toggleClass("fa-angle-right").toggleClass("fa-angle-down"); + title.children("i.fas").toggleClass("fa-angle-right fa-angle-down"); } diff --git a/docs/_assets/js/toolbar.js b/docs/_assets/js/toolbar.js index be132e7db4a9..a799ca661dd9 100644 --- a/docs/_assets/js/toolbar.js +++ b/docs/_assets/js/toolbar.js @@ -1,20 +1,26 @@ -$(document).ready(function() { - $("#menu-icon").click(() => { - $(".sidebar").toggleClass("toggled"); - }) - $("#search-icon").click(() => { - $("#searchbar").toggleClass("shown"); - $("#search-api-input").focus(); - }) - const searchInput = $("#search-api-input"); - searchInput.keydown(evt => { - if (evt.which == 13) { - const baseUrl = $("#baseurl-input").val(); - window.location = ( - baseUrl + "/api/search.html?" + - "searchTerm=" + searchInput.val() + - "&previousUrl=" + encodeURI(window.location) - ); +$(function() { + const menuIcon = $("#menu-icon"); + const sidebar = $(".sidebar"); + menuIcon.on("click", () => { + sidebar.toggleClass("toggled"); + }); + + const searchIcon = $("#search-icon"); + const searchbar = $("#searchbar"); + const searchApiInput = $("#search-api-input"); + searchIcon.on("click", () => { + searchbar.toggleClass("shown"); + searchApiInput.focus(); + }); + + const baseurlInput = $("#baseurl-input"); + searchApiInput.keydown(evt => { + if (evt.which === 13) { // Enter + const baseUrl = baseurlInput.val(); + const searchTerm = searchApiInput.val(); + const previousUrl = encodeURI(window.location); + const searchUrl = `${baseUrl}/api/search.html?searchTerm=${searchTerm}&previousUrl=${previousUrl}`; + window.location = searchUrl; } - }) -}) + }); +}); From 9f5618b6ec8b8258f8a1006d6f65b895f229e3bb Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 4 Apr 2024 13:33:32 +0200 Subject: [PATCH 045/465] Set inlining phase in context used for checking macro trees --- .../src/dotty/tools/dotc/transform/TreeChecker.scala | 10 ++++++++++ 
tests/pos-macros/i17009/Macro_1.scala | 6 ++++++ tests/pos-macros/i17009/Main_2.scala | 6 ++++++ 3 files changed, 22 insertions(+) create mode 100644 tests/pos-macros/i17009/Macro_1.scala create mode 100644 tests/pos-macros/i17009/Main_2.scala diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 4a7548f40f43..f1f703fb07ee 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -837,9 +837,19 @@ object TreeChecker { def checkMacroGeneratedTree(original: tpd.Tree, expansion: tpd.Tree)(using Context): Unit = if ctx.settings.XcheckMacros.value then + // We want make sure that transparent inline macros are checked in the same way that + // non transparent macros are, so we try to prepare a context which would make + // the checks behave the same way for both types of macros. + // + // E.g. Different instances of skolem types are by definition not able to be a subtype of + // one another, however in practice this is only upheld during typer phase, and we do not want + // it to be upheld during this check. 
+ // See issue: #17009 val checkingCtx = ctx .fresh .setReporter(new ThrowingReporter(ctx.reporter)) + .setPhase(ctx.base.inliningPhase) + val phases = ctx.base.allPhases.toList val treeChecker = new LocalChecker(previousPhases(phases)) diff --git a/tests/pos-macros/i17009/Macro_1.scala b/tests/pos-macros/i17009/Macro_1.scala new file mode 100644 index 000000000000..0535220420e5 --- /dev/null +++ b/tests/pos-macros/i17009/Macro_1.scala @@ -0,0 +1,6 @@ +import scala.quoted._ + +object Macro { + transparent inline def transform[T](inline expr: T): T = ${ transformImpl[T]('expr) } + def transformImpl[T: Type](f: Expr[T])(using Quotes): Expr[T] = f +} diff --git a/tests/pos-macros/i17009/Main_2.scala b/tests/pos-macros/i17009/Main_2.scala new file mode 100644 index 000000000000..a32b032e8b9d --- /dev/null +++ b/tests/pos-macros/i17009/Main_2.scala @@ -0,0 +1,6 @@ +def processLine(line: String): Unit = { + Macro.transform { + line.split(" ").nn + ??? + } +} From c22e31344d45880443c217bf2495fd59e132771a Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 17 Aug 2023 13:44:14 +0200 Subject: [PATCH 046/465] add support for pipeline build of Scala and Java files - rename '-Yjava-tasty-output' to '-Yearly-tasty-output' because now Scala TASTy will also be written to this destination. - add '-Ypickle-java' alias of '-Yjava-tasty', as expected by Zinc - add '-Ypickle-write' alias of '-Yearly-tasty-output', as expected by Zinc - move ExtractAPI phase to after Pickler, this way we can do it in parallel with generating TASTy bytes. At the end of this phase we write the TASTy to the '-Yearly-tasty-output' destination. Also ensure that ExtractAPI phase runs with '-Yjava-tasty', even if no incremental callback is set (don't extract the API in this case). - test the pipelining with sbt scripted tests, including for inline methods and macros with pipelining - describe semantics with respect to suspensions, introduce -Yno-suspended-units flag for greater control by the user. 
--- .../dotty/tools/dotc/CompilationUnit.scala | 15 ++-- compiler/src/dotty/tools/dotc/Compiler.scala | 2 +- .../tools/dotc/config/ScalaSettings.scala | 9 +-- .../dotty/tools/dotc/config/Settings.scala | 4 +- .../src/dotty/tools/dotc/core/Phases.scala | 20 +++++- .../dotty/tools/dotc/core/SymbolLoaders.scala | 3 +- .../dotty/tools/dotc/inlines/Inliner.scala | 3 + .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 72 +++++++++++++++++-- .../dotty/tools/dotc/transform/Pickler.scala | 54 +++----------- .../a/src/main/scala/a/A.scala | 10 +++ .../b/src/main/scala/b/B.scala | 10 +++ .../Yearly-tasty-output-inline/build.sbt | 14 ++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../Yearly-tasty-output-inline/test | 3 + .../a/src/main/scala/a/A.scala | 5 ++ .../Yearly-tasty-output/b-early-out/.keep | 0 .../b/src/main/scala/b/B.scala | 5 ++ .../pipelining/Yearly-tasty-output/build.sbt | 23 ++++++ .../c/src/main/scala/c/C.scala | 9 +++ .../project/DottyInjectedPlugin.scala | 12 ++++ sbt-test/pipelining/Yearly-tasty-output/test | 5 ++ .../Yjava-tasty-annotation/build.sbt | 2 +- .../pipelining/Yjava-tasty-enum/build.sbt | 2 +- .../Yjava-tasty-from-tasty/build.sbt | 4 +- .../Yjava-tasty-fromjavaobject/build.sbt | 4 +- .../a/src/main/scala/a/A.java | 6 ++ .../b/src/main/scala/b/B.scala | 8 ++- .../pipelining/Yjava-tasty-generic/build.sbt | 2 +- .../pipelining/Yjava-tasty-paths/build.sbt | 2 +- .../Yjava-tasty-result-types/build.sbt | 2 +- .../a/src/main/scala/a/A.scala | 8 +++ .../b/src/main/scala/b/B.scala | 10 +++ .../pipelining-scala-inline/build.sbt | 35 +++++++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining/pipelining-scala-inline/test | 1 + .../a/src/main/scala/a/A.scala | 5 ++ .../b/src/main/scala/b/B.java | 5 ++ .../pipelining-scala-java-basic/build.sbt | 17 +++++ .../c/src/main/scala/c/C.scala | 15 ++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining-scala-java-basic/test | 1 + .../a/src/main/scala/a/A.scala | 18 +++++ 
.../b/src/main/scala/b/B.scala | 10 +++ .../pipelining-scala-macro-fail/build.sbt | 28 ++++++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining-scala-macro-fail/test | 2 + .../a/src/main/scala/a/A.scala | 13 ++++ .../a/src/main/scala/a/AConsume.scala | 5 ++ .../main/scala/a/AConsumeTransparent.scala | 5 ++ .../b/src/main/scala/b/B.scala | 14 ++++ .../pipelining-scala-macro-force/build.sbt | 45 ++++++++++++ .../src/main/scala/macros/MacroImpl.scala | 15 ++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining-scala-macro-force/test | 1 + .../a/src/main/scala/a/A.scala | 21 ++++++ .../a/src/main/scala/a/ASuspendInlining.scala | 5 ++ .../a/src/main/scala/a/ASuspendTyper.scala | 5 ++ .../b/src/main/scala/b/B.scala | 14 ++++ .../pipelining-scala-macro/build.sbt | 56 +++++++++++++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining/pipelining-scala-macro/test | 1 + .../a/src/main/scala/a/A.scala | 5 ++ .../b/src/main/scala/b/B.scala | 12 ++++ .../pipelining-scala-only/build.sbt | 12 ++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining/pipelining-scala-only/test | 1 + 66 files changed, 700 insertions(+), 79 deletions(-) create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/test create mode 100644 sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output/b-early-out/.keep create mode 100644 sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output/build.sbt create mode 100644 
sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output/test create mode 100644 sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-inline/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-inline/test create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/test create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/test create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala create mode 100644 
sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/test create mode 100644 sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/test create mode 100644 sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-only/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-only/test diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 2358739ebd74..4ea9b558ea7f 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -97,12 +97,15 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn // when this unit is unsuspended. 
depRecorder.clear() if !suspended then - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspended: $this") - suspended = true - ctx.run.nn.suspendedUnits += this - if ctx.phase == Phases.inliningPhase then - suspendedAtInliningPhase = true + if ctx.settings.YnoSuspendedUnits.value then + report.error(i"Compilation unit suspended $this (-Yno-suspended-units is set)") + else + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspended: $this") + suspended = true + ctx.run.nn.suspendedUnits += this + if ctx.phase == Phases.inliningPhase then + suspendedAtInliningPhase = true throw CompilationUnit.SuspendException() private var myAssignmentSpans: Map[Int, List[Span]] | Null = null diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 290df761d117..06ef70b4cea5 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -41,13 +41,13 @@ class Compiler { List(new semanticdb.ExtractSemanticDB.ExtractSemanticInfo) :: // Extract info into .semanticdb files List(new PostTyper) :: // Additional checks and cleanups after type checking List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) - List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols Nil /** Phases dealing with TASTY tree pickling and unpickling */ protected def picklerPhases: List[List[Phase]] = List(new Pickler) :: // Generate TASTY info + List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new Inlining) :: // Inline and execute macros List(new PostInlining) :: // Add mirror support for inlined code List(new CheckUnused.PostInlining) :: // Check for unused elements diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala 
b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 687adfe05ca7..5ac4cf2e5829 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -31,7 +31,7 @@ object ScalaSettings extends ScalaSettings // Kept as seperate type to avoid breaking backward compatibility abstract class ScalaSettings extends SettingGroup, AllScalaSettings: - val settingsByCategory: Map[SettingCategory, List[Setting[_]]] = + val settingsByCategory: Map[SettingCategory, List[Setting[_]]] = allSettings.groupBy(_.category) .view.mapValues(_.toList).toMap .withDefaultValue(Nil) @@ -43,7 +43,7 @@ abstract class ScalaSettings extends SettingGroup, AllScalaSettings: val verboseSettings: List[Setting[_]] = settingsByCategory(VerboseSetting).sortBy(_.name) val settingsByAliases: Map[String, Setting[_]] = allSettings.flatMap(s => s.aliases.map(_ -> s)).toMap - + trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: self: SettingGroup => @@ -380,6 +380,7 @@ private sealed trait YSettings: val YprintPos: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos", "Show tree positions.") val YprintPosSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos-syms", "Show symbol definitions positions.") val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") + val YnoSuspendedUnits: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-suspended-units", "Do not suspend units, e.g. when calling a macro defined in the same run. 
This will error instead of suspending.") val YnoPatmatOpt: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-patmat-opt", "Disable all pattern matching optimizations.") val YplainPrinter: Setting[Boolean] = BooleanSetting(ForkSetting, "Yplain-printer", "Pretty-print using a plain printer.") val YprintSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") @@ -439,7 +440,7 @@ private sealed trait YSettings: val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", "Show debug info when quote pattern match fails") // Pipeline compilation options - val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute pickles for .java defined symbols for use by build tools") - val YjavaTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yjava-tasty-output", "directory|jar", "(Internal use only!) destination for generated .tasty files containing Java type signatures.", NoAbstractFile) + val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java")) + val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Ypickle-write")) val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 816d85e6c6fd..87760a2a034e 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -357,8 +357,8 @@ 
object Settings: def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) - def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = - publish(Setting(category, prependName(name), descr, default, helpArg)) + def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile, aliases: List[String] = Nil): Setting[AbstractFile] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = publish(Setting(category, prependName(name), descr, default, aliases = aliases)) diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index c704846a82da..59736447af3c 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -210,6 +210,7 @@ object Phases { private var myTyperPhase: Phase = uninitialized private var myPostTyperPhase: Phase = uninitialized private var mySbtExtractDependenciesPhase: Phase = uninitialized + private var mySbtExtractAPIPhase: Phase = uninitialized private var myPicklerPhase: Phase = uninitialized private var myInliningPhase: Phase = uninitialized private var myStagingPhase: Phase = uninitialized @@ -235,6 +236,7 @@ object Phases { final def typerPhase: Phase = myTyperPhase final def postTyperPhase: Phase = myPostTyperPhase final def sbtExtractDependenciesPhase: Phase = mySbtExtractDependenciesPhase + final def sbtExtractAPIPhase: Phase = mySbtExtractAPIPhase final def picklerPhase: Phase = myPicklerPhase final def inliningPhase: Phase = 
myInliningPhase final def stagingPhase: Phase = myStagingPhase @@ -263,6 +265,7 @@ object Phases { myTyperPhase = phaseOfClass(classOf[TyperPhase]) myPostTyperPhase = phaseOfClass(classOf[PostTyper]) mySbtExtractDependenciesPhase = phaseOfClass(classOf[sbt.ExtractDependencies]) + mySbtExtractAPIPhase = phaseOfClass(classOf[sbt.ExtractAPI]) myPicklerPhase = phaseOfClass(classOf[Pickler]) myInliningPhase = phaseOfClass(classOf[Inlining]) myStagingPhase = phaseOfClass(classOf[Staging]) @@ -336,19 +339,29 @@ object Phases { /** skip the phase for a Java compilation unit, may depend on -Yjava-tasty */ def skipIfJava(using Context): Boolean = true + final def isAfterLastJavaPhase(using Context): Boolean = + // With `-Yjava-tasty` nominally the final phase is expected be ExtractAPI, + // otherwise drop Java sources at the end of TyperPhase. + // Checks if the last Java phase is before this phase, + // which always fails if the terminal phase is before lastJavaPhase. + val lastJavaPhase = if ctx.settings.YjavaTasty.value then sbtExtractAPIPhase else typerPhase + lastJavaPhase <= this + /** @pre `isRunnable` returns true */ def run(using Context): Unit /** @pre `isRunnable` returns true */ def runOn(units: List[CompilationUnit])(using runCtx: Context): List[CompilationUnit] = val buf = List.newBuilder[CompilationUnit] - // factor out typedAsJava check when not needed - val doSkipJava = ctx.settings.YjavaTasty.value && this <= picklerPhase && skipIfJava + + // Test that we are in a state where we need to check if the phase should be skipped for a java file, + // this prevents checking the expensive `unit.typedAsJava` unnecessarily. 
+ val doCheckJava = skipIfJava && !isAfterLastJavaPhase for unit <- units do given unitCtx: Context = runCtx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports if ctx.run.enterUnit(unit) then try - if doSkipJava && unit.typedAsJava then + if doCheckJava && unit.typedAsJava then () else run @@ -503,6 +516,7 @@ object Phases { def typerPhase(using Context): Phase = ctx.base.typerPhase def postTyperPhase(using Context): Phase = ctx.base.postTyperPhase def sbtExtractDependenciesPhase(using Context): Phase = ctx.base.sbtExtractDependenciesPhase + def sbtExtractAPIPhase(using Context): Phase = ctx.base.sbtExtractAPIPhase def picklerPhase(using Context): Phase = ctx.base.picklerPhase def inliningPhase(using Context): Phase = ctx.base.inliningPhase def stagingPhase(using Context): Phase = ctx.base.stagingPhase diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 8b5a7ddfa65c..cbdcf2d0fe43 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -456,7 +456,8 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { val tastyUUID = unpickler.unpickler.header.uuid new ClassfileTastyUUIDParser(classfile)(ctx).checkTastyUUID(tastyUUID) else - // This will be the case in any of our tests that compile with `-Youtput-only-tasty` + // This will be the case in any of our tests that compile with `-Youtput-only-tasty`, or when + // a tasty file compiled with `-Yearly-tasty-output` comes from an early output jar. 
report.inform(s"No classfiles found for $tastyFile when checking TASTy UUID") private def mayLoadTreesFromTasty(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 8bd89a71fa50..1b4d985c7c4c 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -1042,6 +1042,9 @@ class Inliner(val call: tpd.Tree)(using Context): for sym <- dependencies do if ctx.compilationUnit.source.file == sym.associatedFile then report.error(em"Cannot call macro $sym defined in the same source file", call.srcPos) + else if ctx.settings.YnoSuspendedUnits.value then + val addendum = ", suspension prevented by -Yno-suspended-units" + report.error(em"Cannot call macro $sym defined in the same compilation run$addendum", call.srcPos) if (suspendable && ctx.settings.XprintSuspension.value) report.echo(i"suspension triggered by macro call to ${sym.showLocated} in ${sym.associatedFile}", call.srcPos) if suspendable then diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 079687ac3122..d43a2f22a7fb 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -19,6 +19,7 @@ import StdNames.str import NameOps.* import inlines.Inlines import transform.ValueClasses +import transform.Pickler import dotty.tools.io.{File, FileExtension, JarArchive} import util.{Property, SourceFile} import java.io.PrintWriter @@ -51,7 +52,7 @@ class ExtractAPI extends Phase { override def description: String = ExtractAPI.description override def isRunnable(using Context): Boolean = { - super.isRunnable && ctx.runZincPhases + super.isRunnable && (ctx.runZincPhases || ctx.settings.YjavaTasty.value) } // Check no needed. 
Does not transform trees @@ -65,16 +66,75 @@ class ExtractAPI extends Phase { // after `PostTyper` (unlike `ExtractDependencies`, the simplication to trees // done by `PostTyper` do not affect this phase because it only cares about // definitions, and `PostTyper` does not change definitions). - override def runsAfter: Set[String] = Set(transform.PostTyper.name) + override def runsAfter: Set[String] = Set(transform.Pickler.name) override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = + val doZincCallback = ctx.runZincPhases + val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YearlyTastyOutput.value match + case earlyOut if earlyOut.isDirectory && earlyOut.exists => + Some(Pickler.EarlyFileWriter(earlyOut)) + case _ => + None val nonLocalClassSymbols = new mutable.HashSet[Symbol] - val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols)) - val units0 = super.runOn(units)(using ctx0) - ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) - units0 + val units0 = + if doZincCallback then + val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols)) + super.runOn(units)(using ctx0) + else + units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output) + sigWriter.foreach(writeSigFiles(units0, _)) + if doZincCallback then + ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) + if ctx.settings.YjavaTasty.value then + units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Yjava-tasty` is set + else + units0 end runOn + // Why we only write to early output in the first run? + // =================================================== + // TL;DR the point of pipeline compilation is to start downstream projects early, + // so we don't want to wait for suspended units to be compiled. + // + // But why is it safe to ignore suspended units? 
+ // If this project contains a transparent macro that is called in the same project, + // the compilation unit of that call will be suspended (if the macro implementation + // is also in this project), causing a second run. + // However before we do that run, we will have already requested sbt to begin + // early downstream compilation. This means that the suspended definitions will not + // be visible in *early* downstream compilation. + // + // However, sbt will by default prevent downstream compilation happening in this scenario, + // due to the existence of macro definitions. So we are protected from failure if user tries + // to use the suspended definitions. + // + // Additionally, it is recommended for the user to move macro implementations to another project + // if they want to force early output. In this scenario the suspensions will no longer occur, so now + // they will become visible in the early-output. + // + // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force` + // for examples of this in action. + // + // Therefore we only need to write to early output in the first run. We also provide the option + // to diagnose suspensions with the `-Yno-suspended-units` flag. 
+ private def writeSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = { + try + for + unit <- units + (cls, pickled) <- unit.pickled + if cls.isDefinedInCurrentRun + do + val internalName = + if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn + else cls.binaryClassName + val _ = writer.writeTasty(internalName, pickled()) + finally + writer.close() + if ctx.settings.verbose.value then + report.echo("[sig files written]") + end try + } + private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit = for cls <- nonLocalClassSymbols do val sourceFile = cls.source diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index b0aed580e824..3a4212547d16 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -56,7 +56,7 @@ class Pickler extends Phase { // No need to repickle trees coming from TASTY override def isRunnable(using Context): Boolean = - super.isRunnable && (!ctx.settings.fromTasty.value || ctx.settings.YjavaTasty.value) + super.isRunnable && !ctx.settings.fromTasty.value // when `-Yjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false @@ -94,9 +94,7 @@ class Pickler extends Phase { private val executor = Executor[Array[Byte]]() - private def useExecutor(using Context) = - Pickler.ParallelPickling && !ctx.settings.YtestPickler.value && - !ctx.settings.YjavaTasty.value // disable parallel pickling when `-Yjava-tasty` is set (internal testing only) + private def useExecutor(using Context) = Pickler.ParallelPickling && !ctx.settings.YtestPickler.value private def printerContext(isOutline: Boolean)(using Context): Context = if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_)) @@ -196,22 +194,13 @@ class 
Pickler extends Phase { } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YjavaTastyOutput.value match - case jar: JarArchive if jar.exists => - Some(Pickler.EarlyFileWriter(jar)) - case _ => - None - val units0 = - if ctx.settings.fromTasty.value then - // we still run the phase for the side effect of writing the pipeline tasty files - units + val result = + if useExecutor then + executor.start() + try super.runOn(units) + finally executor.close() else - if useExecutor then - executor.start() - try super.runOn(units) - finally executor.close() - else - super.runOn(units) + super.runOn(units) if ctx.settings.YtestPickler.value then val ctx2 = ctx.fresh .setSetting(ctx.settings.YreadComments, true) @@ -222,36 +211,9 @@ class Pickler extends Phase { .setReporter(new ThrowingReporter(ctx.reporter)) .addMode(Mode.ReadPositions) ) - val result = - if ctx.settings.YjavaTasty.value then - sigWriter.foreach(writeJavaSigFiles(units0, _)) - units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Yjava-tasty` is set - else - units0 result } - private def writeJavaSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = { - var count = 0 - try - for - unit <- units if unit.typedAsJava - (cls, pickled) <- unit.pickled - if cls.isDefinedInCurrentRun - do - val binaryClassName = cls.binaryClassName - val internalName = - if (cls.is(Module)) binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn - else binaryClassName - val _ = writer.writeTasty(internalName, pickled()) - count += 1 - finally - writer.close() - if ctx.settings.verbose.value then - report.echo(s"[$count java sig files written]") - end try - } - private def testUnpickler(using Context): Unit = pickling.println(i"testing unpickler at run ${ctx.runId}") ctx.initialize() diff --git 
a/sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala b/sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..930e0ee78eb9 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala @@ -0,0 +1,10 @@ +package a + +import scala.quoted.* + +object A { + inline def power(x: Double, inline n: Int): Double = + inline if (n == 0) 1.0 + else inline if (n % 2 == 1) x * power(x, n - 1) + else power(x * x, n / 2) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt b/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt new file mode 100644 index 000000000000..c0c726ce6a02 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt @@ -0,0 +1,14 @@ +// defines a inline method +lazy val a = project.in(file("a")) + .settings( + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), + scalacOptions += "-Ystop-after:firstTransform", + scalacOptions += "-Ycheck:all", + ) + +// uses the inline method, this is fine as there is no macro classloader involved +lazy val b = project.in(file("b")) + .settings( + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-early.jar"), + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala 
b/sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/test b/sbt-test/pipelining/Yearly-tasty-output-inline/test new file mode 100644 index 000000000000..9779d91ce131 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/test @@ -0,0 +1,3 @@ +> a/compile +# uses the early output jar of a +> b/run diff --git a/sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala b/sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output/b-early-out/.keep b/sbt-test/pipelining/Yearly-tasty-output/b-early-out/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..5e6fa369e309 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala @@ -0,0 +1,5 @@ +package b + +object B { + val bar: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output/build.sbt b/sbt-test/pipelining/Yearly-tasty-output/build.sbt new file mode 100644 index 000000000000..62990c616071 --- /dev/null +++ 
b/sbt-test/pipelining/Yearly-tasty-output/build.sbt @@ -0,0 +1,23 @@ +// early out is a jar +lazy val a = project.in(file("a")) + .settings( + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), + scalacOptions += "-Ystop-after:firstTransform", + scalacOptions += "-Ycheck:all", + ) + +// early out is a directory +lazy val b = project.in(file("b")) + .settings( + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "b-early-out").toString), + scalacOptions += "-Ystop-after:firstTransform", + scalacOptions += "-Ycheck:all", + ) + +// reads classpaths from early tasty outputs. No need for extra flags as the full tasty is available. +lazy val c = project.in(file("c")) + .settings( + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-early.jar"), + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "b-early-out"), + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala b/sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala new file mode 100644 index 000000000000..fd1876088778 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala @@ -0,0 +1,9 @@ +package c + +import a.A +import b.B + +object C { + val f: 2 = A.foo(1) + val g: 3 = B.bar(2) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := 
sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output/test b/sbt-test/pipelining/Yearly-tasty-output/test new file mode 100644 index 000000000000..52d60facc75b --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/test @@ -0,0 +1,5 @@ +> a/compile +# same as a but with a directory output +> b/compile +# c uses the early output jar of a and b +> c/compile diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt b/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt index 18f6b8224968..20a13d7d4ba0 100644 --- a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-annotation-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt b/sbt-test/pipelining/Yjava-tasty-enum/build.sbt index aca2391987e9..2083003d9ebe 100644 --- a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-enum/build.sbt @@ -2,7 +2,7 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), 
scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt b/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt index e4b15d3d9c7e..040c3bf6eac8 100644 --- a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt @@ -3,7 +3,7 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-pre-classes"), // send classfiles to a different directory ) @@ -17,7 +17,7 @@ lazy val a_from_tasty = project.in(file("a_from_tasty")) scalacOptions += "-from-tasty", // read the jar file tasties as the source files scalacOptions += "-Yjava-tasty", scalacOptions += "-Yallow-outline-from-tasty", // allow outline signatures to be read with -from-tasty - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a_from_tasty-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt index 6738db3016fa..9013490f1f54 100644 --- 
a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt @@ -2,7 +2,7 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory ) @@ -14,7 +14,7 @@ lazy val aCheck = project.in(file("a-check")) Compile / sources := (a / Compile / sources).value, // use the same sources as a compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes-2"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java index 1fcb7e78ae3d..c6e7431f0bbe 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java +++ b/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java @@ -1,6 +1,8 @@ // this test ensures that it is possible to read a generic java class from TASTy. 
package a; +import java.lang.Object; + public abstract class A { private final int _value; @@ -11,4 +13,8 @@ protected A(final int value) { public int value() { return _value; } + + public int hash(Object any) { + return any.hashCode(); + } } diff --git a/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala index f132e012a5fc..62e58aa72f94 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala +++ b/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala @@ -7,9 +7,15 @@ class B[T] { } object B { + + val someAny: Any = 23 + + val inner = (new B[Int]).inner + @main def test = { - val derived: Int = (new B[Int]).inner.value + val derived: Int = inner.value assert(derived == 23, s"actually was $derived") + assert(inner.hash(someAny) == someAny.hashCode, s"actually was ${inner.hash(someAny)}") } } diff --git a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt b/sbt-test/pipelining/Yjava-tasty-generic/build.sbt index 07e2ea56fbaa..9e2796600333 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-generic/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-generic-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-paths/build.sbt b/sbt-test/pipelining/Yjava-tasty-paths/build.sbt index d63d1f9a3f7e..49487fccb57e 100644 --- a/sbt-test/pipelining/Yjava-tasty-paths/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-paths/build.sbt @@ 
-1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-paths-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-paths-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-paths-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt b/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt index 512344f0635b..80bcf71b3365 100644 --- a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-result-types-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..c2dfb3e2c886 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala @@ -0,0 +1,8 @@ +package a + +object A { + inline def power(x: Double, inline n: Int): Double = + inline if (n == 0) 1.0 + else inline if (n % 2 == 1) x * power(x, n - 1) + else power(x * x, n / 2) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala 
b/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/build.sbt b/sbt-test/pipelining/pipelining-scala-inline/build.sbt new file mode 100644 index 000000000000..cd2a0c4eef07 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/build.sbt @@ -0,0 +1,35 @@ +ThisBuild / usePipelining := true + +// defines a purely inline function, and we always force the early output, this should not be needed in practice +// because pure inline methods do not have a Macro flag. +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // assert that the analysis contains the class `a.A` and that it does not have a macro. + override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + assert(a_A.exists(cls => !cls.hasMacro), "`a.A` wasn't found, or it had a macro.") + + // returning true will force the early output ping and activate downstream pipelining, + // this is fine for inline methods, but see `sbt-test/pipelining/pipelining-scala-macro-fail` for how + // we can force a failure by returning true here. 
+ true + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the purely inline function +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/test b/sbt-test/pipelining/pipelining-scala-inline/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java b/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java new file mode 100644 index 000000000000..7cac88d3cd46 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java @@ -0,0 +1,5 @@ +package b; + +public class B { + public static final String VALUE = "B"; +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt 
b/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt new file mode 100644 index 000000000000..2b49443ae8f0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt @@ -0,0 +1,17 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val b = project.in(file("b")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val c = project.in(file("c")) + .dependsOn(a, b) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala b/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala new file mode 100644 index 000000000000..b8e23e0b5920 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala @@ -0,0 +1,15 @@ +package c + +import a.A +import b.B + +object C { + val c_1: 2 = A.foo(1) + val c_2: "B" = B.VALUE + + @main def run = + assert(A.foo(0) == 1) + assert(A.foo(1) == 2) + assert(A.foo(2) == 3) + assert(B.VALUE == "B") +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/test b/sbt-test/pipelining/pipelining-scala-java-basic/test new file mode 100644 index 000000000000..77f2017c835f --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/test @@ -0,0 +1 
@@ +> c/run diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..d98a9d2c1159 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala @@ -0,0 +1,18 @@ +package a + +import scala.quoted.* + +object A { + + inline def power(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt new file mode 100644 index 000000000000..c98e664af507 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt @@ -0,0 +1,28 @@ +ThisBuild / usePipelining := true + +// defines a macro, normally this would cause sbt not to write the early output jar, but we force it +// this will cause b to fail to compile due to the missing macro class, +// see `sbt-test/pipelining/pipelining-scala-macro` for how by default sbt does the right thing +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks 
+ val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // force early output, this is safe in projects where the macro implementation is not in the same project, + // however in this build, b will now fail as it will not find the macro implementation class. + override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = true + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro, this will fail because we forced early output ping, causing the missing macro implementation class +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/test b/sbt-test/pipelining/pipelining-scala-macro-fail/test new file mode 100644 index 000000000000..13daffd6dfa0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/test @@ -0,0 +1,2 @@ +> a/compile +-> b/compile diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..520aec03482a --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala @@ -0,0 +1,13 @@ +package a + +import scala.quoted.* + +object A { + + 
transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ macros.MacroImpl.powerCode('x, 'n) } + + inline def power(x: Double, inline n: Int): Double = + ${ macros.MacroImpl.powerCode('x, 'n) } + +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala new file mode 100644 index 000000000000..1a4b0c234910 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala @@ -0,0 +1,5 @@ +package a + +object AConsume { + def sixtyFour: Double = A.power(2.0, 6) // cause a suspension in inlining +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala new file mode 100644 index 000000000000..cbd356047c4d --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala @@ -0,0 +1,5 @@ +package a + +object AConsumeTransparent { + def thirtyTwo: Double = A.transparentPower(2.0, 5) // cause a suspension in typer +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7955b1d7cfbb --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala @@ -0,0 +1,14 @@ +package b + +import a.A +import a.AConsumeTransparent +import a.AConsume + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) + assert(AConsumeTransparent.thirtyTwo == 32.0) // these are not actually suspended in this project + assert(AConsume.sixtyFour == 64.0) // check that suspended definition is still available +} diff --git 
a/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt new file mode 100644 index 000000000000..ee06080d0e76 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt @@ -0,0 +1,45 @@ +ThisBuild / usePipelining := true + +// defines just the macro implementations +lazy val macros = project.in(file("macros")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / exportPipelining := false // downstream waits until classfiles are available + ) + +// defines a macro, we need to force sbt to produce the early output jar +// because it will detect macros in the analysis. +// However the classes for the implementation are provided by `macros` +lazy val a = project.in(file("a")) + .dependsOn(macros) + .settings( + scalacOptions += "-Ycheck:all", + scalacOptions += "-Xprint-suspension", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // force early output, this is safe because the macro class from `macros` will be available. 
+ override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + val a_AConsume = internalClasses.get("a.AConsume") + val a_AConsumeTransparent = internalClasses.get("a.AConsumeTransparent") + assert(a_A.exists(cls => cls.hasMacro), s"`a.A` wasn't found, or it didn't have a macro.") + assert(a_AConsume.isDefined, s"`a.AConsume` wasn't found.") + assert(a_AConsumeTransparent.isDefined, s"`a.AConsumeTransparent` wasn't found.") + true // because `a.A` has macros, normally this would be false + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro, will still succeed as the macro implementation class is available +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala b/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala new file mode 100644 index 000000000000..d7c03aaf0ae0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala @@ -0,0 +1,15 @@ +package macros + +import scala.quoted.* + +object MacroImpl { + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object 
DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/test b/sbt-test/pipelining/pipelining-scala-macro-force/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..9077f0a2e849 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala @@ -0,0 +1,21 @@ +package a + +import scala.quoted.* + +object A { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + inline def power(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala new file mode 100644 index 000000000000..0fa449601d31 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala @@ -0,0 +1,5 @@ +package a + +object ASuspendInlining { + def sixtyFour: Double = A.power(2.0, 6) // cause a suspension in inlining +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala 
b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala new file mode 100644 index 000000000000..2af5139b30bc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala @@ -0,0 +1,5 @@ +package a + +object ASuspendTyper { + def thirtyTwo: Double = A.transparentPower(2.0, 5) // cause a suspension in typer +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..17f72ddf1644 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala @@ -0,0 +1,14 @@ +package b + +import a.A +import a.ASuspendTyper +import a.ASuspendInlining + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) + assert(ASuspendTyper.thirtyTwo == 32.0) // check that suspended definition is still available + assert(ASuspendInlining.sixtyFour == 64.0) // check that suspended definition is still available +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/build.sbt b/sbt-test/pipelining/pipelining-scala-macro/build.sbt new file mode 100644 index 000000000000..f8576cdae796 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/build.sbt @@ -0,0 +1,56 @@ +ThisBuild / usePipelining := true + +// defines a macro, sbt will not force the early output +// because it will detect macros in the analysis, so b will compile fine, +// see `sbt-test/pipelining/pipelining-scala-macro-fail` for how we can +// force a failure by always forcing early output. 
+lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + scalacOptions += "-Xprint-suspension", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + @volatile var knownSuspension = false + + def didFindMacros(analysis: xsbti.compile.CompileAnalysis) = { + val foundMacros = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal.values.exists(_.hasMacro) + assert(foundMacros, "expected macros to be found in the analysis.") + foundMacros + } + + // force early output, this is safe because the macro class from `macros` will be available. + override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + val a_ASuspendTyper = internalClasses.get("a.ASuspendTyper") + val a_ASuspendInlining = internalClasses.get("a.ASuspendInlining") + assert(a_A.isDefined, s"`a.A` wasn't found.") + + if (!knownSuspension) { + // this callback is called multiple times, so we only want to assert the first time, + // in subsequent runs the suspended definition will be "resumed", so a.ASuspendTyper be found. 
+ knownSuspension = true + assert(a_ASuspendTyper.isEmpty, s"`a.ASuspendTyper` should have been suspended initially.") + } + + assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.") + + // do what sbt does typically, + // it will not force early output because macros are found + !didFindMacros(analysis) + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro, sbt is smart enough to not use pipelining flags when upstream compilation has macros +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/test b/sbt-test/pipelining/pipelining-scala-macro/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala 
b/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..971d07d5656d --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala @@ -0,0 +1,12 @@ +package b + +import a.A + +object B { + val b: 2 = A.foo(1) + + @main def run = + assert(A.foo(0) == 1) + assert(A.foo(1) == 2) + assert(A.foo(2) == 3) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/build.sbt b/sbt-test/pipelining/pipelining-scala-only/build.sbt new file mode 100644 index 000000000000..16e182e48801 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/build.sbt @@ -0,0 +1,12 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/test b/sbt-test/pipelining/pipelining-scala-only/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/test @@ -0,0 +1 @@ +> b/run From 94162a38e2fa4d90dc848087a85164ef466940c1 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 8 Nov 2023 17:41:20 +0100 Subject: [PATCH 047/465] fix prediction in ProgressCallbackTest --- 
.../test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala b/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala index 489dc0f1759c..49fd3ee68d5f 100644 --- a/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala +++ b/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala @@ -97,7 +97,11 @@ final class ProgressCallbackTest extends DottyTest: locally: // (4) assert that the final progress recorded is at the target phase, // and progress is equal to the number of phases before the target. - val (befores, target +: next +: _) = runnableSubPhases.span(_ != targetPhase): @unchecked + // + // (4.1) extract the real befores by looking at the runnable phases + val (befores, target +: _) = runnableSubPhases.span(_ != targetPhase): @unchecked + // (4.2) extract the predicted next phase by looking at all phases + val (_, `target` +: next +: _) = allSubPhases.span(_ != targetPhase): @unchecked // (4.1) we expect cancellation to occur *as we enter* the target phase, // so no units should be visited in this phase. Therefore progress // should be equal to the number of phases before the target. 
(as we have 1 unit) From 20d635d439c7bb6ff344b7df82d505755ed8fc61 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 15 Mar 2024 14:22:17 +0100 Subject: [PATCH 048/465] support test scope by ignoring repeated pipelining flags --- .../tools/dotc/config/ScalaSettings.scala | 4 ++-- .../dotty/tools/dotc/config/Settings.scala | 24 +++++++++++++------ .../a/src/main/scala/a/A.scala | 5 ++++ .../a/src/test/scala/a/Hello.scala | 12 ++++++++++ sbt-test/pipelining/pipelining-test/build.sbt | 7 ++++++ .../project/DottyInjectedPlugin.scala | 12 ++++++++++ sbt-test/pipelining/pipelining-test/test | 12 ++++++++++ 7 files changed, 67 insertions(+), 9 deletions(-) create mode 100644 sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala create mode 100644 sbt-test/pipelining/pipelining-test/build.sbt create mode 100644 sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-test/test diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 5ac4cf2e5829..2e48ca78258f 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -440,7 +440,7 @@ private sealed trait YSettings: val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", "Show debug info when quote pattern match fails") // Pipeline compilation options - val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java")) - val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = 
List("-Ypickle-write")) + val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java"), preferPrevious = true) + val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Ypickle-write"), preferPrevious = true) val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 87760a2a034e..241ab34052a1 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -79,6 +79,7 @@ object Settings: aliases: List[String] = Nil, depends: List[(Setting[?], Any)] = Nil, ignoreInvalidArgs: Boolean = false, + preferPrevious: Boolean = false, propertyClass: Option[Class[?]] = None, deprecationMsg: Option[String] = None, // kept only for -Ykind-projector option compatibility @@ -125,11 +126,16 @@ object Settings: valueList.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") current ++ valueList else - if sstate.wasChanged(idx) then dangers :+= s"Flag $name set repeatedly" + if sstate.wasChanged(idx) then + assert(!preferPrevious, "should have shortcutted with ignoreValue, side-effect may be present!") + dangers :+= s"Flag $name set repeatedly" value ArgsSummary(updateIn(sstate, valueNew), args, errors, dangers) end update + def ignoreValue(args: List[String]): ArgsSummary = + ArgsSummary(sstate, args, errors, warnings) + def fail(msg: String, args: List[String]) = ArgsSummary(sstate, args, errors :+ msg, warnings) @@ -196,7 +202,8 @@ 
object Settings: def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match { case (BooleanTag, _) => - setBoolean(argRest, args) + if sstate.wasChanged(idx) && preferPrevious then ignoreValue(args) + else setBoolean(argRest, args) case (OptionTag, _) => update(Some(propertyClass.get.getConstructor().newInstance()), args) case (ct, args) => @@ -216,7 +223,10 @@ object Settings: case StringTag => setString(arg, argsLeft) case OutputTag => - setOutput(arg, argsLeft) + if sstate.wasChanged(idx) && preferPrevious then + ignoreValue(argsLeft) // do not risk side effects e.g. overwriting a jar + else + setOutput(arg, argsLeft) case IntTag => setInt(arg, argsLeft) case VersionTag => @@ -333,8 +343,8 @@ object Settings: assert(!name.startsWith("-"), s"Setting $name cannot start with -") "-" + name - def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = - publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases)) + def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false): Setting[Boolean] = + publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious)) def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) @@ -357,8 +367,8 @@ object Settings: def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) - def OutputSetting(category: SettingCategory, name: String, helpArg: 
String, descr: String, default: AbstractFile, aliases: List[String] = Nil): Setting[AbstractFile] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) + def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile, aliases: List[String] = Nil, preferPrevious: Boolean = false): Setting[AbstractFile] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, preferPrevious = preferPrevious)) def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = publish(Setting(category, prependName(name), descr, default, aliases = aliases)) diff --git a/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala b/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala new file mode 100644 index 000000000000..1cfa3424bd98 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala @@ -0,0 +1,12 @@ +package a + +import a.A + +import org.junit.Test + +class Hello { + + @Test def test(): Unit = { + assert(A.foo == (1,2,3)) + } +} diff --git a/sbt-test/pipelining/pipelining-test/build.sbt b/sbt-test/pipelining/pipelining-test/build.sbt new file mode 100644 index 000000000000..576ecc793ac6 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/build.sbt @@ -0,0 +1,7 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test", + ) diff --git 
a/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-test/test b/sbt-test/pipelining/pipelining-test/test new file mode 100644 index 000000000000..e2b8e39082b2 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/test @@ -0,0 +1,12 @@ +# run the tests on a project with pipelining +# exercises the fact that -Ypickle-java and -Ypickle-write +# flags are set twice. +# steps: +# - Compile scope is compiled with flags `-Ypickle-java -Ypickle-write early/a-early-7423784.jar` +# - sbt copies `early/a-early-7423784.jar` to `early/a-early.jar` +# - Test scope is compiled with flags `-Ypickle-java -Ypickle-write early-test/a-early-963232.jar -Ypickle-java -Ypickle-write early/a-early.jar -classpath early/a-early.jar` +# e.g. for some reason the classpath has the same `a-early.jar` that +# is passed with `Ypickle-write`. +# Therefore we MUST avoid even reading the second `-Ypickle-write` setting, +# otherwise we will zero-out `a-early.jar`, causing type errors because its contents are blank. 
+> a/test From c19b67ed5322b7c40e89a7365ca854c5d22ef917 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Sat, 16 Mar 2024 01:53:40 +0100 Subject: [PATCH 049/465] add test to show incremental compilation works under pipelining --- .../pipelining/pipelining-changes/build.sbt | 27 +++++++++++++++++++ .../pipelining-changes/changes/A1.scala | 5 ++++ .../project/CompileState.scala | 4 +++ .../project/DottyInjectedPlugin.scala | 11 ++++++++ .../src/main/scala/a/A.scala | 5 ++++ .../src/main/scala/a/App.scala | 11 ++++++++ sbt-test/pipelining/pipelining-changes/test | 7 +++++ 7 files changed, 70 insertions(+) create mode 100644 sbt-test/pipelining/pipelining-changes/build.sbt create mode 100644 sbt-test/pipelining/pipelining-changes/changes/A1.scala create mode 100644 sbt-test/pipelining/pipelining-changes/project/CompileState.scala create mode 100644 sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala create mode 100644 sbt-test/pipelining/pipelining-changes/test diff --git a/sbt-test/pipelining/pipelining-changes/build.sbt b/sbt-test/pipelining/pipelining-changes/build.sbt new file mode 100644 index 000000000000..630bd4be5b3e --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/build.sbt @@ -0,0 +1,27 @@ +import sbt.internal.inc.Analysis +import complete.DefaultParsers._ + +ThisBuild / usePipelining := true + +// Reset compiler iterations, necessary because tests run in batch mode +val recordPreviousIterations = taskKey[Unit]("Record previous iterations.") +recordPreviousIterations := { + val log = streams.value.log + CompileState.previousIterations = { + val previousAnalysis = (previousCompile in Compile).value.analysis.asScala + previousAnalysis match { + case None => + log.info("No previous analysis detected") + 0 + case Some(a: Analysis) => 
a.compilations.allCompilations.size + } + } +} + +val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.") + +checkIterations := { + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = ((compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }) - CompileState.previousIterations + assert(expected == actual, s"Expected $expected compilations, got $actual (previous: ${CompileState.previousIterations})") +} diff --git a/sbt-test/pipelining/pipelining-changes/changes/A1.scala b/sbt-test/pipelining/pipelining-changes/changes/A1.scala new file mode 100644 index 000000000000..db5605e419d1 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/changes/A1.scala @@ -0,0 +1,5 @@ +package a + +enum A { + case A, B +} diff --git a/sbt-test/pipelining/pipelining-changes/project/CompileState.scala b/sbt-test/pipelining/pipelining-changes/project/CompileState.scala new file mode 100644 index 000000000000..078db9c7bf56 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/project/CompileState.scala @@ -0,0 +1,4 @@ +// This is necessary because tests are run in batch mode +object CompileState { + @volatile var previousIterations: Int = -1 +} diff --git a/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..1c6c00400f04 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + ) +} diff --git a/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala new file 
mode 100644 index 000000000000..4a0eec46ec7e --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +enum A { + case A +} diff --git a/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala new file mode 100644 index 000000000000..a9862cea9dc4 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala @@ -0,0 +1,11 @@ +package a + +import scala.deriving.Mirror + +object App { + val m = summon[Mirror.SumOf[a.A]] + def size = compiletime.constValue[Tuple.Size[m.MirroredElemTypes]] + + @main def test = + assert(size == 2, s"Expected size 2, got $size") +} diff --git a/sbt-test/pipelining/pipelining-changes/test b/sbt-test/pipelining/pipelining-changes/test new file mode 100644 index 000000000000..e6fb01d57f5a --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/test @@ -0,0 +1,7 @@ +# test the interaction of incremental compilation and pipelining +> compile +> recordPreviousIterations +$ copy-file changes/A1.scala src/main/scala/a/A.scala +# A recompilation should trigger recompilation of App.scala, otherwise test assert will fail +> run +> checkIterations 2 From e6e01a3720f548ab28ea4cada4fb9322640ffb76 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Thu, 4 Apr 2024 17:23:46 +0200 Subject: [PATCH 050/465] Cleanup retains annotations in PostTyper --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- .../dotty/tools/dotc/transform/PostTyper.scala | 16 ++++++++++++++-- compiler/src/dotty/tools/dotc/typer/Typer.scala | 7 +------ tests/neg-custom-args/captures/byname.check | 2 +- 4 files changed, 17 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 9ab41859f170..b9e25a84fc38 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -454,7 +454,7 @@ class Setup 
extends PreRecheck, SymTransformer, SetupAPI: case _ => false def signatureChanges = - tree.tpt.hasRememberedType && !sym.isConstructor || paramSignatureChanges + (tree.tpt.hasRememberedType || tree.tpt.isInstanceOf[InferredTypeTree]) && !sym.isConstructor || paramSignatureChanges // Replace an existing symbol info with inferred types where capture sets of // TypeParamRefs and TermParamRefs put in correspondence by BiTypeMaps with the diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 3bcec80b5b10..db451c64650a 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -19,6 +19,7 @@ import config.Feature import util.SrcPos import reporting.* import NameKinds.WildcardParamName +import cc.* object PostTyper { val name: String = "posttyper" @@ -279,6 +280,17 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if !tree.symbol.is(Package) then tree else errorTree(tree, em"${tree.symbol} cannot be used as a type") + // Cleans up retains annotations in inferred type trees. This is needed because + // during the typer, it is infeasible to correctly infer the capture sets in most + // cases, resulting ill-formed capture sets that could crash the pickler later on. + // See #20035. 
+ private def cleanupRetainsAnnot(symbol: Symbol, tpt: Tree)(using Context): Tree = + tpt match + case tpt: InferredTypeTree if !symbol.allOverriddenSymbols.hasNext => + val tpe1 = cleanupRetains(tpt.tpe) + tpt.withType(tpe1) + case _ => tpt + override def transform(tree: Tree)(using Context): Tree = try tree match { // TODO move CaseDef case lower: keep most probable trees first for performance @@ -388,7 +400,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => registerIfHasMacroAnnotations(tree) checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) - val tree1 = cpy.ValDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) + val tree1 = cpy.ValDef(tree)(tpt = cleanupRetainsAnnot(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) if tree1.removeAttachment(desugar.UntupledParam).isDefined then checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) @@ -398,7 +410,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) annotateContextResults(tree) - val tree1 = cpy.DefDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) + val tree1 = cpy.DefDef(tree)(tpt = cleanupRetainsAnnot(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) processValOrDefDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef])) case tree: TypeDef => registerIfHasMacroAnnotations(tree) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 7a82ac78b75c..39afb1e73b1d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2187,12 +2187,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer errorTree(tree, em"Something's wrong: missing original symbol for type tree") } case _ => - val pt1 = cleanupRetains(pt) - // Cleans up retains annotations in 
inferred type trees. This is needed because - // during the typer, it is infeasible to correctly infer the capture sets in most - // cases, resulting ill-formed capture sets that could crash the pickler later on. - // See #20035. - completeTypeTree(InferredTypeTree(), pt1, tree) + completeTypeTree(InferredTypeTree(), pt, tree) def typedInLambdaTypeTree(tree: untpd.InLambdaTypeTree, pt: Type)(using Context): Tree = val tp = diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index 226bee2cd0e5..c54fe7d4208e 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -9,7 +9,7 @@ | Found: (x$0: Int) ->{cap2} Int | Required: (x$0: Int) -> Int | - | Note that the expected type Int => Int + | Note that the expected type Int -> Int | is the previously inferred result type of method test | which is also the type seen in separately compiled sources. | The new inferred type (x$0: Int) ->{cap2} Int From 52f3c08ac2e7525c758f78d6cdb10e3b216a8869 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Fri, 5 Apr 2024 00:11:35 +0200 Subject: [PATCH 051/465] Dealias types when checking contra-variant caps --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 7c75ed833945..12ec1fdafb32 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -289,7 +289,7 @@ extension (tp: Type) var ok = true def traverse(t: Type): Unit = if ok then - t match + t.dealias match case CapturingType(_, cs) if cs.isUniversal && variance <= 0 => ok = false case _ => From 8ade78ed432960dd8e43746b22f6bff8b9d65d0c Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 4 Apr 2024 23:45:44 +0200 Subject: [PATCH 052/465] Allow SAM types to contain match alias refinements Fixes 
#20080 --- .../src/dotty/tools/dotc/core/Types.scala | 2 +- tests/pos/i20080.scala | 32 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i20080.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 513e00347251..e793c1977e80 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5892,7 +5892,7 @@ object Types extends TypeUtils { /** Copy type aliases refinements to `toTp` from `fromTp` */ def withRefinements(toType: Type, fromTp: Type): Type = fromTp.dealias match - case RefinedType(fromParent, name, info: TypeAlias) if tp0.member(name).exists => + case RefinedType(fromParent, name, info: AliasingBounds) if tp0.member(name).exists => val parent1 = withRefinements(toType, fromParent) RefinedType(toType, name, info) case _ => toType diff --git a/tests/pos/i20080.scala b/tests/pos/i20080.scala new file mode 100644 index 000000000000..dbf6843fcbc4 --- /dev/null +++ b/tests/pos/i20080.scala @@ -0,0 +1,32 @@ + +trait Zippable[-A, -B]: + type Out + def zip(left: A, right: B): Out + +object Zippable extends ZippableLowPrio: + given append[A <: Tuple, B]: (Zippable[A, B] { type Out = Tuple.Append[A, B] }) = + (left, right) => left :* right + +trait ZippableLowPrio: + given pair[A, B]: (Zippable[A, B] { type Out = (A, B) }) = + (left, right) => (left, right) + + +object Minimization: + + trait Fun1: + type Out + def apply(x: Any): Out + + type M[X] = X match + case String => X + + def test[A] = + + val _: Fun1 { type Out = M[A] } = new Fun1: + type Out = M[A] + def apply(x: Any): Out = ??? + + val _: Fun1 { type Out = M[A] } = x => ??? + + val _: Fun1 { type Out = A match {case String => A} } = x => ??? 
From 613824c85eebd582c0bb579dc40b53311d3a381b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 19 May 2023 15:47:03 -0700 Subject: [PATCH 053/465] Warn if extension is hidden by member of receiver --- .../tools/dotc/core/SymDenotations.scala | 2 +- .../tools/dotc/reporting/ErrorMessageID.scala | 1 + .../dotty/tools/dotc/reporting/messages.scala | 11 ++ .../dotty/tools/dotc/typer/Applications.scala | 32 ++--- .../dotty/tools/dotc/typer/RefChecks.scala | 62 ++++++++- .../src/dotty/tools/dotc/typer/Typer.scala | 14 +-- .../tools/dotc/printing/PrintingTest.scala | 2 +- .../dotty/tools/scripting/ScriptTestEnv.scala | 4 +- .../src/tests/implicitConversions.scala | 8 +- .../src/tests/inheritedMembers1.scala | 1 + tests/warn/i16743.check | 84 +++++++++++++ tests/warn/i16743.scala | 119 ++++++++++++++++++ tests/warn/i9241.scala | 11 +- 13 files changed, 315 insertions(+), 36 deletions(-) create mode 100644 tests/warn/i16743.check create mode 100644 tests/warn/i16743.scala diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 5578028a82c2..7536e4bd76ef 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1355,7 +1355,7 @@ object SymDenotations { * @param inClass The class containing the result symbol's definition * @param site The base type from which member types are computed * - * inClass <-- find denot.symbol class C { <-- symbol is here + * inClass <-- find denot.symbol class C { <-- symbol is here } * * site: Subtype of both inClass and C */ diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 6011587a7100..33f5dcf1b1f5 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -207,6 +207,7 @@ enum ErrorMessageID(val isActive: Boolean = 
true) extends java.lang.Enum[ErrorMe case MatchTypeLegacyPatternID // errorNumber: 191 case UnstableInlineAccessorID // errorNumber: 192 case VolatileOnValID // errorNumber: 193 + case ExtensionNullifiedByMemberID // errorNumber: 194 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 484bc88c0983..bcdf65873008 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -2451,6 +2451,17 @@ class SynchronizedCallOnBoxedClass(stat: tpd.Tree)(using Context) |you intended.""" } +class ExtensionNullifiedByMember(method: Symbol, target: Symbol)(using Context) + extends Message(ExtensionNullifiedByMemberID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = + i"""Extension method ${hl(method.name.toString)} will never be selected + |because ${hl(target.name.toString)} already has a member with the same name.""" + def explain(using Context) = + i"""An extension method can be invoked as a regular method, but if that is intended, + |it should not be defined as an extension. 
+ |Although extensions can be overloaded, they do not overload existing member methods.""" + class TraitCompanionWithMutableStatic()(using Context) extends SyntaxMsg(TraitCompanionWithMutableStaticID) { def msg(using Context) = i"Companion of traits cannot define mutable @static fields" diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 3974dab75461..f88ce8d0a219 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -346,6 +346,22 @@ object Applications { val flags2 = sym1.flags | NonMember // ensures Select typing doesn't let TermRef#withPrefix revert the type val sym2 = sym1.copy(info = methType, flags = flags2) // symbol not entered, to avoid overload resolution problems fun.withType(sym2.termRef) + + /** Drop any leading implicit parameter sections */ + def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { + case mt: MethodType if mt.isImplicitMethod => + stripImplicit(resultTypeApprox(mt, wildcardOnly)) + case pt: PolyType => + pt.derivedLambdaType(pt.paramNames, pt.paramInfos, + stripImplicit(pt.resultType, wildcardOnly = true)) + // can't use TypeParamRefs for parameter references in `resultTypeApprox` + // since their bounds can refer to type parameters in `pt` that are not + // bound by the constraint. This can lead to hygiene violations if subsequently + // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. 
+ .asInstanceOf[PolyType].flatten + case _ => + tp + } } trait Applications extends Compatibility { @@ -1589,22 +1605,6 @@ trait Applications extends Compatibility { tp } - /** Drop any leading implicit parameter sections */ - def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { - case mt: MethodType if mt.isImplicitMethod => - stripImplicit(resultTypeApprox(mt, wildcardOnly)) - case pt: PolyType => - pt.derivedLambdaType(pt.paramNames, pt.paramInfos, - stripImplicit(pt.resultType, wildcardOnly = true)) - // can't use TypeParamRefs for parameter references in `resultTypeApprox` - // since their bounds can refer to type parameters in `pt` that are not - // bound by the constraint. This can lead to hygiene violations if subsequently - // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. - .asInstanceOf[PolyType].flatten - case _ => - tp - } - /** Compare owner inheritance level. * @param sym1 The first owner * @param sym2 The second owner diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 173d5e6b1f7e..3a4c0dd24acb 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -1033,8 +1033,7 @@ object RefChecks { * surprising names at runtime. E.g. in neg/i4564a.scala, a private * case class `apply` method would have to be renamed to something else. */ - def checkNoPrivateOverrides(tree: Tree)(using Context): Unit = - val sym = tree.symbol + def checkNoPrivateOverrides(sym: Symbol)(using Context): Unit = if sym.maybeOwner.isClass && sym.is(Private) && (sym.isOneOf(MethodOrLazyOrMutable) || !sym.is(Local)) // in these cases we'll produce a getter later @@ -1100,6 +1099,55 @@ object RefChecks { end checkUnaryMethods + /** Check that an extension method is not hidden, i.e., that it is callable as an extension method. 
+ * + * An extension method is hidden if it does not offer a parameter that is not subsumed + * by the corresponding parameter of the member with the same name (or of all alternatives of an overload). + * + * For example, it is not possible to define a type-safe extension `contains` for `Set`, + * since for any parameter type, the existing `contains` method will compile and would be used. + * + * If the member has a leading implicit parameter list, then the extension method must also have + * a leading implicit parameter list. The reason is that if the implicit arguments are inferred, + * either the member method is used or typechecking fails. If the implicit arguments are supplied + * explicitly and the member method is not applicable, the extension is checked, and its parameters + * must be implicit in order to be applicable. + * + * If the member does not have a leading implicit parameter list, then the argument cannot be explicitly + * supplied with `using`, as typechecking would fail. But the extension method may have leading implicit + * parameters, which are necessarily supplied implicitly in the application. The first non-implicit + * parameters of the extension method must be distinguishable from the member parameters, as described. + * + * If the extension method is nullary, it is always hidden by a member of the same name. + * (Either the member is nullary, or the reference is taken as the eta-expansion of the member.) 
+ */ + def checkExtensionMethods(sym: Symbol)(using Context): Unit = if sym.is(Extension) then + extension (tp: Type) + def strippedResultType = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true).resultType + def firstExplicitParamTypes = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true).firstParamTypes + def hasImplicitParams = tp.stripPoly match { case mt: MethodType => mt.isImplicitMethod case _ => false } + val target = sym.info.firstExplicitParamTypes.head // required for extension method, the putative receiver + val methTp = sym.info.strippedResultType // skip leading implicits and the "receiver" parameter + def hidden = + target.nonPrivateMember(sym.name) + .filterWithPredicate: + member => + val memberIsImplicit = member.info.hasImplicitParams + val paramTps = + if memberIsImplicit then methTp.stripPoly.firstParamTypes + else methTp.firstExplicitParamTypes + + paramTps.isEmpty || memberIsImplicit && !methTp.hasImplicitParams || { + val memberParamTps = member.info.stripPoly.firstParamTypes + !memberParamTps.isEmpty + && memberParamTps.lengthCompare(paramTps) == 0 + && memberParamTps.lazyZip(paramTps).forall((m, x) => x frozen_<:< m) + } + .exists + if !target.typeSymbol.denot.isAliasType && !target.typeSymbol.denot.isOpaqueAlias && hidden + then report.warning(ExtensionNullifiedByMember(sym, target.typeSymbol), sym.srcPos) + end checkExtensionMethods + /** Verify that references in the user-defined `@implicitNotFound` message are valid. * (i.e. they refer to a type variable that really occurs in the signature of the annotated symbol.) 
*/ @@ -1233,8 +1281,8 @@ class RefChecks extends MiniPhase { thisPhase => override def transformValDef(tree: ValDef)(using Context): ValDef = { if tree.symbol.exists then - checkNoPrivateOverrides(tree) val sym = tree.symbol + checkNoPrivateOverrides(sym) checkVolatile(sym) if (sym.exists && sym.owner.isTerm) { tree.rhs match { @@ -1246,9 +1294,11 @@ class RefChecks extends MiniPhase { thisPhase => } override def transformDefDef(tree: DefDef)(using Context): DefDef = { - checkNoPrivateOverrides(tree) - checkImplicitNotFoundAnnotation.defDef(tree.symbol.denot) - checkUnaryMethods(tree.symbol) + val sym = tree.symbol + checkNoPrivateOverrides(sym) + checkImplicitNotFoundAnnotation.defDef(sym.denot) + checkUnaryMethods(sym) + checkExtensionMethods(sym) tree } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index f28ec04be4af..fe76f408b927 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2564,17 +2564,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer vdef1.setDefTree } - def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = { - def canBeInvalidated(sym: Symbol): Boolean = + private def retractDefDef(sym: Symbol)(using Context): Tree = + // it's a discarded method (synthetic case class method or synthetic java record constructor or overridden member), drop it + val canBeInvalidated: Boolean = sym.is(Synthetic) && (desugar.isRetractableCaseClassMethodName(sym.name) || (sym.owner.is(JavaDefined) && sym.owner.derivesFrom(defn.JavaRecordClass) && sym.is(Method))) + assert(canBeInvalidated) + sym.owner.info.decls.openForMutations.unlink(sym) + EmptyTree - if !sym.info.exists then - // it's a discarded method (synthetic case class method or synthetic java record constructor or overriden member), drop it - assert(canBeInvalidated(sym)) - sym.owner.info.decls.openForMutations.unlink(sym) - return EmptyTree + 
def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else { // TODO: - Remove this when `scala.language.experimental.erasedDefinitions` is no longer experimental. // - Modify signature to `erased def erasedValue[T]: T` diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 73118216d6fa..382c029c86e0 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -25,7 +25,7 @@ import java.io.File class PrintingTest { def options(phase: String, flags: List[String]) = - List(s"-Xprint:$phase", "-color:never", "-classpath", TestConfiguration.basicClasspath) ::: flags + List(s"-Xprint:$phase", "-color:never", "-nowarn", "-classpath", TestConfiguration.basicClasspath) ::: flags private def compileFile(path: JPath, phase: String): Boolean = { val baseFilePath = path.toString.stripSuffix(".scala") diff --git a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala index ebae5bfca6be..1db92d5415b4 100644 --- a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala +++ b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala @@ -217,8 +217,10 @@ object ScriptTestEnv { def toUrl: String = Paths.get(absPath).toUri.toURL.toString + // Used to be an extension on String // Treat norm paths with a leading '/' as absolute (Windows java.io.File#isAbsolute treats them as relative) - def isAbsolute = p.norm.startsWith("/") || (isWin && p.norm.secondChar == ":") + //@annotation.nowarn // hidden by Path#isAbsolute + //def isAbsolute = p.norm.startsWith("/") || (isWin && p.norm.secondChar == ":") } extension(f: File) { diff --git a/scaladoc-testcases/src/tests/implicitConversions.scala b/scaladoc-testcases/src/tests/implicitConversions.scala index 720eab1ccb1a..c3051e653663 100644 --- 
a/scaladoc-testcases/src/tests/implicitConversions.scala +++ b/scaladoc-testcases/src/tests/implicitConversions.scala @@ -6,7 +6,9 @@ given Conversion[A, B] with { def apply(a: A): B = ??? } -extension (a: A) def extended_bar(): String = ??? +extension (a: A) + @annotation.nowarn + def extended_bar(): String = ??? class A { implicit def conversion(c: C): D = ??? @@ -45,7 +47,7 @@ class B { class C { def extensionInCompanion: String = ??? } - +@annotation.nowarn // extensionInCompanion object C { implicit def companionConversion(c: C): B = ??? @@ -70,4 +72,4 @@ package nested { } class Z -} \ No newline at end of file +} diff --git a/scaladoc-testcases/src/tests/inheritedMembers1.scala b/scaladoc-testcases/src/tests/inheritedMembers1.scala index d8fa44607e5e..561e50ceaec2 100644 --- a/scaladoc-testcases/src/tests/inheritedMembers1.scala +++ b/scaladoc-testcases/src/tests/inheritedMembers1.scala @@ -2,6 +2,7 @@ package tests package inheritedMembers1 +/*<-*/@annotation.nowarn/*->*/ class A { def A: String diff --git a/tests/warn/i16743.check b/tests/warn/i16743.check new file mode 100644 index 000000000000..a81b322e8016 --- /dev/null +++ b/tests/warn/i16743.check @@ -0,0 +1,84 @@ +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:30:6 -------------------------------------------------------- +30 | def t = 27 // warn + | ^ + | Extension method t will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:32:6 -------------------------------------------------------- +32 | def g(x: String)(i: Int): String = x*i // warn + | ^ + | Extension method g will never be selected + | because T already has a member with the same name. 
+ | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:33:6 -------------------------------------------------------- +33 | def h(x: String): String = x // warn + | ^ + | Extension method h will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:35:6 -------------------------------------------------------- +35 | def j(x: Any, y: Int): String = (x.toString)*y // warn + | ^ + | Extension method j will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:36:6 -------------------------------------------------------- +36 | def k(x: String): String = x // warn + | ^ + | Extension method k will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:38:6 -------------------------------------------------------- +38 | def m(using String): String = "m" + summon[String] // warn + | ^ + | Extension method m will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:39:6 -------------------------------------------------------- +39 | def n(using String): String = "n" + summon[String] // warn + | ^ + | Extension method n will never be selected + | because T already has a member with the same name. 
+ | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:40:6 -------------------------------------------------------- +40 | def o: String = "42" // warn + | ^ + | Extension method o will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:41:6 -------------------------------------------------------- +41 | def u: Int = 27 // warn + | ^ + | Extension method u will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:44:6 -------------------------------------------------------- +44 | def at: Int = 42 // warn + | ^ + | Extension method at will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:46:6 -------------------------------------------------------- +46 | def x(using String)(n: Int): Int = summon[String].toInt + n // warn + | ^ + | Extension method x will never be selected + | because T already has a member with the same name. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:47:6 -------------------------------------------------------- +47 | def y(using String)(s: String): String = s + summon[String] // warn + | ^ + | Extension method y will never be selected + | because T already has a member with the same name. 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i16743.scala b/tests/warn/i16743.scala new file mode 100644 index 000000000000..4c9c99cf30d0 --- /dev/null +++ b/tests/warn/i16743.scala @@ -0,0 +1,119 @@ + +trait G +given G = new G { override def toString = "mygiven" } +given String = "aGivenString" + +trait T: + def t = 42 + def f(x: String): String = x*2 + def g(x: String)(y: String): String = (x+y)*2 + def h(x: Any): String = x.toString*2 + def i(x: Any, y: String): String = (x.toString+y)*2 + def j(x: Any, y: Any): String = (x.toString+y.toString) + def k(using G): String = summon[G].toString + def l(using G): String = summon[G].toString + def m: String = "mystring" + def n: Result = Result() + def o: Int = 42 + def u: Int = 42 + def u(n: Int): Int = u + n + def v(n: Int): Int = u + n + def v(s: String): String = s + u + def end: Int = 42 + def at(n: Int) = n + def w(n: Int): Int = 42 + n + def x(n: Int): Int = 42 + n + def y(n: Int): Int = u + n + def y(s: String): String = s + u + +extension (_t: T) + def t = 27 // warn + def f(i: Int): String = String.valueOf(i) + def g(x: String)(i: Int): String = x*i // warn + def h(x: String): String = x // warn + def i(x: Any, y: Int): String = (x.toString)*y + def j(x: Any, y: Int): String = (x.toString)*y // warn + def k(x: String): String = x // warn + def l(using String): String = summon[String] + def m(using String): String = "m" + summon[String] // warn + def n(using String): String = "n" + summon[String] // warn + def o: String = "42" // warn + def u: Int = 27 // warn + def v(d: Double) = 3.14 + def end(n: Int): Int = 42 + n + def at: Int = 42 // warn + def w(using String)(n: String): Int = (summon[String] + n).toInt + def x(using String)(n: Int): Int = summon[String].toInt + n // warn + def y(using String)(s: String): String = s + summon[String] // warn + +// deferred extension is defined in subclass +trait Foo: + type X + extension (x: X) def t: Int + +trait Bar extends 
Foo: + type X = T + extension (x: X) def t = x.t // nowarn see Quote below + +// extension on opaque type matches member of underlying type +object Dungeon: + opaque type IArray[+T] = Array[? <: T] + object IArray: + extension (arr: IArray[Byte]) def length: Int = arr.asInstanceOf[Array[Byte]].length +trait DungeonDweller: + extension (arr: Dungeon.IArray[Byte]) def length: Int = 42 // nowarn + def f[A <: Byte](x: Dungeon.IArray[A]) = x.length +trait SadDungeonDweller: + def f[A](x: Dungeon.IArray[A]) = 27 // x.length // just to confirm, length is not a member + +trait Quote: + type Tree <: AnyRef + given TreeMethods: TreeMethods + trait TreeMethods: + extension (self: Tree) + def length(): Int +class QuotesImpl extends Quote: + type Tree = String + given TreeMethods: TreeMethods with + extension (self: Tree) + def length(): Int = self.length() // nowarn Tree already has a member with the same name. + +class Result: + def apply(using String): String = s"result ${summon[String]}" + +class Depends: + type Thing = String + def thing: Thing = "" +object Depending: + extension (using depends: Depends)(x: depends.Thing) + def y = 42 + def length() = 42 // nowarn see Quote above + def f(using d: Depends) = d.thing.y + def g(using d: Depends) = d.thing.length() + +@main def test() = + val x = new T {} + println(x.f(42)) // OK! + //println(x.g("x")(42)) // NOT OK! + println(x.h("hi")) // member! + println(x.i("hi", 5)) // OK! + println(x.j("hi", 5)) // member! 
+ println(x.k) + //println(x.k("hi")) // no, implicit is either omitted (supplied implicitly) or explicitly (using foo) + println(x.l) // usual, invokes member + println("L"+x.l(using "x")) // explicit, member doesn't check, try extension + println(x.m(using "x")) // same idea as previous, except member takes no implicits or any params + println(x.m(2)) // member checks by adapting result + println(x.n) // Result + println(x.n.apply) // apply Result with given + println(x.n(using "x")) // apply Result explicitly, not extension + println(x.end(2)) + println(x.at(2)) + println { + val p = x.at + p(2) + } + println { + given String = "42" + x.w("27") + } diff --git a/tests/warn/i9241.scala b/tests/warn/i9241.scala index ed1db2df0c8e..5b52bd8cd64d 100644 --- a/tests/warn/i9241.scala +++ b/tests/warn/i9241.scala @@ -22,22 +22,31 @@ final class Baz private (val x: Int) extends AnyVal { } extension (x: Int) + @annotation.nowarn def unary_- : Int = ??? + @annotation.nowarn def unary_+[T] : Int = ??? def unary_!() : Int = ??? // warn + @annotation.nowarn def unary_~(using Int) : Int = ??? end extension extension [T](x: Short) + @annotation.nowarn def unary_- : Int = ??? + @annotation.nowarn def unary_+[U] : Int = ??? def unary_!() : Int = ??? // warn + @annotation.nowarn def unary_~(using Int) : Int = ??? end extension extension (using Int)(x: Byte) + @annotation.nowarn def unary_- : Int = ??? + @annotation.nowarn def unary_+[U] : Int = ??? def unary_!() : Int = ??? // warn + @annotation.nowarn def unary_~(using Int) : Int = ??? 
-end extension \ No newline at end of file +end extension From fdb33a812fb74de1895d5b245ed543e873a39f82 Mon Sep 17 00:00:00 2001 From: som-snytt Date: Thu, 4 Apr 2024 08:31:27 -0700 Subject: [PATCH 054/465] More words in message Co-authored-by: Guillaume Martres --- .../dotty/tools/dotc/reporting/messages.scala | 2 +- tests/warn/i16743.check | 24 +++++++++---------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index bcdf65873008..63a8ed1e21fe 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -2456,7 +2456,7 @@ class ExtensionNullifiedByMember(method: Symbol, target: Symbol)(using Context) def kind = MessageKind.PotentialIssue def msg(using Context) = i"""Extension method ${hl(method.name.toString)} will never be selected - |because ${hl(target.name.toString)} already has a member with the same name.""" + |because ${hl(target.name.toString)} already has a member with the same name and compatible parameter types.""" def explain(using Context) = i"""An extension method can be invoked as a regular method, but if that is intended, |it should not be defined as an extension. diff --git a/tests/warn/i16743.check b/tests/warn/i16743.check index a81b322e8016..3010338cfb45 100644 --- a/tests/warn/i16743.check +++ b/tests/warn/i16743.check @@ -2,83 +2,83 @@ 30 | def t = 27 // warn | ^ | Extension method t will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. 
| | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:32:6 -------------------------------------------------------- 32 | def g(x: String)(i: Int): String = x*i // warn | ^ | Extension method g will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. | | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:33:6 -------------------------------------------------------- 33 | def h(x: String): String = x // warn | ^ | Extension method h will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. | | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:35:6 -------------------------------------------------------- 35 | def j(x: Any, y: Int): String = (x.toString)*y // warn | ^ | Extension method j will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. | | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:36:6 -------------------------------------------------------- 36 | def k(x: String): String = x // warn | ^ | Extension method k will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. 
| | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:38:6 -------------------------------------------------------- 38 | def m(using String): String = "m" + summon[String] // warn | ^ | Extension method m will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. | | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:39:6 -------------------------------------------------------- 39 | def n(using String): String = "n" + summon[String] // warn | ^ | Extension method n will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. | | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:40:6 -------------------------------------------------------- 40 | def o: String = "42" // warn | ^ | Extension method o will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. | | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:41:6 -------------------------------------------------------- 41 | def u: Int = 27 // warn | ^ | Extension method u will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. 
| | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:44:6 -------------------------------------------------------- 44 | def at: Int = 42 // warn | ^ | Extension method at will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. | | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:46:6 -------------------------------------------------------- 46 | def x(using String)(n: Int): Int = summon[String].toInt + n // warn | ^ | Extension method x will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. | | longer explanation available when compiling with `-explain` -- [E194] Potential Issue Warning: tests/warn/i16743.scala:47:6 -------------------------------------------------------- 47 | def y(using String)(s: String): String = s + summon[String] // warn | ^ | Extension method y will never be selected - | because T already has a member with the same name. + | because T already has a member with the same name and compatible parameter types. 
| | longer explanation available when compiling with `-explain` From 369ac1c5748000492acf4948271bf2d9ab7b7a33 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 5 Apr 2024 01:52:39 +0200 Subject: [PATCH 055/465] trigger GitHub actions From 0b512d681275f9ebeeed99275579664f8a9deea4 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Fri, 5 Apr 2024 09:34:59 +0200 Subject: [PATCH 056/465] Fix by-name parameter in beta-reduction Fixes #20095 --- .../dotty/tools/dotc/transform/BetaReduce.scala | 17 +++++++++++------ tests/run/i20095.check | 2 ++ tests/run/i20095.scala | 9 +++++++++ 3 files changed, 22 insertions(+), 6 deletions(-) create mode 100644 tests/run/i20095.check create mode 100644 tests/run/i20095.scala diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index 720ed45bbe6c..dd20ff9557ca 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -128,15 +128,20 @@ object BetaReduce: case ref @ TermRef(NoPrefix, _) if isPurePath(arg) => ref.symbol case _ => - val flags = Synthetic | (param.symbol.flags & Erased) - val tpe = + val isByNameArg = param.tpt.tpe.isInstanceOf[ExprType] + val flags = + if isByNameArg then Synthetic | Method | (param.symbol.flags & Erased) + else Synthetic | (param.symbol.flags & Erased) + val tpe0 = if arg.tpe.isBottomType then param.tpe.widenTermRefExpr else if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias else arg.tpe.widen - val binding = ValDef(newSymbol(ctx.owner, param.name, flags, tpe, coord = arg.span), arg).withSpan(arg.span) - if !((tpe.isInstanceOf[ConstantType] || tpe.derivesFrom(defn.UnitClass)) && isPureExpr(arg)) then - bindings += binding - binding.symbol + val tpe = if isByNameArg then ExprType(tpe0) else tpe0 + val bindingSymbol = newSymbol(ctx.owner, param.name, flags, tpe, coord = arg.span) + val binding = if isByNameArg then 
DefDef(bindingSymbol, arg) else ValDef(bindingSymbol, arg) + if isByNameArg || !((tpe.isInstanceOf[ConstantType] || tpe.derivesFrom(defn.UnitClass)) && isPureExpr(arg)) then + bindings += binding.withSpan(arg.span) + bindingSymbol val expansion = TreeTypeMap( oldOwners = ddef.symbol :: Nil, diff --git a/tests/run/i20095.check b/tests/run/i20095.check new file mode 100644 index 000000000000..0d55bed3a35c --- /dev/null +++ b/tests/run/i20095.check @@ -0,0 +1,2 @@ +foo +foo diff --git a/tests/run/i20095.scala b/tests/run/i20095.scala new file mode 100644 index 000000000000..e5761f49b539 --- /dev/null +++ b/tests/run/i20095.scala @@ -0,0 +1,9 @@ +inline def twice(inline thunk: =>Unit): Unit = + thunk + thunk + +inline def pipe(inline t: =>Unit, inline f: (=>Unit) => Unit): Unit = f(t) + +@main def Test = + pipe((), twice) + pipe(println("foo"), twice) From dbdfcffe40e9175868cfc3691133ef812a4bca8c Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 13 Mar 2024 11:25:49 +0100 Subject: [PATCH 057/465] Add message parameter to `@experimental` annotation --- .../src/dotty/tools/dotc/config/Feature.scala | 30 ++++++++++++------- .../dotty/tools/dotc/core/Annotations.scala | 26 ++++++++++++++++ .../tools/dotc/transform/AccessProxies.scala | 4 +-- .../tools/dotc/transform/PostTyper.scala | 9 +++--- .../scala/annotation/experimental.scala | 3 +- .../scala/annotation/experimental.scala | 4 +++ .../runtime/stdLibPatches/language.scala | 2 +- project/MiMaFilters.scala | 1 + tests/neg-macros/i18677-a.check | 4 +-- tests/neg-macros/i18677-b.check | 4 +-- ...perimental-message-experimental-flag.check | 10 +++++++ .../Lib_1.scala | 3 ++ .../Test_2.scala | 3 ++ tests/neg/experimental-message.check | 27 +++++++++++++++++ tests/neg/experimental-message.scala | 17 +++++++++++ tests/neg/use-experimental-def.check | 5 ++-- 16 files changed, 125 insertions(+), 27 deletions(-) rename library/{src => src-bootstrapped}/scala/annotation/experimental.scala (77%) create mode 100644 
library/src-non-bootstrapped/scala/annotation/experimental.scala create mode 100644 tests/neg/experimental-message-experimental-flag.check create mode 100644 tests/neg/experimental-message-experimental-flag/Lib_1.scala create mode 100644 tests/neg/experimental-message-experimental-flag/Test_2.scala create mode 100644 tests/neg/experimental-message.check create mode 100644 tests/neg/experimental-message.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 7eb95badd4d0..c90338302ce7 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -10,6 +10,7 @@ import util.{SrcPos, NoSourcePosition} import SourceVersion.* import reporting.Message import NameKinds.QualifiedName +import Annotations.ExperimentalAnnotation object Feature: @@ -131,12 +132,7 @@ object Feature: def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = if !isExperimentalEnabled then - report.error( - em"""Experimental $which may only be used under experimental mode: - | 1. in a definition marked as @experimental, or - | 2. compiling with the -experimental compiler flag, or - | 3. 
with a nightly or snapshot version of the compiler.$note - """, srcPos) + report.error(experimentalUseSite(which) + note, srcPos) private def ccException(sym: Symbol)(using Context): Boolean = ccEnabled && defn.ccExperimental.contains(sym) @@ -146,12 +142,24 @@ object Feature: if sym.hasAnnotation(defn.ExperimentalAnnot) then sym else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then sym.owner else NoSymbol - if !ccException(experimentalSym) then - val note = + if !isExperimentalEnabled && !ccException(experimentalSym) then + val msg = + experimentalSym.getAnnotation(defn.ExperimentalAnnot).map { + case ExperimentalAnnotation(msg) if msg.nonEmpty => s": $msg" + case _ => "" + }.getOrElse("") + val markedExperimental = if experimentalSym.exists - then i"$experimentalSym is marked @experimental" - else i"$sym inherits @experimental" - checkExperimentalFeature("definition", srcPos, s"\n\n$note") + then i"$experimentalSym is marked @experimental$msg" + else i"$sym inherits @experimental$msg" + report.error(markedExperimental + "\n\n" + experimentalUseSite("definition"), srcPos) + + private def experimentalUseSite(which: String): String = + s"""Experimental $which may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. compiling with the -experimental compiler flag, or + | 3. with a nightly or snapshot version of the compiler. + |""".stripMargin /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. 
*/ def checkExperimentalSettings(using Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index 45dba97a79f7..a5ef4c26eed1 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -275,4 +275,30 @@ object Annotations { } } } + + object ExperimentalAnnotation { + + /** Create an instance of `@experimental()` */ + def apply(msg: String, span: Span)(using Context): Annotation = + Annotation(defn.ExperimentalAnnot, Literal(Constant(msg)), span) + + /** Matches and extracts the message from an instance of `@experimental(msg)` + * Returns `Some("")` for `@experimental` with no message. + */ + def unapply(a: Annotation)(using Context): Option[String] = + if a.symbol ne defn.ExperimentalAnnot then + None + else a.argumentConstant(0) match + case Some(Constant(msg: String)) => Some(msg) + case _ => Some("") + + /** Makes a copy of the `@experimental(msg)` annotation on `sym` + * None is returned if the symbol does not have an `@experimental` annotation. + */ + def copy(sym: Symbol)(using Context): Option[Annotation] = + sym.getAnnotation(defn.ExperimentalAnnot).map { + case annot @ ExperimentalAnnotation(msg) => ExperimentalAnnotation(msg, annot.tree.span) + } + } + } diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 6d445887e1d9..1f9334164496 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -12,6 +12,7 @@ import Decorators.* import Types.* import util.Spans.Span import config.Printers.transforms +import Annotations.ExperimentalAnnotation /** A utility class for generating access proxies. Currently used for * inline accessors and protected accessors. 
@@ -84,8 +85,7 @@ abstract class AccessProxies { val sym = newSymbol(owner, name, Synthetic | Method, info, coord = accessed.span).entered if accessed.is(Private) then sym.setFlag(Final) else if sym.allOverriddenSymbols.exists(!_.is(Deferred)) then sym.setFlag(Override) - if accessed.hasAnnotation(defn.ExperimentalAnnot) then - sym.addAnnotation(defn.ExperimentalAnnot) + ExperimentalAnnotation.copy(accessed).foreach(sym.addAnnotation) sym } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 3bcec80b5b10..25fbae31bfe7 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -548,11 +548,10 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => def isTopLevelDefinitionInSource(sym: Symbol) = !sym.is(Package) && !sym.name.isPackageObjectName && (sym.owner.is(Package) || (sym.owner.isPackageObject && !sym.isConstructor)) - if !sym.hasAnnotation(defn.ExperimentalAnnot) - && (ctx.settings.experimental.value && isTopLevelDefinitionInSource(sym)) - || (sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot)) - then - sym.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) + if sym.is(Module) then + ExperimentalAnnotation.copy(sym.companionClass).foreach(sym.addAnnotation) + if !sym.hasAnnotation(defn.ExperimentalAnnot) && ctx.settings.experimental.value && isTopLevelDefinitionInSource(sym) then + sym.addAnnotation(ExperimentalAnnotation("Added by -experimental", sym.span)) private def scala2LibPatch(tree: TypeDef)(using Context) = val sym = tree.symbol diff --git a/library/src/scala/annotation/experimental.scala b/library/src-bootstrapped/scala/annotation/experimental.scala similarity index 77% rename from library/src/scala/annotation/experimental.scala rename to library/src-bootstrapped/scala/annotation/experimental.scala index 69ab5b9c7221..185db51c07c1 100644 --- 
a/library/src/scala/annotation/experimental.scala +++ b/library/src-bootstrapped/scala/annotation/experimental.scala @@ -6,4 +6,5 @@ package scala.annotation * @syntax markdown */ @deprecatedInheritance("Scheduled for being final in the future", "3.4.0") -class experimental extends StaticAnnotation +class experimental(message: String) extends StaticAnnotation: + def this() = this("") diff --git a/library/src-non-bootstrapped/scala/annotation/experimental.scala b/library/src-non-bootstrapped/scala/annotation/experimental.scala new file mode 100644 index 000000000000..dbc3296aa1ab --- /dev/null +++ b/library/src-non-bootstrapped/scala/annotation/experimental.scala @@ -0,0 +1,4 @@ +package scala.annotation + +@deprecatedInheritance("Scheduled for being final in the future", "3.4.0") +class experimental extends StaticAnnotation diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 70d5f2d41907..3c9c172918d2 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -84,7 +84,7 @@ object language: object captureChecking /** Experimental support for automatic conversions of arguments, without requiring - * a langauge import `import scala.language.implicitConversions`. + * a language import `import scala.language.implicitConversions`. 
* * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] */ diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 7565d23b2c1b..e45e7a81904b 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -8,6 +8,7 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of the library Build.previousDottyVersion -> Seq( + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), ), // Additions since last LTS diff --git a/tests/neg-macros/i18677-a.check b/tests/neg-macros/i18677-a.check index 963affc47181..d190ce36318a 100644 --- a/tests/neg-macros/i18677-a.check +++ b/tests/neg-macros/i18677-a.check @@ -7,10 +7,10 @@ |The tree does not conform to the compiler's tree invariants. | |Macro was: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental @extendFoo class AFoo() + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() | |The macro returned: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental @extendFoo class AFoo() extends Foo + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() extends Foo | |Error: |assertion failed: Parents of class symbol differs from the parents in the tree for class AFoo diff --git a/tests/neg-macros/i18677-b.check b/tests/neg-macros/i18677-b.check index b8fecde2e36d..eca2bdcde726 100644 --- a/tests/neg-macros/i18677-b.check +++ b/tests/neg-macros/i18677-b.check @@ -7,10 +7,10 @@ |The tree does not conform to the compiler's tree invariants. 
| |Macro was: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental @extendFoo class AFoo() + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() | |The macro returned: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental @extendFoo class AFoo() extends Foo + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() extends Foo | |Error: |assertion failed: Parents of class symbol differs from the parents in the tree for class AFoo diff --git a/tests/neg/experimental-message-experimental-flag.check b/tests/neg/experimental-message-experimental-flag.check new file mode 100644 index 000000000000..ce3a85a89916 --- /dev/null +++ b/tests/neg/experimental-message-experimental-flag.check @@ -0,0 +1,10 @@ + +-- Error: tests/neg/experimental-message-experimental-flag/Test_2.scala:3:10 ------------------------------------------- +3 |def g() = f() // error + | ^ + | method f is marked @experimental: Added by -experimental + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. compiling with the -experimental compiler flag, or + | 3. with a nightly or snapshot version of the compiler. diff --git a/tests/neg/experimental-message-experimental-flag/Lib_1.scala b/tests/neg/experimental-message-experimental-flag/Lib_1.scala new file mode 100644 index 000000000000..dc0d774092a2 --- /dev/null +++ b/tests/neg/experimental-message-experimental-flag/Lib_1.scala @@ -0,0 +1,3 @@ +//> using options -Yno-experimental -experimental + +def f() = ??? 
diff --git a/tests/neg/experimental-message-experimental-flag/Test_2.scala b/tests/neg/experimental-message-experimental-flag/Test_2.scala new file mode 100644 index 000000000000..3c6309486778 --- /dev/null +++ b/tests/neg/experimental-message-experimental-flag/Test_2.scala @@ -0,0 +1,3 @@ +//> using options -Yno-experimental + +def g() = f() // error diff --git a/tests/neg/experimental-message.check b/tests/neg/experimental-message.check new file mode 100644 index 000000000000..d57fe58f27cf --- /dev/null +++ b/tests/neg/experimental-message.check @@ -0,0 +1,27 @@ +-- Error: tests/neg/experimental-message.scala:15:2 -------------------------------------------------------------------- +15 | f1() // error + | ^^ + | method f1 is marked @experimental + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. compiling with the -experimental compiler flag, or + | 3. with a nightly or snapshot version of the compiler. +-- Error: tests/neg/experimental-message.scala:16:2 -------------------------------------------------------------------- +16 | f2() // error + | ^^ + | method f2 is marked @experimental + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. compiling with the -experimental compiler flag, or + | 3. with a nightly or snapshot version of the compiler. +-- Error: tests/neg/experimental-message.scala:17:2 -------------------------------------------------------------------- +17 | f3() // error + | ^^ + | method f3 is marked @experimental: not yet stable + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. compiling with the -experimental compiler flag, or + | 3. with a nightly or snapshot version of the compiler. 
diff --git a/tests/neg/experimental-message.scala b/tests/neg/experimental-message.scala new file mode 100644 index 000000000000..1fe9b7f0d793 --- /dev/null +++ b/tests/neg/experimental-message.scala @@ -0,0 +1,17 @@ +//> using options -Yno-experimental + +import scala.annotation.experimental + +@experimental +def f1() = ??? + +@experimental() +def f2() = ??? + +@experimental("not yet stable") +def f3() = ??? + +def g() = + f1() // error + f2() // error + f3() // error diff --git a/tests/neg/use-experimental-def.check b/tests/neg/use-experimental-def.check index 66c4a7a305b5..cb8fc1402b69 100644 --- a/tests/neg/use-experimental-def.check +++ b/tests/neg/use-experimental-def.check @@ -1,10 +1,9 @@ -- Error: tests/neg/use-experimental-def.scala:7:15 -------------------------------------------------------------------- 7 |def bar: Int = foo // error | ^^^ + | method foo is marked @experimental + | | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or | 2. compiling with the -experimental compiler flag, or | 3. with a nightly or snapshot version of the compiler. - | - | method foo is marked @experimental - | From 235c047315572b09f44cd79430215c6f817116fb Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Apr 2024 12:50:43 +0200 Subject: [PATCH 058/465] Refactor constant folding of applications Move them in typedApply/typedTypeApply instead of leaving them until adapt. This aligns these folds with folds of uniary operations, which are done already in typedSelect and avoids potentially several calls to ConstFold when arguments are passed to overloaded methods. 
--- compiler/src/dotty/tools/dotc/typer/Applications.scala | 6 ++++-- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- tests/pos/constfold.scala | 8 ++++++++ 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 82f4c89ae203..8f166f329dcb 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1124,12 +1124,13 @@ trait Applications extends Compatibility { } app } - app1 match { + val app2 = app1 match { case Apply(Block(stats, fn), args) => tpd.cpy.Block(app1)(stats, tpd.cpy.Apply(app1)(fn, args)) case _ => app1 } + ConstFold(app2) } /** Typecheck an Apply node with a typed function and possibly-typed arguments coming from `proto` */ @@ -1189,7 +1190,8 @@ trait Applications extends Compatibility { case _ => tree.withType(TryDynamicCallType) } if (typedFn.tpe eq TryDynamicCallType) tryDynamicTypeApply() - else assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) + else + ConstFold(assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 0b05bcd078ff..8ddfd3a20a6e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4200,7 +4200,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def adaptToSubType(wtp: Type): Tree = // try converting a constant to the target type - ConstFold(tree).tpe.widenTermRefExpr.normalized match + tree.tpe.widenTermRefExpr.normalized match case ConstantType(x) => val converted = x.convertTo(pt) if converted != null && (converted ne x) then diff --git a/tests/pos/constfold.scala b/tests/pos/constfold.scala index a45400d7f259..5a76d414032d 100644 --- a/tests/pos/constfold.scala +++ 
b/tests/pos/constfold.scala @@ -15,4 +15,12 @@ object Test extends App { Console.println(A.y); Console.println(A.z); Console.println(A.s); + + def f(x: 12): Int = 1 + def f(x: Int): Double = 2 + val x = f(12) + val _: Int = x + val y = f(2 * 6) + val _: Int = x + } From 8b3dadbd5308729743c3e64738f95f3cff2edf2b Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Fri, 5 Apr 2024 15:10:01 +0200 Subject: [PATCH 059/465] Fix TermRef prefixes not having their type healed --- compiler/src/dotty/tools/dotc/staging/HealType.scala | 2 +- tests/pos-macros/i19767.scala | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 tests/pos-macros/i19767.scala diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala index 08e1e91b0bad..5a26803c8137 100644 --- a/compiler/src/dotty/tools/dotc/staging/HealType.scala +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -35,7 +35,7 @@ class HealType(pos: SrcPos)(using Context) extends TypeMap { case tp: TermRef => val inconsistentRoot = levelInconsistentRootOfPath(tp) if inconsistentRoot.exists then levelError(inconsistentRoot, tp, pos) - else tp + else mapOver(tp) case tp: AnnotatedType => derivedAnnotatedType(tp, apply(tp.parent), tp.annot) case _ => diff --git a/tests/pos-macros/i19767.scala b/tests/pos-macros/i19767.scala new file mode 100644 index 000000000000..2fb655b58a1b --- /dev/null +++ b/tests/pos-macros/i19767.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +class ICons[K <: Singleton](val key: K) + +def filterX(using Quotes): Unit = + (??? 
: Expr[Any]) match + case '{ $y : ICons[k1] } => '{ ICons($y.key) } \ No newline at end of file From 93674c5722e53316791dcba46b2caa69c81c83e7 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 5 Apr 2024 16:15:14 +0200 Subject: [PATCH 060/465] add regression test for issue #13021 --- tests/pos-macros/i13021/DFBits.scala | 6 ++++++ tests/pos-macros/i13021/DFToken.scala | 14 ++++++++++++++ tests/pos-macros/i13021/Width.scala | 12 ++++++++++++ 3 files changed, 32 insertions(+) create mode 100644 tests/pos-macros/i13021/DFBits.scala create mode 100644 tests/pos-macros/i13021/DFToken.scala create mode 100644 tests/pos-macros/i13021/Width.scala diff --git a/tests/pos-macros/i13021/DFBits.scala b/tests/pos-macros/i13021/DFBits.scala new file mode 100644 index 000000000000..0ab76f1687ac --- /dev/null +++ b/tests/pos-macros/i13021/DFBits.scala @@ -0,0 +1,6 @@ +object DFBits: + opaque type Token[W <: Int] <: DFToken.Of[Int] = DFToken.Of[Int] + extension [W <: Int](token: Token[W]) + def data: Int = + token.asIR + 1 diff --git a/tests/pos-macros/i13021/DFToken.scala b/tests/pos-macros/i13021/DFToken.scala new file mode 100644 index 000000000000..ce8e2f11b733 --- /dev/null +++ b/tests/pos-macros/i13021/DFToken.scala @@ -0,0 +1,14 @@ +trait Token: + val data: Any + +opaque type DFToken = Token +object DFToken: + extension (of: DFToken) def asIR: Token = ??? + + opaque type Of[D] <: DFToken = DFToken + object Of: + extension [D](token: Of[D]) def width(using w: Width[?]): Int = ??? 
+ +def getWidth[W <: Int](token: DFBits.Token[W]): Int = token.width +def getData[W <: Int](token: DFBits.Token[W]): Int = + token.data //error here diff --git a/tests/pos-macros/i13021/Width.scala b/tests/pos-macros/i13021/Width.scala new file mode 100644 index 000000000000..a163e1b5ebf1 --- /dev/null +++ b/tests/pos-macros/i13021/Width.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +trait Width[T]: + type Out <: Int +object Width: + transparent inline given [T]: Width[T] = ${ getWidthMacro[T] } + def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = + '{ + new Width[T] { + type Out = Int + } + } From 5672caa1391d0efdc4ebc9e4f2cac087a46ea8fe Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Fri, 5 Apr 2024 00:38:35 +0200 Subject: [PATCH 061/465] Type desugared `transparent inline def unapply` call in the correct mode This regressed in 5648f12fb136886da4e1f00695627cbfde90ee9e. Fixes #20107. --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 3 ++- tests/pos/i20107.scala | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i20107.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 3974dab75461..ee38bf612c2d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1429,7 +1429,8 @@ trait Applications extends Compatibility { report.error(em"Structural unapply is not supported", unapplyFn.srcPos) (unapplyFn, unapplyAppCall) case Apply(fn, `dummyArg` :: Nil) => - val inlinedUnapplyFn = Inlines.inlinedUnapplyFun(fn) + val inlinedUnapplyFn = withoutMode(Mode.Pattern): + Inlines.inlinedUnapplyFun(fn) (inlinedUnapplyFn, inlinedUnapplyFn.appliedToArgs(`dummyArg` :: Nil)) case Apply(fn, args) => val (fn1, app) = rec(fn) diff --git a/tests/pos/i20107.scala b/tests/pos/i20107.scala new file mode 100644 index 000000000000..80ce350cf29d --- /dev/null +++ 
b/tests/pos/i20107.scala @@ -0,0 +1,6 @@ +object foo: + transparent inline def unapply[F](e: F): Option[F] = Some(e.asInstanceOf[F]) + +class A: + def test(x: Int) = x match + case foo(e) => e From 8e119ef23ce1244dfc29249e953a90035c174574 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 5 Apr 2024 19:46:42 +0200 Subject: [PATCH 062/465] Rollback constraints if isSameType failed in second direction --- compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 94872dc1e8d2..34614d3d0acd 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2304,7 +2304,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling Stats.record("cache same type") sames = new util.EqHashMap() val res = - try isSubType(tp1, tp2) && isSubType(tp2, tp1) + try rollbackConstraintsUnless(isSubType(tp1, tp2) && isSubType(tp2, tp1)) finally sameLevel -= 1 sames = savedSames From 0ddba5d2be669a4585ca1b74452c353c584c50b2 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Fri, 5 Apr 2024 19:30:42 +0200 Subject: [PATCH 063/465] Fix match type extraction of a MatchAlias Previously this failed with: ``` 14 | val x: Base.ExtractValue[Sub[Int *: EmptyTuple]] = 1 | ^ | Found: (1 : Int) | Required: Base.ExtractValue[Sub[Int *: EmptyTuple]] | | Note: a match type could not be fully reduced: | | trying to reduce Base.ExtractValue[Sub[Int *: EmptyTuple]] | failed since selector Sub[Int *: EmptyTuple] | does not uniquely determine parameter v in | case Base.BaseOf[v] => v | The computed bounds for the parameter are: | v = Tuple.Head[Int *: EmptyTuple] ``` Because the match type logic incorrectly believed that `v` was a non-alias TypeBounds. 
--- compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 2 +- tests/pos/match-type-extract-matchalias.scala | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 tests/pos/match-type-extract-matchalias.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 94872dc1e8d2..0b978bd468ab 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3518,7 +3518,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { stableScrut.member(typeMemberName) match case denot: SingleDenotation if denot.exists => val info = denot.info match - case TypeAlias(alias) => alias // Extract the alias + case alias: AliasingBounds => alias.alias // Extract the alias case ClassInfo(prefix, cls, _, _, _) => prefix.select(cls) // Re-select the class from the prefix case info => info // Notably, RealTypeBounds, which will eventually give a MatchResult.NoInstances val infoRefersToSkolem = stableScrut.isInstanceOf[SkolemType] && stableScrut.occursIn(info) diff --git a/tests/pos/match-type-extract-matchalias.scala b/tests/pos/match-type-extract-matchalias.scala new file mode 100644 index 000000000000..b15f87fb271c --- /dev/null +++ b/tests/pos/match-type-extract-matchalias.scala @@ -0,0 +1,11 @@ +trait Base: + type Value +trait Sub[T <: Tuple] extends Base: + type Value = Tuple.Head[T] +object Base: + type BaseOf[V] = Base { type Value = V } + type ExtractValue[B <: Base] = B match + case BaseOf[v] => v + +class Test: + val test: Base.ExtractValue[Sub[Int *: EmptyTuple]] = 1 From 3f68b854e990e9abdc161058558707879dbcf00c Mon Sep 17 00:00:00 2001 From: Raphael Jolly Date: Fri, 5 Apr 2024 15:34:58 +0200 Subject: [PATCH 064/465] Improve documentation of implicit conversions --- .../reference/changed-features/implicit-conversions-spec.md | 5 +++-- docs/_spec/07-implicits.md | 4 ++-- 2 files changed, 5 
insertions(+), 4 deletions(-) diff --git a/docs/_docs/reference/changed-features/implicit-conversions-spec.md b/docs/_docs/reference/changed-features/implicit-conversions-spec.md index a70321b70c15..22e423629bad 100644 --- a/docs/_docs/reference/changed-features/implicit-conversions-spec.md +++ b/docs/_docs/reference/changed-features/implicit-conversions-spec.md @@ -43,8 +43,9 @@ Views are applied in three situations: `v` which is applicable to `e` and whose result contains a method `m` which is applicable to `args` is searched. The search proceeds as in the case of implicit parameters, where the implicit scope is - the one of `T`. If such a view is found, the application - `e.m(args)` is converted to `v(e).m(args)`. + the one of `T => pt`, with `pt` being the structural type + `{ def m(args: T_1 , ... , T_n): U }`. If such a view is found, + the application `e.m(args)` is converted to `v(e).m(args)`. # Differences with Scala 2 implicit conversions diff --git a/docs/_spec/07-implicits.md b/docs/_spec/07-implicits.md index dacc0c0c277e..b5718597e5bc 100644 --- a/docs/_spec/07-implicits.md +++ b/docs/_spec/07-implicits.md @@ -63,7 +63,7 @@ The _parts_ of a type ´T´ are: - if ´T´ is a type projection `´S´#´U´`, the parts of ´S´ as well as ´T´ itself; - if ´T´ is a type alias, the parts of its expansion; - if ´T´ is an abstract type, the parts of its upper bound; -- if ´T´ denotes an implicit conversion to a type with a method with argument types ´T_1, ..., T_n´ and result type ´U´, the union of the parts of ´T_1, ..., T_n´ and ´U´; +- if ´T´ is a structural type with a method with argument types ´T_1, ..., T_n´ and result type ´U´, the union of the parts of ´T_1, ..., T_n´ and ´U´; - in all other cases, just ´T´ itself. Note that packages are internally represented as classes with companion modules to hold the package members. 
@@ -288,7 +288,7 @@ The search proceeds as in the case of implicit parameters, where the implicit sc If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m´`. 1. In a selection ´e.m(\mathit{args})´ with ´e´ of type ´T´, if the selector ´m´ denotes some member(s) of ´T´, but none of these members is applicable to the arguments ´\mathit{args}´. In this case a view ´v´ is searched which is applicable to ´e´ and whose result contains a method ´m´ which is applicable to ´\mathit{args}´. -The search proceeds as in the case of implicit parameters, where the implicit scope is the one of ´T´. If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m(\mathit{args})´`. +The search proceeds as in the case of implicit parameters, where the implicit scope is the one of `´T´ => ´\mathit{pt}´`, with ´\mathit{pt}´ being the structural type ´{ def m(\mathit{args}: T_1 , ... , T_n): U }´. If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m(\mathit{args})´`. The implicit view, if it is found, can accept its argument ´e´ as a call-by-value or as a call-by-name parameter. However, call-by-value implicits take precedence over call-by-name implicits. From 521ce95e21c8ecc9784f2178ef96133b95abafc0 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 26 Mar 2024 18:15:11 +0100 Subject: [PATCH 065/465] Always use baseType when constraining patternTp with scrutineeTp In the following example: ``` type Cond[B <: Boolean] <: Tuple2[String, String] = B match ... type Decoded[B <: Boolean] = Cond[B] match case (h1, _) => Int ``` When constraining the `(h1, _)` pattern with `Cond[B]`, we incorrectly assumed we could constrain h1 with B, because `Cond[B]` is an applied type of which the baseType is Tuple2. The issue can be fixed in constrainSimplePatternType by obtaining the baseType for both the patternTp and scrutineeTp, with the most general base of the two. 
So in the above example, we wound constrain `B` with String by obtaining `(String, String)` from `Cond[B]`. --- .../dotc/core/PatternTypeConstrainer.scala | 12 ++++---- tests/pos/i19706.scala | 29 +++++++++++++++++++ 2 files changed, 34 insertions(+), 7 deletions(-) create mode 100644 tests/pos/i19706.scala diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 38f8e19e2737..7942bbaa3d45 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -200,8 +200,8 @@ trait PatternTypeConstrainer { self: TypeComparer => * * This function expects to receive two types (scrutinee and pattern), both * of which have class symbols, one of which is derived from another. If the - * type "being derived from" is an applied type, it will 1) "upcast" the - * deriving type to an applied type with the same constructor and 2) infer + * type "being derived from" is an applied type, it will 1) "upcast" both + * types to an applied type with the same constructor and 2) infer * constraints for the applied types' arguments that follow from both * types being inhabited by one value (the scrutinee). 
* @@ -252,11 +252,9 @@ trait PatternTypeConstrainer { self: TypeComparer => val scrutineeCls = scrutineeTp.classSymbol // NOTE: we already know that there is a derives-from relationship in either direction - val upcastPattern = - patternCls.derivesFrom(scrutineeCls) - - val pt = if upcastPattern then patternTp.baseType(scrutineeCls) else patternTp - val tp = if !upcastPattern then scrutineeTp.baseType(patternCls) else scrutineeTp + val base = if patternCls.derivesFrom(scrutineeCls) then scrutineeCls else patternCls + val pt = patternTp.baseType(base) + val tp = scrutineeTp.baseType(base) val assumeInvariantRefinement = migrateTo3 || forceInvariantRefinement || refinementIsInvariant(patternTp) diff --git a/tests/pos/i19706.scala b/tests/pos/i19706.scala new file mode 100644 index 000000000000..ba66b3baf5c4 --- /dev/null +++ b/tests/pos/i19706.scala @@ -0,0 +1,29 @@ + +import scala.compiletime.ops.string.{Length, Matches, Substring} + +def emptyContext(): Unit = + summon[Decoded["Tuple(0, EmptyTuple)"] =:= 0 *: EmptyTuple] + +type Decoded[S <: String] = Matches[S, "Tuple(.+, .+)"] match + case true => Parsed[Substring[S, 6, 19], 0, ""] match + case (h, t) => Decoded["0"] *: EmptyTuple + case false => 0 + +type Parsed[S <: String, I <: Int, A <: String] <: (String, String) = Matches[S, "other"] match + case true => I match + case 1 => ("", "") + case _ => Parsed[Substring[S, 1, Length[S]], I, ""] + case false => ("0", "EmptyTuple") + + +object Minimization: + + type Cond[B <: Boolean] <: Tuple2[String, String] = B match + case true => ("", "") + case false => ("a", "b") + + type Decoded[B <: Boolean] = Cond[B] match + case (h1, _) => Int + + val _: Decoded[false] = 1 + From f7d7b658cb21530bef7034b27f9be60705da78ee Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 7 Apr 2024 18:44:09 +0200 Subject: [PATCH 066/465] Don't dealias when deciding which arguments to defer for subtype checking Fixes #10078 --- compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 4 ++-- 
tests/pos/i20078/AbstractShapeBuilder.java | 3 +++ tests/pos/i20078/Shape.java | 1 + tests/pos/i20078/Test.scala | 3 +++ tests/pos/i20078/Trait.java | 1 + 5 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i20078/AbstractShapeBuilder.java create mode 100644 tests/pos/i20078/Shape.java create mode 100644 tests/pos/i20078/Test.scala create mode 100644 tests/pos/i20078/Trait.java diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 94872dc1e8d2..31863a8ea600 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1714,8 +1714,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * comparison will instantiate or constrain type variables first. */ def isIncomplete(arg1: Type, arg2: Type): Boolean = - val arg1d = arg1.strippedDealias - val arg2d = arg2.strippedDealias + val arg1d = arg1.stripped + val arg2d = arg2.stripped (v >= 0) && (arg1d.isInstanceOf[AndType] || arg2d.isInstanceOf[OrType]) || (v <= 0) && (arg1d.isInstanceOf[OrType] || arg2d.isInstanceOf[AndType]) diff --git a/tests/pos/i20078/AbstractShapeBuilder.java b/tests/pos/i20078/AbstractShapeBuilder.java new file mode 100644 index 000000000000..5ca57c4f70a6 --- /dev/null +++ b/tests/pos/i20078/AbstractShapeBuilder.java @@ -0,0 +1,3 @@ +public abstract class AbstractShapeBuilder, S extends Shape> { + abstract public B addTrait(Trait trait); +} \ No newline at end of file diff --git a/tests/pos/i20078/Shape.java b/tests/pos/i20078/Shape.java new file mode 100644 index 000000000000..e4b4540362bc --- /dev/null +++ b/tests/pos/i20078/Shape.java @@ -0,0 +1 @@ +public interface Shape {} diff --git a/tests/pos/i20078/Test.scala b/tests/pos/i20078/Test.scala new file mode 100644 index 000000000000..b529af7e40c3 --- /dev/null +++ b/tests/pos/i20078/Test.scala @@ -0,0 +1,3 @@ +@main def Test = + val builder: 
AbstractShapeBuilder[? <: AbstractShapeBuilder[?, ?], ? <: Shape] = ??? + List.empty[Trait].foreach(builder.addTrait(_)) \ No newline at end of file diff --git a/tests/pos/i20078/Trait.java b/tests/pos/i20078/Trait.java new file mode 100644 index 000000000000..8082056c1e26 --- /dev/null +++ b/tests/pos/i20078/Trait.java @@ -0,0 +1 @@ +public interface Trait {} From 615653011e806b661ffbddb693f4887203c1adf8 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Sun, 7 Apr 2024 23:05:01 +0200 Subject: [PATCH 067/465] Address review comments --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 13 +++++-------- .../src/dotty/tools/dotc/transform/PostTyper.scala | 8 ++++++-- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 5c0dbd8508bf..b0ad8719ccfb 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -445,15 +445,12 @@ extension (tp: AnnotatedType) case ann: CaptureAnnotation => ann.boxed case _ => false -class CleanupRetains(using Context) extends TypeMap: - def apply(tp: Type): Type = cleanupRetains(tp, this) - /** Drop retains annotations in the type. */ -def cleanupRetains(tp: Type, theMap: CleanupRetains | Null = null)(using Context): Type = - def mapOver = (if theMap != null then theMap else new CleanupRetains).mapOver(tp) - tp match - case RetainingType(tp, _) => tp - case _ => mapOver +class CleanupRetains(using Context) extends TypeMap: + def apply(tp: Type): Type = + tp match + case RetainingType(tp, _) => tp + case _ => mapOver(tp) /** An extractor for `caps.reachCapability(ref)`, which is used to express a reach * capability as a tree in a @retains annotation. 
diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index db451c64650a..a977694ded27 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -286,8 +286,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => // See #20035. private def cleanupRetainsAnnot(symbol: Symbol, tpt: Tree)(using Context): Tree = tpt match - case tpt: InferredTypeTree if !symbol.allOverriddenSymbols.hasNext => - val tpe1 = cleanupRetains(tpt.tpe) + case tpt: InferredTypeTree + if !symbol.allOverriddenSymbols.hasNext => + // if there are overridden symbols, the annotation comes from an explicit type of the overridden symbol + // and should be retained. + val tm = new CleanupRetains + val tpe1 = tm(tpt.tpe) tpt.withType(tpe1) case _ => tpt diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 39afb1e73b1d..0b05bcd078ff 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -48,7 +48,7 @@ import staging.StagingLevel import reporting.* import Nullables.* import NullOpsDecorator.* -import cc.{CheckCaptures, isRetainsLike, cleanupRetains} +import cc.{CheckCaptures, isRetainsLike} import config.Config import config.MigrationVersion From 447cdf48ef7f2cae11118ad9b31d7e62c2da3e43 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Mon, 3 Jul 2023 17:50:37 +0200 Subject: [PATCH 068/465] Allow to beta reduce curried function applications in quotes reflect Previously, the curried functions with multiple applications were not able to be beta-reduced in any way, which was unexpected. Now we allow reducing any number of top-level function applications for a curried function. This was also made clearer in the documentation for the affected (Expr.betaReduce and Term.betaReduce) methods. 
--- .../quoted/runtime/impl/QuotesImpl.scala | 27 ++++--- library/src/scala/quoted/Expr.scala | 43 ++++++++-- library/src/scala/quoted/Quotes.scala | 43 ++++++++-- tests/pos-macros/i17506/Macro_1.scala | 80 +++++++++++++++++++ tests/pos-macros/i17506/Test_2.scala | 11 +++ 5 files changed, 183 insertions(+), 21 deletions(-) create mode 100644 tests/pos-macros/i17506/Macro_1.scala create mode 100644 tests/pos-macros/i17506/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 256940645ec3..b837473ff22c 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -396,17 +396,22 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end TermTypeTest object Term extends TermModule: - def betaReduce(tree: Term): Option[Term] = - tree match - case tpd.Block(Nil, expr) => - for e <- betaReduce(expr) yield tpd.cpy.Block(tree)(Nil, e) - case tpd.Inlined(_, Nil, expr) => - betaReduce(expr) - case _ => - val tree1 = dotc.transform.BetaReduce(tree) - if tree1 eq tree then None - else Some(tree1.withSpan(tree.span)) - + def betaReduce(tree: Term): Option[Term] = + val tree1 = new dotty.tools.dotc.ast.tpd.TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match { + case tpd.Block(Nil, _) | tpd.Inlined(_, Nil, _) => + super.transform(tree) + case tpd.Apply(sel @ tpd.Select(expr, nme), args) => + val tree1 = cpy.Apply(tree)(cpy.Select(sel)(transform(expr), nme), args) + dotc.transform.BetaReduce(tree1).withSpan(tree.span) + case tpd.Apply(ta @ tpd.TypeApply(sel @ tpd.Select(expr: Apply, nme), tpts), args) => + val tree1 = cpy.Apply(tree)(cpy.TypeApply(ta)(cpy.Select(sel)(transform(expr), nme), tpts), args) + dotc.transform.BetaReduce(tree1).withSpan(tree.span) + case _ => + dotc.transform.BetaReduce(tree).withSpan(tree.span) + } + }.transform(tree) + if tree1 == tree 
then None else Some(tree1) end Term given TermMethods: TermMethods with diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala index 525f647eaaac..f1045e5bdaca 100644 --- a/library/src/scala/quoted/Expr.scala +++ b/library/src/scala/quoted/Expr.scala @@ -10,12 +10,45 @@ abstract class Expr[+T] private[scala] () object Expr { /** `e.betaReduce` returns an expression that is functionally equivalent to `e`, - * however if `e` is of the form `((y1, ..., yn) => e2)(e1, ..., en)` - * then it optimizes this the top most call by returning the result of beta-reducing the application. - * Otherwise returns `expr`. + * however if `e` is of the form `((y1, ..., yn) => e2)(e1, ..., en)` + * then it optimizes the top most call by returning the result of beta-reducing the application. + * Similarly, all outermost curried function applications will be beta-reduced, if possible. + * Otherwise returns `expr`. * - * To retain semantics the argument `ei` is bound as `val yi = ei` and by-name arguments to `def yi = ei`. - * Some bindings may be elided as an early optimization. + * To retain semantics the argument `ei` is bound as `val yi = ei` and by-name arguments to `def yi = ei`. + * Some bindings may be elided as an early optimization. + * + * Example: + * ```scala sc:nocompile + * ((a: Int, b: Int) => a + b).apply(x, y) + * ``` + * will be reduced to + * ```scala sc:nocompile + * val a = x + * val b = y + * a + b + * ``` + * + * Generally: + * ```scala sc:nocompile + * ([X1, Y1, ...] => (x1, y1, ...) => ... => [Xn, Yn, ...] => (xn, yn, ...) => f[X1, Y1, ..., Xn, Yn, ...](x1, y1, ..., xn, yn, ...))).apply[Tx1, Ty1, ...](myX1, myY1, ...)....apply[Txn, Tyn, ...](myXn, myYn, ...) + * ``` + * will be reduced to + * ```scala sc:nocompile + * type X1 = Tx1 + * type Y1 = Ty1 + * ... + * val x1 = myX1 + * val y1 = myY1 + * ... + * type Xn = Txn + * type Yn = Tyn + * ... + * val xn = myXn + * val yn = myYn + * ... 
+ * f[X1, Y1, ..., Xn, Yn, ...](x1, y1, ..., xn, yn, ...) + * ``` */ def betaReduce[T](expr: Expr[T])(using Quotes): Expr[T] = import quotes.reflect.* diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index fa96b73551d1..55e66ff90da8 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -774,14 +774,47 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Methods of the module object `val Term` */ trait TermModule { this: Term.type => - /** Returns a term that is functionally equivalent to `t`, + /** Returns a term that is functionally equivalent to `t`, * however if `t` is of the form `((y1, ..., yn) => e2)(e1, ..., en)` - * then it optimizes this the top most call by returning the `Some` - * with the result of beta-reducing the application. + * then it optimizes the top most call by returning `Some` + * with the result of beta-reducing the function application. + * Similarly, all outermost curried function applications will be beta-reduced, if possible. * Otherwise returns `None`. * - * To retain semantics the argument `ei` is bound as `val yi = ei` and by-name arguments to `def yi = ei`. - * Some bindings may be elided as an early optimization. + * To retain semantics the argument `ei` is bound as `val yi = ei` and by-name arguments to `def yi = ei`. + * Some bindings may be elided as an early optimization. + * + * Example: + * ```scala sc:nocompile + * ((a: Int, b: Int) => a + b).apply(x, y) + * ``` + * will be reduced to + * ```scala sc:nocompile + * val a = x + * val b = y + * a + b + * ``` + * + * Generally: + * ```scala sc:nocompile + * ([X1, Y1, ...] => (x1, y1, ...) => ... => [Xn, Yn, ...] => (xn, yn, ...) => f[X1, Y1, ..., Xn, Yn, ...](x1, y1, ..., xn, yn, ...))).apply[Tx1, Ty1, ...](myX1, myY1, ...)....apply[Txn, Tyn, ...](myXn, myYn, ...) + * ``` + * will be reduced to + * ```scala sc:nocompile + * type X1 = Tx1 + * type Y1 = Ty1 + * ... 
+ * val x1 = myX1 + * val y1 = myY1 + * ... + * type Xn = Txn + * type Yn = Tyn + * ... + * val xn = myXn + * val yn = myYn + * ... + * f[X1, Y1, ..., Xn, Yn, ...](x1, y1, ..., xn, yn, ...) + * ``` */ def betaReduce(term: Term): Option[Term] diff --git a/tests/pos-macros/i17506/Macro_1.scala b/tests/pos-macros/i17506/Macro_1.scala new file mode 100644 index 000000000000..a66428a126be --- /dev/null +++ b/tests/pos-macros/i17506/Macro_1.scala @@ -0,0 +1,80 @@ +class Foo +class Bar +class Baz + +import scala.quoted._ + +def assertBetaReduction(using Quotes)(applied: Expr[Any], expected: String): quotes.reflect.Term = + import quotes.reflect._ + val reducedMaybe = Term.betaReduce(applied.asTerm) + assert(reducedMaybe.isDefined) + val reduced = reducedMaybe.get + assert(reduced.show == expected,s"obtained: ${reduced.show}, expected: ${expected}") + reduced + +inline def regularCurriedCtxFun2BetaReduceTest(inline f: Foo ?=> Bar ?=> Int): Unit = + ${regularCurriedCtxFun2BetaReduceTestImpl('f)} +def regularCurriedCtxFun2BetaReduceTestImpl(f: Expr[Foo ?=> Bar ?=> Int])(using Quotes): Expr[Int] = + val expected = + """|{ + | val contextual$3: Bar = new Bar() + | val contextual$2: Foo = new Foo() + | 123 + |}""".stripMargin + val applied = '{$f(using new Foo())(using new Bar())} + assertBetaReduction(applied, expected).asExprOf[Int] + +inline def regularCurriedFun2BetaReduceTest(inline f: Foo => Bar => Int): Int = + ${regularCurriedFun2BetaReduceTestImpl('f)} +def regularCurriedFun2BetaReduceTestImpl(f: Expr[Foo => Bar => Int])(using Quotes): Expr[Int] = + val expected = + """|{ + | val b: Bar = new Bar() + | val f: Foo = new Foo() + | 123 + |}""".stripMargin + val applied = '{$f(new Foo())(new Bar())} + assertBetaReduction(applied, expected).asExprOf[Int] + +inline def typeParamCurriedFun2BetaReduceTest(inline f: [A] => A => [B] => B => Unit): Unit = + ${typeParamCurriedFun2BetaReduceTestImpl('f)} +def typeParamCurriedFun2BetaReduceTestImpl(f: Expr[[A] => (a: A) => [B] => 
(b: B) => Unit])(using Quotes): Expr[Unit] = + val expected = + """|{ + | type Y = Bar + | val y: Bar = new Bar() + | type X = Foo + | val x: Foo = new Foo() + | typeParamFun2[Y, X](y, x) + |}""".stripMargin + val applied = '{$f.apply[Foo](new Foo()).apply[Bar](new Bar())} + assertBetaReduction(applied, expected).asExprOf[Unit] + +inline def regularCurriedFun3BetaReduceTest(inline f: Foo => Bar => Baz => Int): Int = + ${regularCurriedFun3BetaReduceTestImpl('f)} +def regularCurriedFun3BetaReduceTestImpl(f: Expr[Foo => Bar => Baz => Int])(using Quotes): Expr[Int] = + val expected = + """|{ + | val i: Baz = new Baz() + | val b: Bar = new Bar() + | val f: Foo = new Foo() + | 123 + |}""".stripMargin + val applied = '{$f(new Foo())(new Bar())(new Baz())} + assertBetaReduction(applied, expected).asExprOf[Int] + +inline def typeParamCurriedFun3BetaReduceTest(inline f: [A] => A => [B] => B => [C] => C => Unit): Unit = + ${typeParamCurriedFun3BetaReduceTestImpl('f)} +def typeParamCurriedFun3BetaReduceTestImpl(f: Expr[[A] => A => [B] => B => [C] => C => Unit])(using Quotes): Expr[Unit] = + val expected = + """|{ + | type Z = Baz + | val z: Baz = new Baz() + | type Y = Bar + | val y: Bar = new Bar() + | type X = Foo + | val x: Foo = new Foo() + | typeParamFun3[Z, Y, X](z, y, x) + |}""".stripMargin + val applied = '{$f.apply[Foo](new Foo()).apply[Bar](new Bar()).apply[Baz](new Baz())} + assertBetaReduction(applied, expected).asExprOf[Unit] diff --git a/tests/pos-macros/i17506/Test_2.scala b/tests/pos-macros/i17506/Test_2.scala new file mode 100644 index 000000000000..97a146ecba93 --- /dev/null +++ b/tests/pos-macros/i17506/Test_2.scala @@ -0,0 +1,11 @@ +@main def run() = + def typeParamFun2[A, B](a: A, b: B): Unit = println(a.toString + " " + b.toString) + def typeParamFun3[A, B, C](a: A, b: B, c: C): Unit = println(a.toString + " " + b.toString) + + regularCurriedCtxFun2BetaReduceTest((f: Foo) ?=> (b: Bar) ?=> 123) + regularCurriedCtxFun2BetaReduceTest(123) + 
regularCurriedFun2BetaReduceTest(((f: Foo) => (b: Bar) => 123)) + typeParamCurriedFun2BetaReduceTest([X] => (x: X) => [Y] => (y: Y) => typeParamFun2[Y, X](y, x)) + + regularCurriedFun3BetaReduceTest((f: Foo) => (b: Bar) => (i: Baz) => 123) + typeParamCurriedFun3BetaReduceTest([X] => (x: X) => [Y] => (y: Y) => [Z] => (z: Z) => typeParamFun3[Z, Y, X](z, y, x)) From be0fb7cf5bfca0a495fea00622aa84a7df3abd0c Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 8 Apr 2024 14:07:45 +0200 Subject: [PATCH 069/465] better ergnomics for -Yno-suspended-units --- .../src/dotty/tools/dotc/CompilationUnit.scala | 15 ++++++--------- .../src/dotty/tools/dotc/inlines/Inliner.scala | 9 +++++++-- .../src/dotty/tools/dotc/quoted/Interpreter.scala | 9 ++++++--- 3 files changed, 19 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 4ea9b558ea7f..0889550a4d74 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -97,15 +97,12 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn // when this unit is unsuspended. 
depRecorder.clear() if !suspended then - if ctx.settings.YnoSuspendedUnits.value then - report.error(i"Compilation unit suspended $this (-Yno-suspended-units is set)") - else - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspended: $this") - suspended = true - ctx.run.nn.suspendedUnits += this - if ctx.phase == Phases.inliningPhase then - suspendedAtInliningPhase = true + if ctx.settings.XprintSuspension.value then + report.echo(i"suspended: $this") + suspended = true + ctx.run.nn.suspendedUnits += this + if ctx.phase == Phases.inliningPhase then + suspendedAtInliningPhase = true throw CompilationUnit.SuspendException() private var myAssignmentSpans: Map[Int, List[Span]] | Null = null diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 1b4d985c7c4c..1ab58040603f 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -1033,7 +1033,7 @@ class Inliner(val call: tpd.Tree)(using Context): } } - private def expandMacro(body: Tree, splicePos: SrcPos)(using Context) = { + private def expandMacro(body: Tree, splicePos: SrcPos)(using Context): Tree = { assert(level == 0) val inlinedFrom = enclosingInlineds.last val dependencies = macroDependencies(body)(using spliceContext) @@ -1048,7 +1048,12 @@ class Inliner(val call: tpd.Tree)(using Context): if (suspendable && ctx.settings.XprintSuspension.value) report.echo(i"suspension triggered by macro call to ${sym.showLocated} in ${sym.associatedFile}", call.srcPos) if suspendable then - ctx.compilationUnit.suspend() // this throws a SuspendException + if ctx.settings.YnoSuspendedUnits.value then + return ref(defn.Predef_undefined) + .withType(ErrorType(em"could not expand macro, suspended units are disabled by -Yno-suspended-units")) + .withSpan(splicePos.span) + else + ctx.compilationUnit.suspend() // this throws a SuspendException val evaluatedSplice = 
inContext(quoted.MacroExpansion.context(inlinedFrom)) { Splicer.splice(body, splicePos, inlinedFrom.srcPos, MacroClassLoader.fromContext) diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index 17e23ebcf014..2acc82c90594 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -368,6 +368,9 @@ object Interpreter: } def suspendOnMissing(sym: Symbol, pos: SrcPos)(using Context): Nothing = - if ctx.settings.XprintSuspension.value then - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException + if ctx.settings.YnoSuspendedUnits.value then + throw StopInterpretation(em"suspension triggered by a dependency on missing $sym not allowed with -Yno-suspended-units", pos) + else + if ctx.settings.XprintSuspension.value then + report.echo(i"suspension triggered by a dependency on missing $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException From 5aca2dac3292d9e5a1f12b07d53cb70905d8c560 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 8 Apr 2024 14:39:48 +0200 Subject: [PATCH 070/465] actually print suspension hints, report.echo was swallowed --- compiler/src/dotty/tools/dotc/CompilationUnit.scala | 8 ++++---- compiler/src/dotty/tools/dotc/Driver.scala | 3 +++ compiler/src/dotty/tools/dotc/Run.scala | 1 + compiler/src/dotty/tools/dotc/inlines/Inliner.scala | 9 ++++++--- compiler/src/dotty/tools/dotc/quoted/Interpreter.scala | 4 +--- compiler/src/dotty/tools/dotc/typer/Namer.scala | 2 +- 6 files changed, 16 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 0889550a4d74..adced57d5801 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -90,17 +90,17 @@ 
class CompilationUnit protected (val source: SourceFile, val info: CompilationUn /** Suspends the compilation unit by thowing a SuspendException * and recording the suspended compilation unit */ - def suspend()(using Context): Nothing = + def suspend(hint: => String)(using Context): Nothing = assert(isSuspendable) // Clear references to symbols that may become stale. No need to call // `depRecorder.sendToZinc()` since all compilation phases will be rerun // when this unit is unsuspended. depRecorder.clear() if !suspended then - if ctx.settings.XprintSuspension.value then - report.echo(i"suspended: $this") suspended = true ctx.run.nn.suspendedUnits += this + if ctx.settings.XprintSuspension.value then + ctx.run.nn.suspendedHints += (this -> hint) if ctx.phase == Phases.inliningPhase then suspendedAtInliningPhase = true throw CompilationUnit.SuspendException() @@ -120,7 +120,7 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn override def isJava: Boolean = false - override def suspend()(using Context): Nothing = + override def suspend(hint: => String)(using Context): Nothing = throw CompilationUnit.SuspendException() override def assignmentSpans(using Context): Map[Int, List[Span]] = Map.empty diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index ae2219a4f049..dcc6cf8d71c0 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -52,7 +52,10 @@ class Driver { if !ctx.reporter.errorsReported && run.suspendedUnits.nonEmpty then val suspendedUnits = run.suspendedUnits.toList if (ctx.settings.XprintSuspension.value) + val suspendedHints = run.suspendedHints.toList report.echo(i"compiling suspended $suspendedUnits%, %") + for (unit, hint) <- suspendedHints do + report.echo(s" $unit: $hint") val run1 = compiler.newRun run1.compileSuspendedUnits(suspendedUnits) finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) diff --git 
a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index d18a2ddc7db0..a7107656889d 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -130,6 +130,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint myUnits = us var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() + var suspendedHints: mutable.Map[CompilationUnit, String] = mutable.HashMap() def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 1ab58040603f..dbac6478e9d8 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -1038,22 +1038,25 @@ class Inliner(val call: tpd.Tree)(using Context): val inlinedFrom = enclosingInlineds.last val dependencies = macroDependencies(body)(using spliceContext) val suspendable = ctx.compilationUnit.isSuspendable + val printSuspensions = ctx.settings.XprintSuspension.value if dependencies.nonEmpty && !ctx.reporter.errorsReported then + val hints: mutable.ListBuffer[String] | Null = + if printSuspensions then mutable.ListBuffer.empty[String] else null for sym <- dependencies do if ctx.compilationUnit.source.file == sym.associatedFile then report.error(em"Cannot call macro $sym defined in the same source file", call.srcPos) else if ctx.settings.YnoSuspendedUnits.value then val addendum = ", suspension prevented by -Yno-suspended-units" report.error(em"Cannot call macro $sym defined in the same compilation run$addendum", call.srcPos) - if (suspendable && ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by macro call to ${sym.showLocated} in ${sym.associatedFile}", call.srcPos) + if suspendable && printSuspensions 
then + hints.nn += i"suspension triggered by macro call to ${sym.showLocated} in ${sym.associatedFile}" if suspendable then if ctx.settings.YnoSuspendedUnits.value then return ref(defn.Predef_undefined) .withType(ErrorType(em"could not expand macro, suspended units are disabled by -Yno-suspended-units")) .withSpan(splicePos.span) else - ctx.compilationUnit.suspend() // this throws a SuspendException + ctx.compilationUnit.suspend(hints.nn.toList.mkString(", ")) // this throws a SuspendException val evaluatedSplice = inContext(quoted.MacroExpansion.context(inlinedFrom)) { Splicer.splice(body, splicePos, inlinedFrom.srcPos, MacroClassLoader.fromContext) diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index 2acc82c90594..bbf6c40cfc1b 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -371,6 +371,4 @@ object Interpreter: if ctx.settings.YnoSuspendedUnits.value then throw StopInterpretation(em"suspension triggered by a dependency on missing $sym not allowed with -Yno-suspended-units", pos) else - if ctx.settings.XprintSuspension.value then - report.echo(i"suspension triggered by a dependency on missing $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException + ctx.compilationUnit.suspend(i"suspension triggered by a dependency on missing $sym") // this throws a SuspendException diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 15d7885776c5..c3c1e39fccc6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1667,7 +1667,7 @@ class Namer { typer: Typer => final override def complete(denot: SymDenotation)(using Context): Unit = denot.resetFlag(Touched) // allow one more completion - ctx.compilationUnit.suspend() + ctx.compilationUnit.suspend(i"reset $denot") } /** 
Typecheck `tree` during completion using `typed`, and remember result in TypedAhead map */ From 1b61ed621bc21a29f9200436a9c68bf28dedbc6d Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 8 Apr 2024 15:40:34 +0200 Subject: [PATCH 071/465] Drop redundant ConstFold --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 8f166f329dcb..dcbae084acf3 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1190,8 +1190,7 @@ trait Applications extends Compatibility { case _ => tree.withType(TryDynamicCallType) } if (typedFn.tpe eq TryDynamicCallType) tryDynamicTypeApply() - else - ConstFold(assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)) + else assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } } From 20ae56337a813d4b62c408d825eb0dad1af0a7f4 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 8 Apr 2024 16:19:47 +0200 Subject: [PATCH 072/465] fix/scripts: Propagate runner JVM version and skip testing invalid compiler commits (#19656) * Bisect script used JVM 17 by default, can be overriden using: `scala-cli run project/scripts/bisect.scala --jvm=21 -- ` * Bisect runner JVM version is propagated to scala-cli verification script and sbt when building compiler * Skip testing commits for which compiler compilation fails --- project/scripts/bisect.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/project/scripts/bisect.scala b/project/scripts/bisect.scala index e97fbb3fafd7..c429fe06f81b 100755 --- a/project/scripts/bisect.scala +++ b/project/scripts/bisect.scala @@ -1,3 +1,4 @@ +//> using jvm 17 // Maximal JDK version which can be used with all Scala 3 versions, can be overriden via command line arguments '--jvm=21' /* This script will 
bisect a problem with the compiler based on success/failure of the validation script passed as an argument. It starts with a fast bisection on released nightly builds. @@ -124,6 +125,7 @@ object ValidationScript: def tmpScalaCliScript(command: String, args: Seq[String]): File = tmpScript(s""" |#!/usr/bin/env bash + |export JAVA_HOME=${sys.props("java.home")} |scala-cli ${command} -S "$$1" --server=false ${args.mkString(" ")} |""".stripMargin ) @@ -242,8 +244,10 @@ class CommitBisect(validationScript: File, shouldFail: Boolean, bootstrapped: Bo val bisectRunScript = raw""" |scalaVersion=$$(sbt "print ${scala3CompilerProject}/version" | tail -n1) |rm -rf out - |sbt "clean; set every doc := new File(\"unused\"); set scaladoc/Compile/resourceGenerators := (\`${scala3Project}\`/Compile/resourceGenerators).value; ${scala3Project}/publishLocal" - |${validationCommandStatusModifier}${validationScript.getAbsolutePath} "$$scalaVersion" + |export JAVA_HOME=${sys.props("java.home")} + |(sbt "clean; set every doc := new File(\"unused\"); set scaladoc/Compile/resourceGenerators := (\`${scala3Project}\`/Compile/resourceGenerators).value; ${scala3Project}/publishLocal" \ + | || (echo "Failed to build compiler, skip $$scalaVersion"; git bisect skip) \ + |) && ${validationCommandStatusModifier}${validationScript.getAbsolutePath} "$$scalaVersion" """.stripMargin "git bisect start".! s"git bisect bad $fistBadHash".! From b87ff4b949ef86dcb7ea48209503430327f4d996 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Mon, 8 Apr 2024 10:08:33 +0200 Subject: [PATCH 073/465] Fix inline match on blocks with multiple statements Only the last expression of the block is considered as the inlined scrutinee. Otherwise we may not reduce as much as we should. We also need to make sure that side effects and bindings in the scrutinee are not duplicated. 
Fixes #18151 --- .../dotty/tools/dotc/inlines/Inliner.scala | 101 +++++++++++------- .../backend/jvm/InlineBytecodeTests.scala | 32 ++++++ tests/pos/i18151a.scala | 10 ++ tests/pos/i18151b.scala | 10 ++ tests/pos/i18151c.scala | 39 +++++++ 5 files changed, 154 insertions(+), 38 deletions(-) create mode 100644 tests/pos/i18151a.scala create mode 100644 tests/pos/i18151b.scala create mode 100644 tests/pos/i18151c.scala diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 1b4d985c7c4c..a9c9568d0d31 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -860,46 +860,71 @@ class Inliner(val call: tpd.Tree)(using Context): case _ => sel.tpe } val selType = if (sel.isEmpty) wideSelType else selTyped(sel) - reduceInlineMatch(sel, selType, cases.asInstanceOf[List[CaseDef]], this) match { - case Some((caseBindings, rhs0)) => - // drop type ascriptions/casts hiding pattern-bound types (which are now aliases after reducing the match) - // note that any actually necessary casts will be reinserted by the typing pass below - val rhs1 = rhs0 match { - case Block(stats, t) if t.span.isSynthetic => - t match { - case Typed(expr, _) => - Block(stats, expr) - case TypeApply(sel@Select(expr, _), _) if sel.symbol.isTypeCast => - Block(stats, expr) - case _ => - rhs0 + + /** Make an Inlined that has no bindings. 
*/ + def flattenInlineBlock(tree: Tree): Tree = { + def inlineBlock(call: Tree, stats: List[Tree], expr: Tree): Block = + def inlinedTree(tree: Tree) = Inlined(call, Nil, tree).withSpan(tree.span) + val stats1 = stats.map: + case stat: ValDef => cpy.ValDef(stat)(rhs = inlinedTree(stat.rhs)) + case stat: DefDef => cpy.DefDef(stat)(rhs = inlinedTree(stat.rhs)) + case stat => inlinedTree(stat) + cpy.Block(tree)(stats1, flattenInlineBlock(inlinedTree(expr))) + + tree match + case tree @ Inlined(call, bindings, expr) if !bindings.isEmpty => + inlineBlock(call, bindings, expr) + case tree @ Inlined(call, Nil, Block(stats, expr)) => + inlineBlock(call, stats, expr) + case _ => + tree + } + + def reduceInlineMatchExpr(sel: Tree): Tree = flattenInlineBlock(sel) match + case Block(stats, expr) => + cpy.Block(sel)(stats, reduceInlineMatchExpr(expr)) + case _ => + reduceInlineMatch(sel, selType, cases.asInstanceOf[List[CaseDef]], this) match { + case Some((caseBindings, rhs0)) => + // drop type ascriptions/casts hiding pattern-bound types (which are now aliases after reducing the match) + // note that any actually necessary casts will be reinserted by the typing pass below + val rhs1 = rhs0 match { + case Block(stats, t) if t.span.isSynthetic => + t match { + case Typed(expr, _) => + Block(stats, expr) + case TypeApply(sel@Select(expr, _), _) if sel.symbol.isTypeCast => + Block(stats, expr) + case _ => + rhs0 + } + case _ => rhs0 } - case _ => rhs0 - } - val rhs2 = rhs1 match { - case Typed(expr, tpt) if rhs1.span.isSynthetic => constToLiteral(expr) - case _ => constToLiteral(rhs1) + val rhs2 = rhs1 match { + case Typed(expr, tpt) if rhs1.span.isSynthetic => constToLiteral(expr) + case _ => constToLiteral(rhs1) + } + val (usedBindings, rhs3) = dropUnusedDefs(caseBindings, rhs2) + val rhs = seq(usedBindings, rhs3) + inlining.println(i"""--- reduce: + |$tree + |--- to: + |$rhs""") + typedExpr(rhs, pt) + case None => + def guardStr(guard: untpd.Tree) = if (guard.isEmpty) "" else 
i" if $guard" + def patStr(cdef: untpd.CaseDef) = i"case ${cdef.pat}${guardStr(cdef.guard)}" + val msg = + if (tree.selector.isEmpty) + em"""cannot reduce summonFrom with + | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" + else + em"""cannot reduce inline match with + | scrutinee: $sel : ${selType} + | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" + errorTree(tree, msg) } - val (usedBindings, rhs3) = dropUnusedDefs(caseBindings, rhs2) - val rhs = seq(usedBindings, rhs3) - inlining.println(i"""--- reduce: - |$tree - |--- to: - |$rhs""") - typedExpr(rhs, pt) - case None => - def guardStr(guard: untpd.Tree) = if (guard.isEmpty) "" else i" if $guard" - def patStr(cdef: untpd.CaseDef) = i"case ${cdef.pat}${guardStr(cdef.guard)}" - val msg = - if (tree.selector.isEmpty) - em"""cannot reduce summonFrom with - | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" - else - em"""cannot reduce inline match with - | scrutinee: $sel : ${selType} - | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" - errorTree(tree, msg) - } + reduceInlineMatchExpr(sel) } override def newLikeThis(nestingLevel: Int): Typer = new InlineTyper(initialErrorCount, nestingLevel) diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala index fcbc738f2934..7172e19184cb 100644 --- a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala @@ -785,4 +785,36 @@ class InlineBytecodeTests extends DottyBytecodeTest { } } + @Test def inline_match_scrutinee_with_side_effect = { + val source = """class Test: + | inline def inlineTest(): Int = + | inline { + | println("scrutinee") + | (1, 2) + | } match + | case (e1, e2) => e1 + e2 + | + | def test: Int = inlineTest() + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + 
val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = List( + Field(GETSTATIC, "scala/Predef$", "MODULE$", "Lscala/Predef$;"), + Ldc(LDC, "scrutinee"), + Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), + Op(ICONST_3), + Op(IRETURN), + ) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + } diff --git a/tests/pos/i18151a.scala b/tests/pos/i18151a.scala new file mode 100644 index 000000000000..6be2c5c23a30 --- /dev/null +++ b/tests/pos/i18151a.scala @@ -0,0 +1,10 @@ +case class El[A](attr: String, child: String) + +transparent inline def inlineTest(): String = + inline { + val el: El[Any] = El("1", "2") + El[Any](el.attr, el.child) + } match + case El(attr, child) => attr + child + +def test: Unit = inlineTest() diff --git a/tests/pos/i18151b.scala b/tests/pos/i18151b.scala new file mode 100644 index 000000000000..01d2aaee972a --- /dev/null +++ b/tests/pos/i18151b.scala @@ -0,0 +1,10 @@ +case class El[A](val attr: String, val child: String) + +transparent inline def tmplStr(inline t: El[Any]): String = + inline t match + case El(attr, child) => attr + child + +def test: Unit = tmplStr { + val el = El("1", "2") + El[Any](el.attr, null) +} diff --git a/tests/pos/i18151c.scala b/tests/pos/i18151c.scala new file mode 100644 index 000000000000..a46ec9dd927c --- /dev/null +++ b/tests/pos/i18151c.scala @@ -0,0 +1,39 @@ +import scala.compiletime.* +import scala.compiletime.ops.any.ToString + +trait Attr +case object EmptyAttr extends Attr +transparent inline def attrStr(inline a: Attr): String = inline a match + case EmptyAttr => "" +transparent inline def attrStrHelper(inline a: Attr): String = inline a match + case EmptyAttr => "" +trait TmplNode +case class El[T <: String & Singleton, A <: Attr, C <: Tmpl](val tag: T, val attr: A, val child: C) + extends TmplNode +case class Sib[L <: Tmpl, R <: 
Tmpl](left: L, right: R) extends TmplNode +type TmplSingleton = String | Char | Int | Long | Float | Double | Boolean +type Tmpl = TmplNode | Unit | (TmplSingleton & Singleton) +transparent inline def tmplStr(inline t: Tmpl): String = inline t match + case El(tag, attr, child) => inline attrStr(attr) match + case "" => "<" + tag + ">" + tmplStr(child) + case x => "<" + tag + " " + x + ">" + tmplStr(child) + case Sib(left, right) => inline tmplStr(right) match + case "" => tmplStr(left) + case right => tmplStrHelper(left) + right + case () => "" + case s: (t & TmplSingleton) => constValue[ToString[t]] +transparent inline def tmplStrHelper(inline t: Tmpl): String = inline t match + case El(tag, attr, child) => inline (tmplStr(child), attrStr(attr)) match + case ("", "") => "<" + tag + "/>" + case (child, "") => "<" + tag + ">" + child + "" + case ("", attr) => "<" + tag + " " + attr + "/>" + case (child, attr) => "<" + tag + " " + attr + ">" + child + "" + case Sib(left, right) => tmplStrHelper(left) + tmplStrHelper(right) + case () => "" + case s: (t & TmplSingleton) => constValue[ToString[t]] +transparent inline def el(tag: String & Singleton): El[tag.type, EmptyAttr.type, Unit] = + El(tag, EmptyAttr, ()) +extension [T <: String & Singleton, A <: Attr, C <: Tmpl](el: El[T, A, C]) + transparent inline def >>[C2 <: Tmpl](child: C2) = El(el.tag, el.attr, el.child ++ child) + +extension [L <: Tmpl](left: L) transparent inline def ++[R <: Tmpl](right: R) = Sib(left, right) From 822eec652213b0ff02f9af078129bf22e6268aac Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 8 Apr 2024 16:27:33 +0200 Subject: [PATCH 074/465] Do prefix comparison also in comparison including implicit parameters --- .../dotty/tools/dotc/typer/Applications.scala | 38 +++++++++---------- .../pos/implicit-prefix-disambiguation.scala | 9 +++-- 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala 
b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 10886f676732..383f2fce636b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1831,18 +1831,20 @@ trait Applications extends Compatibility { * * In this case `b.M` would be regarded as more specific than `a.M`. */ - def comparePrefixes(pre1: Type, pre2: Type) = + def comparePrefixes = + val pre1 = widenPrefix(alt1) + val pre2 = widenPrefix(alt2) val winsPrefix1 = isAsSpecificValueType(pre1, pre2) val winsPrefix2 = isAsSpecificValueType(pre2, pre1) if winsPrefix1 == winsPrefix2 then 0 else if winsPrefix1 then 1 else -1 - def compareWithTypes(tp1: Type, tp2: Type) = { + def compareWithTypes(tp1: Type, tp2: Type) = val ownerScore = compareOwner(alt1.symbol.maybeOwner, alt2.symbol.maybeOwner) - def winsType1 = isAsSpecific(alt1, tp1, alt2, tp2) - def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) + val winsType1 = isAsSpecific(alt1, tp1, alt2, tp2) + val winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) overload.println(i"compare($alt1, $alt2)? $tp1 $tp2 $ownerScore $winsType1 $winsType2") if winsType1 && winsType2 @@ -1851,15 +1853,14 @@ trait Applications extends Compatibility { // alternatives are the same after following ExprTypes, pick one of them // (prefer the one that is not a method, but that's arbitrary). 
if alt1.widenExpr =:= alt2 then -1 else 1 - else if ownerScore == 1 then - if winsType1 || !winsType2 then 1 else 0 - else if ownerScore == -1 then - if winsType2 || !winsType1 then -1 else 0 - else if winsType1 then - if winsType2 then 0 else 1 - else - if winsType2 then -1 else 0 - } + else ownerScore match + case 1 => if winsType1 || !winsType2 then 1 else 0 + case -1 => if winsType2 || !winsType1 then -1 else 0 + case 0 => + if winsType1 != winsType2 then if winsType1 then 1 else -1 + else if alt1.symbol == alt2.symbol then comparePrefixes + else 0 + end compareWithTypes if alt1.symbol.is(ConstructorProxy) && !alt2.symbol.is(ConstructorProxy) then -1 else if alt2.symbol.is(ConstructorProxy) && !alt1.symbol.is(ConstructorProxy) then 1 @@ -1870,14 +1871,11 @@ trait Applications extends Compatibility { val strippedType2 = stripImplicit(fullType2) val result = compareWithTypes(strippedType1, strippedType2) - if result != 0 then result - else if strippedType1 eq fullType1 then - if strippedType2 eq fullType2 then - if alt1.symbol != alt2.symbol then 0 // no implicits either side: it's a draw ... - else comparePrefixes( // ... 
unless the symbol is the same, in which case - widenPrefix(alt1), widenPrefix(alt2)) // we compare prefixes + if (result != 0) result + else if (strippedType1 eq fullType1) + if (strippedType2 eq fullType2) 0 // no implicits either side: its' a draw else 1 // prefer 1st alternative with no implicits - else if strippedType2 eq fullType2 then -1 // prefer 2nd alternative with no implicits + else if (strippedType2 eq fullType2) -1 // prefer 2nd alternative with no implicits else compareWithTypes(fullType1, fullType2) // continue by comparing implicits parameters } end compare diff --git a/tests/pos/implicit-prefix-disambiguation.scala b/tests/pos/implicit-prefix-disambiguation.scala index 5059aa2db4eb..f7843e7f5831 100644 --- a/tests/pos/implicit-prefix-disambiguation.scala +++ b/tests/pos/implicit-prefix-disambiguation.scala @@ -1,7 +1,10 @@ + class I[X] +class J[X] trait A: given I[B] = ??? + given (using I[B]): J[B] = ??? object A extends A trait B extends A @@ -9,6 +12,6 @@ object B extends B //import B.given, A.given -def Test = summon[I[B]] - - +def Test = + summon[I[B]] + summon[J[B]] From 5d0c47a3d4b8486357e31ce91e853a62d013d5f9 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Mon, 8 Apr 2024 19:24:59 +0200 Subject: [PATCH 075/465] Fix infinite loop in Mirror synthesis of unreducible match type This regressed in f7e2e7ce752f9c472c06fe1685464879fa06f6f7 (present in 3.4.0). 
--- compiler/src/dotty/tools/dotc/typer/Synthesizer.scala | 2 +- tests/neg/i19198.scala | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 tests/neg/i19198.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index c94724faf4d4..61cba4c80203 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -379,7 +379,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): // avoid type aliases for tuples Right(MirrorSource.GenericTuple(types)) case _ => reduce(tp.underlying) - case tp: MatchType => reduce(tp.normalized) + case tp: MatchType => reduce(tp.tryNormalize.orElse(tp.superType)) case _ => reduce(tp.superType) case tp @ AndType(l, r) => for diff --git a/tests/neg/i19198.scala b/tests/neg/i19198.scala new file mode 100644 index 000000000000..ad5ee29bb042 --- /dev/null +++ b/tests/neg/i19198.scala @@ -0,0 +1,9 @@ +import deriving.Mirror +import compiletime.summonInline + +type DoesNotReduce[T] = T match + case String => Any + +class Foo +@main def Test: Unit = + summonInline[Mirror.Of[DoesNotReduce[Option[Int]]]] // error From ef7db7ad8142d205feae1dfcfef59c61670767b6 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 28 Feb 2024 21:58:08 +0100 Subject: [PATCH 076/465] Make aliases of `MatchAlias`es normal `TypeAlias`es Make `isMatch` false for applied `MatchAlias`es, i.e. true only for `MatchType`s and higher-kinded abstraction of them. As a result, code using `isMatch` to choose between a `TypeAlias` and `MatchAlias` will now use a `TypeAlias` when aliasing a `MatchAlias`. Which in turn allows for better de-aliasing, since `dealias` only de-aliases standard type aliases. The logic for this distinction has also been extracted to the common `AliasingBounds` supertype. `tryNormalize` on `AppliedType`s should only attempt reduction if there is an underlying match type. 
This could previously be identified by a `MatchAlias` tycon. We now need a recursive check. --- .../tools/dotc/core/TypeApplications.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 32 ++++++++++++++++--- .../tools/dotc/core/tasty/TreeUnpickler.scala | 4 +-- .../dotty/tools/dotc/inlines/Inlines.scala | 5 ++- .../dotty/tools/dotc/typer/TypeAssigner.scala | 4 +-- .../test/dotc/pos-test-pickling.blacklist | 1 + tests/neg-macros/i11795.scala | 10 ------ tests/pos-macros/i11795.scala | 12 ++++++- tests/pos/i19821.scala | 26 +++++++++++++++ 9 files changed, 70 insertions(+), 26 deletions(-) delete mode 100644 tests/neg-macros/i11795.scala create mode 100644 tests/pos/i19821.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index eeb18eaa9cc7..efcad3307937 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -461,7 +461,7 @@ class TypeApplications(val self: Type) extends AnyVal { */ final def toBounds(using Context): TypeBounds = self match { case self: TypeBounds => self // this can happen for wildcard args - case _ => if (self.isMatch) MatchAlias(self) else TypeAlias(self) + case _ => AliasingBounds(self) } /** Translate a type of the form From[T] to either To[T] or To[? <: T] (if `wildcardArg` is set). Keep other types as they are. diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 62844a54bf48..9158062e10b7 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -458,7 +458,10 @@ object Types extends TypeUtils { /** Is this a match type or a higher-kinded abstraction of one? 
*/ - def isMatch(using Context): Boolean = underlyingMatchType.exists + def isMatch(using Context): Boolean = stripped match + case tp: MatchType => true + case tp: HKTypeLambda => tp.resType.isMatch + case _ => false def underlyingMatchType(using Context): Type = stripped match { case tp: MatchType => tp @@ -4587,16 +4590,22 @@ object Types extends TypeUtils { override def tryNormalize(using Context): Type = tycon.stripTypeVar match { case tycon: TypeRef => - def tryMatchAlias = tycon.info match { - case MatchAlias(alias) => + def tryMatchAlias = tycon.info match + case AliasingBounds(alias) if isMatchAlias => trace(i"normalize $this", typr, show = true) { MatchTypeTrace.recurseWith(this) { alias.applyIfParameterized(args.map(_.normalized)).tryNormalize + /* `applyIfParameterized` may reduce several HKTypeLambda applications + * before the underlying MatchType is reached. + * Even if they do not involve any match type normalizations yet, + * we still want to record these reductions in the MatchTypeTrace. + * They should however only be attempted if they eventually expand + * to a match type, which is ensured by the `isMatchAlias` guard. + */ } } case _ => NoType - } tryCompiletimeConstantFold.orElse(tryMatchAlias) case _ => NoType @@ -4606,7 +4615,12 @@ object Types extends TypeUtils { def isMatchAlias(using Context): Boolean = tycon.stripTypeVar match case tycon: TypeRef => tycon.info match - case _: MatchAlias => true + case AliasingBounds(alias) => + alias.underlyingMatchType.exists + /* This is the only true case since anything other than + * a TypeRef of an alias with an underlying match type + * should have been already reduced by `appliedTo` in the TypeAssigner. + */ case _ => false case _ => false @@ -5636,6 +5650,14 @@ object Types extends TypeUtils { def lower(lo: Type)(using Context): TypeBounds = apply(lo, defn.AnyType) } + object AliasingBounds: + /** A MatchAlias if alias is a match type and a TypeAlias o.w. 
+ * Note that aliasing a MatchAlias returns a normal TypeAlias. + */ + def apply(alias: Type)(using Context): AliasingBounds = + if alias.isMatch then MatchAlias(alias) else TypeAlias(alias) + def unapply(tp: AliasingBounds): Option[Type] = Some(tp.alias) + object TypeAlias { def apply(alias: Type)(using Context): TypeAlias = unique(new TypeAlias(alias)) def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 57c0b2217e9d..a75cc6c666d0 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -412,9 +412,7 @@ class TreeUnpickler(reader: TastyReader, readType().appliedTo(until(end)(readType())) case TYPEBOUNDS => val lo = readType() - if nothingButMods(end) then - if lo.isMatch then MatchAlias(readVariances(lo)) - else TypeAlias(readVariances(lo)) + if nothingButMods(end) then AliasingBounds(readVariances(lo)) else val hi = readVariances(readType()) createNullableTypeBounds(lo, hi) diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 65792d09f88c..fffe87c3f57a 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -452,9 +452,8 @@ object Inlines: unrollTupleTypes(tail).map(head :: _) case tpe: TermRef if tpe.symbol == defn.EmptyTupleModule => Some(Nil) - case tpRef: TypeRef => tpRef.info match - case MatchAlias(alias) => unrollTupleTypes(alias.tryNormalize) - case _ => None + case tpe: AppliedType if tpe.isMatchAlias => + unrollTupleTypes(tpe.tryNormalize) case _ => None diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 96c5e57dde0e..c7476f5d9777 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ 
b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -519,9 +519,7 @@ trait TypeAssigner { def assignType(tree: untpd.TypeBoundsTree, lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree.withType( if !alias.isEmpty then alias.tpe - else if lo eq hi then - if lo.tpe.isMatch then MatchAlias(lo.tpe) - else TypeAlias(lo.tpe) + else if lo eq hi then AliasingBounds(lo.tpe) else TypeBounds(lo.tpe, hi.tpe)) def assignType(tree: untpd.Bind, sym: Symbol)(using Context): Bind = diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 3785f8fa6e06..81661e87b84e 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -64,6 +64,7 @@ i17149.scala tuple-fold.scala mt-redux-norm.perspective.scala i18211.scala +10867.scala # Opaque type i5720.scala diff --git a/tests/neg-macros/i11795.scala b/tests/neg-macros/i11795.scala deleted file mode 100644 index 2a7f89831e0e..000000000000 --- a/tests/neg-macros/i11795.scala +++ /dev/null @@ -1,10 +0,0 @@ -import scala.quoted._ -import scala.deriving._ - -def blah[P <: Product] - (m: Mirror.ProductOf[P]) - (using Quotes, Type[m.MirroredElemLabels], Type[m.MirroredElemTypes]) = { - type z = Tuple.Zip[m.MirroredElemLabels, m.MirroredElemTypes] - Type.of[z] // error - () -} diff --git a/tests/pos-macros/i11795.scala b/tests/pos-macros/i11795.scala index 32eaccf2f4e2..26d1c4da1417 100644 --- a/tests/pos-macros/i11795.scala +++ b/tests/pos-macros/i11795.scala @@ -1,7 +1,17 @@ import scala.quoted._ import scala.deriving._ -def blah2[P <: Product, MEL <: Tuple: Type, MET <: Tuple: Type](m: Mirror.ProductOf[P] { type MirroredElemLabels = MEL; type MirroredElemTypes = MET})(using Quotes) = { +def blah[P <: Product] + (m: Mirror.ProductOf[P]) + (using Quotes, Type[m.MirroredElemLabels], Type[m.MirroredElemTypes]) = { + type z = Tuple.Zip[m.MirroredElemLabels, m.MirroredElemTypes] + Type.of[z] // error + () +} + 
+def blah2[P <: Product, MEL <: Tuple: Type, MET <: Tuple: Type] + (m: Mirror.ProductOf[P] { type MirroredElemLabels = MEL; type MirroredElemTypes = MET}) + (using Quotes) = { Type.of[Tuple.Zip[MEL, MET]] () } diff --git a/tests/pos/i19821.scala b/tests/pos/i19821.scala new file mode 100644 index 000000000000..0dcad965a38b --- /dev/null +++ b/tests/pos/i19821.scala @@ -0,0 +1,26 @@ + +object Test: + + trait T: + type S + type F = T.F[S] + + def foo: F + def bar: T.F[S] + + object T: + type F[X] = X match + case String => Option[Int] + + type G[X] = X match + case Option[x] => Int + + val t: T {type S = String} = ??? + + val b = t.bar + val m1: T.G[b.type] = ??? + val _: Int = m1 // Ok + + val f = t.foo + val m: T.G[f.type] = ??? + val _: Int = m // Error before changes From 259a16c64fc7e2868cb71f988299d23dd1d8e887 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Sun, 31 Mar 2024 19:11:18 +0200 Subject: [PATCH 077/465] Replace usages of `MatchType.InDisguise` by `underlyingMatchType` --- .../src/dotty/tools/dotc/core/TypeOps.scala | 2 +- compiler/src/dotty/tools/dotc/core/Types.scala | 17 +++-------------- compiler/src/dotty/tools/dotc/typer/Typer.scala | 4 +--- 3 files changed, 5 insertions(+), 18 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 1bec455c5495..d88b61d41e2f 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -143,7 +143,7 @@ object TypeOps: defn.MatchCase(simplify(pat, theMap), body) case tp: AppliedType => tp.tycon match - case tycon: TypeRef if tycon.info.isInstanceOf[MatchAlias] => + case tycon: TypeRef if tp.isMatchAlias => isFullyDefined(tp, ForceDegree.all) case _ => val normed = tp.tryNormalize diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 9158062e10b7..bbfd3953c983 100644 --- 
a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5151,20 +5151,9 @@ object Types extends TypeUtils { def apply(bound: Type, scrutinee: Type, cases: List[Type])(using Context): MatchType = unique(new CachedMatchType(bound, scrutinee, cases)) - def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp match - case MatchType.InDisguise(mt) => mt.reducesUsingGadt - case mt: MatchType => mt.reducesUsingGadt - case _ => false - - /** Extractor for match types hidden behind an AppliedType/MatchAlias. */ - object InDisguise: - def unapply(tp: AppliedType)(using Context): Option[MatchType] = tp match - case AppliedType(tycon: TypeRef, args) => tycon.info match - case MatchAlias(alias) => alias.applyIfParameterized(args) match - case mt: MatchType => Some(mt) - case _ => None - case _ => None - case _ => None + def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp.underlyingMatchType match + case mt: MatchType => mt.reducesUsingGadt + case _ => false } enum MatchTypeCasePattern: diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 0b05bcd078ff..0ea83a5a011a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1841,11 +1841,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => false } - val result = pt match { + val result = pt.underlyingMatchType match { case mt: MatchType if isMatchTypeShaped(mt) => typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) - case MatchType.InDisguise(mt) if isMatchTypeShaped(mt) => - typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case _ => typedMatchFinish(tree, sel1, selType, tree.cases, pt) } From 389f5be29879f44178a61a262880ae986d1512a8 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 8 Apr 2024 19:59:17 +0200 Subject: [PATCH 078/465] Replace usages of `isInstanceOf[MatchAlias]` by 
`isMatchAlias` --- .../dotty/tools/dotc/core/TypeComparer.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 19 ++++--------------- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- 3 files changed, 6 insertions(+), 17 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 7af2f21bc56d..cb9961defb17 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1375,7 +1375,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * tp1 <:< app2 using isSubType (this might instantiate params in tp2) */ def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean = - if ((tycon2bounds.lo `eq` tycon2bounds.hi) && !tycon2bounds.isInstanceOf[MatchAlias]) + if ((tycon2bounds.lo `eq` tycon2bounds.hi) && !tycon2bounds.isMatchAlias) if (tyconIsTypeRef) recur(tp1, tp2.superTypeNormalized) && recordGadtUsageIf(MatchType.thatReducesUsingGadt(tp2)) else isSubApproxHi(tp1, tycon2bounds.lo.applyIfParameterized(args2)) else diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index bbfd3953c983..68f76f2c2500 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -456,13 +456,15 @@ object Types extends TypeUtils { /** Is this a MethodType for which the parameters will not be used? */ def hasErasedParams(using Context): Boolean = false - /** Is this a match type or a higher-kinded abstraction of one? - */ + /** Is this a match type or a higher-kinded abstraction of one? */ def isMatch(using Context): Boolean = stripped match case tp: MatchType => true case tp: HKTypeLambda => tp.resType.isMatch case _ => false + /** Does this application expand to a match type? 
*/ + def isMatchAlias(using Context): Boolean = underlyingMatchType.exists + def underlyingMatchType(using Context): Type = stripped match { case tp: MatchType => tp case tp: HKTypeLambda => tp.resType.underlyingMatchType @@ -4611,19 +4613,6 @@ object Types extends TypeUtils { NoType } - /** Does this application expand to a match type? */ - def isMatchAlias(using Context): Boolean = tycon.stripTypeVar match - case tycon: TypeRef => - tycon.info match - case AliasingBounds(alias) => - alias.underlyingMatchType.exists - /* This is the only true case since anything other than - * a TypeRef of an alias with an underlying match type - * should have been already reduced by `appliedTo` in the TypeAssigner. - */ - case _ => false - case _ => false - /** Is this an unreducible application to wildcard arguments? * This is the case if tycon is higher-kinded. This means * it is a subtype of a hk-lambda, but not a match alias. diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 949e791d0496..aa210a83cb4a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -622,7 +622,7 @@ trait ImplicitRunInfo: sym.isClass && !isExcluded(sym) || sym.isOpaqueAlias || sym.is(Deferred, butNot = Param) - || sym.info.isInstanceOf[MatchAlias] + || sym.info.isMatchAlias private def computeIScope(rootTp: Type): OfTypeImplicits = From 1dc5b995cfb84dd3e2673041d6e954ed6043900e Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 8 Apr 2024 20:00:47 +0200 Subject: [PATCH 079/465] Cache underlyingMatchType for AppliedTypes `def underlyingMatchType` had an `isMatchAlias` guard for `AppliedType`s. This used to be a quick check preventing unnecessary recursions and superType computations. But `isMatchAlias` is now itself mutually recursive with `underlyingMatchType`, so we cache it for AppliedTypes to alleviate this. 
--- compiler/src/dotty/tools/dotc/core/Types.scala | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 68f76f2c2500..7c647935ee32 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -468,7 +468,7 @@ object Types extends TypeUtils { def underlyingMatchType(using Context): Type = stripped match { case tp: MatchType => tp case tp: HKTypeLambda => tp.resType.underlyingMatchType - case tp: AppliedType if tp.isMatchAlias => tp.superType.underlyingMatchType + case tp: AppliedType => tp.underlyingMatchType case _ => NoType } @@ -4534,6 +4534,9 @@ object Types extends TypeUtils { private var myEvalRunId: RunId = NoRunId private var myEvalued: Type = uninitialized + private var validUnderlyingMatch: Period = Nowhere + private var cachedUnderlyingMatch: Type = uninitialized + def isGround(acc: TypeAccumulator[Boolean])(using Context): Boolean = if myGround == 0 then myGround = if acc.foldOver(true, this) then 1 else -1 myGround > 0 @@ -4590,6 +4593,15 @@ object Types extends TypeUtils { case nil => x foldArgs(op(x, tycon), args) + /** Exists if the tycon is a TypeRef of an alias with an underlying match type. + * Anything else should have already been reduced in `appliedTo` by the TypeAssigner. 
+ */ + override def underlyingMatchType(using Context): Type = + if ctx.period != validUnderlyingMatch then + validUnderlyingMatch = if tycon.isProvisional then Nowhere else ctx.period + cachedUnderlyingMatch = superType.underlyingMatchType + cachedUnderlyingMatch + override def tryNormalize(using Context): Type = tycon.stripTypeVar match { case tycon: TypeRef => def tryMatchAlias = tycon.info match From 4699140dcdc76e00d3a85f3eeab25d843b938e5a Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Mon, 8 Apr 2024 20:43:52 +0200 Subject: [PATCH 080/465] Do not assume the superType of a match type gives us a sound mirror --- compiler/src/dotty/tools/dotc/typer/Synthesizer.scala | 4 +++- tests/neg/i19198.scala | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 61cba4c80203..d244af12dd91 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -379,7 +379,9 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): // avoid type aliases for tuples Right(MirrorSource.GenericTuple(types)) case _ => reduce(tp.underlying) - case tp: MatchType => reduce(tp.tryNormalize.orElse(tp.superType)) + case tp: MatchType => + val n = tp.tryNormalize + if n.exists then reduce(n) else Left(i"its subpart `$tp` is an unreducible match type.") case _ => reduce(tp.superType) case tp @ AndType(l, r) => for diff --git a/tests/neg/i19198.scala b/tests/neg/i19198.scala index ad5ee29bb042..be4fc1602697 100644 --- a/tests/neg/i19198.scala +++ b/tests/neg/i19198.scala @@ -4,6 +4,10 @@ import compiletime.summonInline type DoesNotReduce[T] = T match case String => Any +type DoesNotReduce2[T] <: T = T match + case String => T + class Foo @main def Test: Unit = summonInline[Mirror.Of[DoesNotReduce[Option[Int]]]] // error + summonInline[Mirror.Of[DoesNotReduce2[Option[Int]]]] // error 
From 08b0fec4ab98252df87660b931a2e3bcde892981 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 9 Apr 2024 15:51:35 +0200 Subject: [PATCH 081/465] Add flexible types for explicit nulls --- .../tools/dotc/config/ScalaSettings.scala | 1 + .../tools/dotc/core/ConstraintHandling.scala | 6 +- .../src/dotty/tools/dotc/core/Contexts.scala | 3 + .../dotty/tools/dotc/core/Definitions.scala | 6 +- .../tools/dotc/core/JavaNullInterop.scala | 30 +++--- .../tools/dotc/core/NullOpsDecorator.scala | 7 +- .../tools/dotc/core/OrderingConstraint.scala | 8 +- .../dotc/core/PatternTypeConstrainer.scala | 3 +- .../tools/dotc/core/TypeApplications.scala | 1 + .../dotty/tools/dotc/core/TypeComparer.scala | 6 ++ .../src/dotty/tools/dotc/core/Types.scala | 87 +++++++++++++++-- .../tools/dotc/core/tasty/TreePickler.scala | 3 + .../tools/dotc/core/tasty/TreeUnpickler.scala | 2 + .../tools/dotc/printing/PlainPrinter.scala | 2 + .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 2 + .../tools/dotc/transform/ElimRepeated.scala | 2 +- .../dotty/tools/dotc/transform/Recheck.scala | 2 +- .../dotc/transform/SyntheticMembers.scala | 2 +- .../tools/dotc/transform/TypeTestsCasts.scala | 3 +- .../dotty/tools/dotc/typer/Applications.scala | 2 +- .../dotty/tools/dotc/typer/Nullables.scala | 14 ++- .../src/dotty/tools/dotc/typer/Typer.scala | 28 +++--- .../dotty/tools/dotc/CompilationTests.scala | 12 ++- .../tools/vulpix/TestConfiguration.scala | 2 + .../reference/experimental/explicit-nulls.md | 97 +++++++++++++------ .../scala/runtime/stdLibPatches/Predef.scala | 3 +- project/Build.scala | 11 +++ project/MiMaFilters.scala | 1 + tasty/src/dotty/tools/tasty/TastyFormat.scala | 5 +- .../flexible-types-common/i7883.scala | 9 ++ .../interop-array-src/J.java | 0 .../interop-array-src/S.scala | 0 .../flexible-types-common/interop-chain.scala | 9 ++ .../interop-enum-src/Day.java | 0 .../interop-enum-src/Planet.java | 0 .../interop-enum-src/S.scala | 1 + .../interop-generics/J.java | 0 .../interop-generics/S.scala 
| 0 .../interop-implicit.scala | 10 ++ .../interop-java-call/J.java | 17 ++++ .../interop-java-call/S.scala | 37 +++++++ .../interop-java-chain/J.java | 7 ++ .../interop-java-chain/S.scala | 4 + .../interop-java-varargs-src/Names.java | 0 .../interop-java-varargs-src/S.scala | 10 ++ .../interop-java-varargs.scala | 38 ++++++++ .../interop-method-src/J.java | 0 .../interop-method-src/S.scala | 0 .../interop-propagate.scala | 18 ++++ .../interop-select-type-member.scala | 7 ++ tests/explicit-nulls/neg/i7883.check | 12 +-- tests/explicit-nulls/neg/i7883.scala | 2 + .../neg/interop-enum-src/S.scala | 6 -- .../neg/interop-propagate.scala | 10 -- tests/explicit-nulls/neg/interop-return.scala | 2 + tests/explicit-nulls/neg/notnull/S.scala | 2 + .../pos/interop-applied-types/J.java | 3 + .../pos/interop-applied-types/S.scala | 14 +++ .../pos/interop-constructor-src/S.scala | 1 + .../pos/interop-enum-src/Planet.java | 19 ---- .../pos/interop-generics/J.java | 13 --- .../pos/interop-generics/S.scala | 6 -- .../explicit-nulls/pos/interop-nn-src/S.scala | 4 +- .../pos/interop-ortype-src/J.java | 3 + .../pos/interop-ortype-src/S.scala | 7 ++ .../pos/interop-poly-src/S.scala | 17 ++++ .../pos/interop-static-src/S.scala | 2 + .../pos/match-with-applied-types.scala.scala | 7 ++ .../pos/sam-parameter-javadefined/injava.java | 6 ++ .../sam-parameter-javadefined/sam-test.scala | 23 +++++ .../pos/widen-nullable-union.scala | 12 +++ .../unsafe-java-varargs-src/S.scala | 8 +- .../neg-deep-subtype/interop-polytypes.scala | 2 +- 73 files changed, 541 insertions(+), 158 deletions(-) create mode 100644 tests/explicit-nulls/flexible-types-common/i7883.scala rename tests/explicit-nulls/{neg => flexible-types-common}/interop-array-src/J.java (100%) rename tests/explicit-nulls/{neg => flexible-types-common}/interop-array-src/S.scala (100%) create mode 100644 tests/explicit-nulls/flexible-types-common/interop-chain.scala rename tests/explicit-nulls/{pos => 
flexible-types-common}/interop-enum-src/Day.java (100%) rename tests/explicit-nulls/{neg => flexible-types-common}/interop-enum-src/Planet.java (100%) rename tests/explicit-nulls/{pos => flexible-types-common}/interop-enum-src/S.scala (60%) rename tests/explicit-nulls/{neg => flexible-types-common}/interop-generics/J.java (100%) rename tests/explicit-nulls/{neg => flexible-types-common}/interop-generics/S.scala (100%) create mode 100644 tests/explicit-nulls/flexible-types-common/interop-implicit.scala create mode 100644 tests/explicit-nulls/flexible-types-common/interop-java-call/J.java create mode 100644 tests/explicit-nulls/flexible-types-common/interop-java-call/S.scala create mode 100644 tests/explicit-nulls/flexible-types-common/interop-java-chain/J.java create mode 100644 tests/explicit-nulls/flexible-types-common/interop-java-chain/S.scala rename tests/explicit-nulls/{pos => flexible-types-common}/interop-java-varargs-src/Names.java (100%) rename tests/explicit-nulls/{pos => flexible-types-common}/interop-java-varargs-src/S.scala (64%) create mode 100644 tests/explicit-nulls/flexible-types-common/interop-java-varargs.scala rename tests/explicit-nulls/{neg => flexible-types-common}/interop-method-src/J.java (100%) rename tests/explicit-nulls/{neg => flexible-types-common}/interop-method-src/S.scala (100%) create mode 100644 tests/explicit-nulls/flexible-types-common/interop-propagate.scala create mode 100644 tests/explicit-nulls/flexible-types-common/interop-select-type-member.scala delete mode 100644 tests/explicit-nulls/neg/interop-enum-src/S.scala delete mode 100644 tests/explicit-nulls/neg/interop-propagate.scala create mode 100644 tests/explicit-nulls/pos/interop-applied-types/J.java create mode 100644 tests/explicit-nulls/pos/interop-applied-types/S.scala delete mode 100644 tests/explicit-nulls/pos/interop-enum-src/Planet.java delete mode 100644 tests/explicit-nulls/pos/interop-generics/J.java delete mode 100644 
tests/explicit-nulls/pos/interop-generics/S.scala create mode 100644 tests/explicit-nulls/pos/interop-ortype-src/J.java create mode 100644 tests/explicit-nulls/pos/interop-ortype-src/S.scala create mode 100644 tests/explicit-nulls/pos/match-with-applied-types.scala.scala create mode 100644 tests/explicit-nulls/pos/sam-parameter-javadefined/injava.java create mode 100644 tests/explicit-nulls/pos/sam-parameter-javadefined/sam-test.scala diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 2e48ca78258f..fc7e61c8ec71 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -417,6 +417,7 @@ private sealed trait YSettings: // Experimental language features val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-kind-polymorphism", "Disable kind polymorphism.") val YexplicitNulls: Setting[Boolean] = BooleanSetting(ForkSetting, "Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") + val YnoFlexibleTypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-flexible-types", "Disable turning nullable Java return types and parameter types into flexible types, which behave like abstract types with a nullable lower bound and non-nullable upper bound.") val YcheckInit: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init", "Ensure safe initialization of objects.") val YcheckInitGlobal: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init-global", "Check safe initialization of global objects.") val YrequireTargetName: Setting[Boolean] = BooleanSetting(ForkSetting, "Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation.") diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 1870956357d6..109929f0c6f5 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -696,9 +696,11 @@ trait ConstraintHandling { tp.rebind(tp.parent.hardenUnions) case tp: HKTypeLambda => tp.derivedLambdaType(resType = tp.resType.hardenUnions) + case tp: FlexibleType => + tp.derivedFlexibleType(tp.hi.hardenUnions) case tp: OrType => - val tp1 = tp.stripNull - if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType) + val tp1 = tp.stripNull(stripFlexibleTypes = false) + if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType, soft = false) else tp.derivedOrType(tp.tp1.hardenUnions, tp.tp2.hardenUnions, soft = false) case _ => tp diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index ae21c6fb8763..73fea84a640b 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -472,6 +472,9 @@ object Contexts { /** Is the explicit nulls option set? 
*/ def explicitNulls: Boolean = base.settings.YexplicitNulls.value + /** Is the flexible types option set? */ + def flexibleTypes: Boolean = base.settings.YexplicitNulls.value && !base.settings.YnoFlexibleTypes.value + /** A fresh clone of this context embedded in this context. */ def fresh: FreshContext = freshOver(this) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index f187498da1fb..932a7d72d33e 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -648,7 +648,7 @@ class Definitions { @tu lazy val StringModule: Symbol = StringClass.linkedClass @tu lazy val String_+ : TermSymbol = enterMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final) @tu lazy val String_valueOf_Object: Symbol = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match { - case List(pt) => pt.isAny || pt.stripNull.isAnyRef + case List(pt) => pt.isAny || pt.stripNull().isAnyRef case _ => false }).symbol @@ -660,13 +660,13 @@ class Definitions { @tu lazy val ClassCastExceptionClass: ClassSymbol = requiredClass("java.lang.ClassCastException") @tu lazy val ClassCastExceptionClass_stringConstructor: TermSymbol = ClassCastExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { case List(pt) => - pt.stripNull.isRef(StringClass) + pt.stripNull().isRef(StringClass) case _ => false }).symbol.asTerm @tu lazy val ArithmeticExceptionClass: ClassSymbol = requiredClass("java.lang.ArithmeticException") @tu lazy val ArithmeticExceptionClass_stringConstructor: TermSymbol = ArithmeticExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { case List(pt) => - pt.stripNull.isRef(StringClass) + pt.stripNull().isRef(StringClass) case _ => false }).symbol.asTerm diff --git a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala 
b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala index 6244923cfb52..46ce0d2d7852 100644 --- a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala +++ b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala @@ -78,11 +78,11 @@ object JavaNullInterop { * but the result type is not nullable. */ private def nullifyExceptReturnType(tp: Type)(using Context): Type = - new JavaNullMap(true)(tp) + new JavaNullMap(outermostLevelAlreadyNullable = true)(tp) /** Nullifies a Java type by adding `| Null` in the relevant places. */ private def nullifyType(tp: Type)(using Context): Type = - new JavaNullMap(false)(tp) + new JavaNullMap(outermostLevelAlreadyNullable = false)(tp) /** A type map that implements the nullification function on types. Given a Java-sourced type, this adds `| Null` * in the right places to make the nulls explicit in Scala. @@ -96,25 +96,29 @@ object JavaNullInterop { * to `(A & B) | Null`, instead of `(A | Null & B | Null) | Null`. */ private class JavaNullMap(var outermostLevelAlreadyNullable: Boolean)(using Context) extends TypeMap { + def nullify(tp: Type): Type = if ctx.flexibleTypes then FlexibleType(tp) else OrNull(tp) + /** Should we nullify `tp` at the outermost level? */ def needsNull(tp: Type): Boolean = - !outermostLevelAlreadyNullable && (tp match { - case tp: TypeRef => + if outermostLevelAlreadyNullable then false + else tp match + case tp: TypeRef if // We don't modify value types because they're non-nullable even in Java. - !tp.symbol.isValueClass && + tp.symbol.isValueClass + // We don't modify unit types. + || tp.isRef(defn.UnitClass) // We don't modify `Any` because it's already nullable. - !tp.isRef(defn.AnyClass) && + || tp.isRef(defn.AnyClass) // We don't nullify Java varargs at the top level. // Example: if `setNames` is a Java method with signature `void setNames(String... names)`, // then its Scala signature will be `def setNames(names: (String|Null)*): Unit`. 
// This is because `setNames(null)` passes as argument a single-element array containing the value `null`, // and not a `null` array. - !tp.isRef(defn.RepeatedParamClass) + || !ctx.flexibleTypes && tp.isRef(defn.RepeatedParamClass) => false case _ => true - }) override def apply(tp: Type): Type = tp match { - case tp: TypeRef if needsNull(tp) => OrNull(tp) + case tp: TypeRef if needsNull(tp) => nullify(tp) case appTp @ AppliedType(tycon, targs) => val oldOutermostNullable = outermostLevelAlreadyNullable // We don't make the outmost levels of type arguments nullable if tycon is Java-defined. @@ -124,7 +128,7 @@ object JavaNullInterop { val targs2 = targs map this outermostLevelAlreadyNullable = oldOutermostNullable val appTp2 = derivedAppliedType(appTp, tycon, targs2) - if needsNull(tycon) then OrNull(appTp2) else appTp2 + if needsNull(tycon) then nullify(appTp2) else appTp2 case ptp: PolyType => derivedLambdaType(ptp)(ptp.paramInfos, this(ptp.resType)) case mtp: MethodType => @@ -138,12 +142,12 @@ object JavaNullInterop { // nullify(A & B) = (nullify(A) & nullify(B)) | Null, but take care not to add // duplicate `Null`s at the outermost level inside `A` and `B`. outermostLevelAlreadyNullable = true - OrNull(derivedAndType(tp, this(tp.tp1), this(tp.tp2))) - case tp: TypeParamRef if needsNull(tp) => OrNull(tp) + nullify(derivedAndType(tp, this(tp.tp1), this(tp.tp2))) + case tp: TypeParamRef if needsNull(tp) => nullify(tp) // In all other cases, return the type unchanged. // In particular, if the type is a ConstantType, then we don't nullify it because it is the // type of a final non-nullable field. 
case _ => tp } } } diff --git a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala index 4f22f9d31e36..291498dbc558 100644 --- a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala +++ b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala @@ -14,7 +14,7 @@ object NullOpsDecorator: * If this type isn't (syntactically) nullable, then returns the type unchanged. * The type will not be changed if explicit-nulls is not enabled. */ - def stripNull(using Context): Type = { + def stripNull(stripFlexibleTypes: Boolean = true)(using Context): Type = { def strip(tp: Type): Type = val tpWiden = tp.widenDealias val tpStripped = tpWiden match { @@ -33,6 +33,9 @@ object NullOpsDecorator: if (tp1s ne tp1) && (tp2s ne tp2) then tp.derivedAndType(tp1s, tp2s) else tp + case tp: FlexibleType => + val hi1 = strip(tp.hi) + if stripFlexibleTypes then hi1 else tp.derivedFlexibleType(hi1) case tp @ TypeBounds(lo, hi) => tp.derivedTypeBounds(strip(lo), strip(hi)) case tp => tp @@ -44,7 +47,7 @@ object NullOpsDecorator: /** Is self (after widening and dealiasing) a type of the form `T | Null`?
*/ def isNullableUnion(using Context): Boolean = { - val stripped = self.stripNull + val stripped = self.stripNull() stripped ne self } end extension diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index e11ac26ef93c..dd2319ed508b 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -562,11 +562,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val underlying1 = recur(tp.underlying) if underlying1 ne tp.underlying then underlying1 else tp case CapturingType(parent, refs) => - val parent1 = recur(parent) - if parent1 ne parent then tp.derivedCapturingType(parent1, refs) else tp + tp.derivedCapturingType(recur(parent), refs) + case tp: FlexibleType => + tp.derivedFlexibleType(recur(tp.hi)) case tp: AnnotatedType => - val parent1 = recur(tp.parent) - if parent1 ne tp.parent then tp.derivedAnnotatedType(parent1, tp.annot) else tp + tp.derivedAnnotatedType(recur(tp.parent), tp.annot) case _ => val tp1 = tp.dealiasKeepAnnots if tp1 ne tp then diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 7942bbaa3d45..6d6a47cf6a1e 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -163,7 +163,7 @@ trait PatternTypeConstrainer { self: TypeComparer => } } - def dealiasDropNonmoduleRefs(tp: Type) = tp.dealias match { + def dealiasDropNonmoduleRefs(tp: Type): Type = tp.dealias match { case tp: TermRef => // we drop TermRefs that don't have a class symbol, as they can't // meaningfully participate in GADT reasoning and just get in the way. @@ -172,6 +172,7 @@ trait PatternTypeConstrainer { self: TypeComparer => // additional trait - argument-less enum cases desugar to vals. // See run/enum-Tree.scala. 
if tp.classSymbol.exists then tp else tp.info + case tp: FlexibleType => dealiasDropNonmoduleRefs(tp.underlying) case tp => tp } diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index efcad3307937..54636ff4ad58 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -541,6 +541,7 @@ class TypeApplications(val self: Type) extends AnyVal { */ final def argInfos(using Context): List[Type] = self.stripped match case AppliedType(tycon, args) => args + case tp: FlexibleType => tp.underlying.argInfos case _ => Nil /** If this is an encoding of a function type, return its arguments, otherwise return Nil. diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 8b6e099bfe41..a9b5a39c2a62 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -864,6 +864,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false } compareClassInfo + case tp2: FlexibleType => + recur(tp1, tp2.lo) case _ => fourthTry } @@ -1059,6 +1061,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp1: ExprType if ctx.phaseId > gettersPhase.id => // getters might have converted T to => T, need to compensate. 
recur(tp1.widenExpr, tp2) + case tp1: FlexibleType => + recur(tp1.hi, tp2) case _ => false } @@ -3437,6 +3441,8 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { isConcrete(tp1.underlying) case tp1: AndOrType => isConcrete(tp1.tp1) && isConcrete(tp1.tp2) + case tp1: FlexibleType => + isConcrete(tp1.hi) case _ => val tp2 = tp1.stripped.stripLazyRef (tp2 ne tp) && isConcrete(tp2) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 029bd97fa3c1..3c9f7e05b6e2 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -307,6 +307,7 @@ object Types extends TypeUtils { isRef(defn.ObjectClass) && (typeSymbol eq defn.FromJavaObjectSymbol) def containsFromJavaObject(using Context): Boolean = this match + case tp: FlexibleType => tp.underlying.containsFromJavaObject case tp: OrType => tp.tp1.containsFromJavaObject || tp.tp2.containsFromJavaObject case tp: AndType => tp.tp1.containsFromJavaObject && tp.tp2.containsFromJavaObject case _ => isFromJavaObject @@ -345,6 +346,7 @@ object Types extends TypeUtils { /** Is this type guaranteed not to have `null` as a value? 
*/ final def isNotNull(using Context): Boolean = this match { case tp: ConstantType => tp.value.value != null + case tp: FlexibleType => false case tp: ClassInfo => !tp.cls.isNullableClass && tp.cls != defn.NothingClass case tp: AppliedType => tp.superType.isNotNull case tp: TypeBounds => tp.lo.isNotNull @@ -374,6 +376,7 @@ object Types extends TypeUtils { case AppliedType(tycon, args) => tycon.unusableForInference || args.exists(_.unusableForInference) case RefinedType(parent, _, rinfo) => parent.unusableForInference || rinfo.unusableForInference case TypeBounds(lo, hi) => lo.unusableForInference || hi.unusableForInference + case tp: FlexibleType => tp.underlying.unusableForInference case tp: AndOrType => tp.tp1.unusableForInference || tp.tp2.unusableForInference case tp: LambdaType => tp.resultType.unusableForInference || tp.paramInfos.exists(_.unusableForInference) case WildcardType(optBounds) => optBounds.unusableForInference @@ -930,7 +933,7 @@ object Types extends TypeUtils { // Selecting `name` from a type `T | Null` is like selecting `name` from `T`, if // unsafeNulls is enabled and T is a subtype of AnyRef. // This can throw at runtime, but we trade soundness for usability. - tp1.findMember(name, pre.stripNull, required, excluded) + tp1.findMember(name, pre.stripNull(), required, excluded) case _ => searchAfterJoin else searchAfterJoin @@ -1354,13 +1357,13 @@ object Types extends TypeUtils { * then the top-level union isn't widened. This is needed so that type inference can infer nullable types. */ def widenUnion(using Context): Type = widen match - case tp: OrType => tp match - case OrNull(tp1) => - // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. 
+ case tp: OrType => + val tp1 = tp.stripNull(stripFlexibleTypes = false) + if tp1 ne tp then val tp1Widen = tp1.widenUnionWithoutNull - if (tp1Widen.isRef(defn.AnyClass)) tp1Widen + if tp1Widen.isRef(defn.AnyClass) then tp1Widen else tp.derivedOrType(tp1Widen, defn.NullType) - case _ => + else tp.widenUnionWithoutNull case tp => tp.widenUnionWithoutNull @@ -1375,6 +1378,8 @@ object Types extends TypeUtils { tp.rebind(tp.parent.widenUnion) case tp: HKTypeLambda => tp.derivedLambdaType(resType = tp.resType.widenUnion) + case tp: FlexibleType => + tp.derivedFlexibleType(tp.hi.widenUnionWithoutNull) case tp => tp @@ -3453,6 +3458,50 @@ object Types extends TypeUtils { } } + // --- FlexibleType ----------------------------------------------------------------- + + /* A flexible type is a type with a custom subtyping relationship. + * It is used by explicit nulls to represent a type coming from Java which can be + * considered as nullable or non-nullable depending on the context, in a similar way to Platform + * Types in Kotlin. A `FlexibleType(T)` generally behaves like a type variable with special bounds + * `T | Null .. T`, so that `T | Null <: FlexibleType(T) <: T`. + * A flexible type will be erased to its original type `T`. + */ + case class FlexibleType(lo: Type, hi: Type) extends CachedProxyType with ValueType { + + override def underlying(using Context): Type = hi + + def derivedFlexibleType(hi: Type)(using Context): Type = + if hi eq this.hi then this else FlexibleType(hi) + + override def computeHash(bs: Binders): Int = doHash(bs, hi) + + override final def baseClasses(using Context): List[ClassSymbol] = hi.baseClasses + } + + object FlexibleType { + def apply(tp: Type)(using Context): Type = tp match { + case ft: FlexibleType => ft + case _ => + // val tp1 = tp.stripNull() + // if tp1.isNullType then + // // (Null)? =:= ? >: Null <: (Object & Null) + // FlexibleType(tp, AndType(defn.ObjectType, defn.NullType)) + // else + // // (T | Null)? =:= ? 
>: T | Null <: T + // // (T)? =:= ? >: T | Null <: T + // val hi = tp1 + // val lo = if hi eq tp then OrNull(hi) else tp + // FlexibleType(lo, hi) + // + // The commented out code does more work to analyze the original type to ensure the + // flexible type is always a subtype of the original type and the Object type. + // It is not necessary according to the use cases, so we choose to use a simpler + // rule. + FlexibleType(OrNull(tp), tp) + } + } + // --- AndType/OrType --------------------------------------------------------------- abstract class AndOrType extends CachedGroundType with ValueType { @@ -3707,7 +3756,8 @@ object Types extends TypeUtils { assert(!ctx.isAfterTyper, s"$tp in $where") // we check correct kinds at PostTyper throw TypeError(em"$tp is not a value type, cannot be used $where") - /** An extractor object to pattern match against a nullable union. + /** An extractor object to pattern match against a nullable union + * (including flexible types). * e.g. * * (tp: Type) match @@ -3718,7 +3768,7 @@ object Types extends TypeUtils { def apply(tp: Type)(using Context) = if tp.isNullType then tp else OrType(tp, defn.NullType, soft = false) def unapply(tp: Type)(using Context): Option[Type] = - val tp1 = tp.stripNull + val tp1 = tp.stripNull() if tp1 ne tp then Some(tp1) else None } @@ -5962,6 +6012,8 @@ object Types extends TypeUtils { samClass(tp.underlying) case tp: AnnotatedType => samClass(tp.underlying) + case tp: FlexibleType => + samClass(tp.underlying) case _ => NoSymbol @@ -6092,6 +6144,8 @@ object Types extends TypeUtils { tp.derivedJavaArrayType(elemtp) protected def derivedExprType(tp: ExprType, restpe: Type): Type = tp.derivedExprType(restpe) + protected def derivedFlexibleType(tp: FlexibleType, hi: Type): Type = + tp.derivedFlexibleType(hi) // note: currying needed because Scala2 does not support param-dependencies protected def derivedLambdaType(tp: LambdaType)(formals: List[tp.PInfo], restpe: Type): Type = 
tp.derivedLambdaType(tp.paramNames, formals, restpe) @@ -6215,6 +6269,9 @@ object Types extends TypeUtils { case tp: OrType => derivedOrType(tp, this(tp.tp1), this(tp.tp2)) + case tp: FlexibleType => + derivedFlexibleType(tp, this(tp.hi)) + case tp: MatchType => val bound1 = this(tp.bound) val scrut1 = atVariance(0)(this(tp.scrutinee)) @@ -6502,6 +6559,17 @@ object Types extends TypeUtils { if (underlying.isExactlyNothing) underlying else tp.derivedAnnotatedType(underlying, annot) } + + override protected def derivedFlexibleType(tp: FlexibleType, hi: Type): Type = + hi match { + case Range(lo, hi) => + // We know FlexibleType(t).hi = t and FlexibleType(t).lo = OrNull(t) + range(OrNull(lo), hi) + case _ => + if (hi.isExactlyNothing) hi + else tp.derivedFlexibleType(hi) + } + override protected def derivedCapturingType(tp: Type, parent: Type, refs: CaptureSet): Type = parent match // TODO ^^^ handle ranges in capture sets as well case Range(lo, hi) => @@ -6631,6 +6699,9 @@ object Types extends TypeUtils { case tp: TypeVar => this(x, tp.underlying) + case tp: FlexibleType => + this(x, tp.underlying) + case ExprType(restpe) => this(x, restpe) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 7d2d95aa9601..0a8669292a74 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -272,6 +272,9 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { case tpe: OrType => writeByte(ORtype) withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) } + case tpe: FlexibleType => + writeByte(FLEXIBLEtype) + withLength { pickleType(tpe.underlying, richTypes) } case tpe: ExprType => writeByte(BYNAMEtype) pickleType(tpe.underlying) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 
a75cc6c666d0..5f04418bbe7f 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -442,6 +442,8 @@ class TreeUnpickler(reader: TastyReader, readTypeRef() match { case binder: LambdaType => binder.paramRefs(readNat()) } + case FLEXIBLEtype => + FlexibleType(readType()) } assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") result diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index ac7b4ef39604..241bfb4f7c7b 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -294,6 +294,8 @@ class PlainPrinter(_ctx: Context) extends Printer { && !printDebug then atPrec(GlobalPrec)( Str("into ") ~ toText(tpe) ) else toTextLocal(tpe) ~ " " ~ toText(annot) + case FlexibleType(_, tpe) => + "(" ~ toText(tpe) ~ ")?" case tp: TypeVar => def toTextCaret(tp: Type) = if printDebug then toTextLocal(tp) ~ Str("^") else toText(tp) if (tp.isInstantiated) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index d43a2f22a7fb..138cda099040 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -681,6 +681,8 @@ private class ExtractAPICollector(nonLocalClassSymbols: mutable.HashSet[Symbol]) case tp: OrType => val s = combineApiTypes(apiType(tp.tp1), apiType(tp.tp2)) withMarker(s, orMarker) + case tp: FlexibleType => + apiType(tp.underlying) case ExprType(resultType) => withMarker(apiType(resultType), byNameMarker) case MatchType(bound, scrut, cases) => diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala index b98d7d525089..ae2fc578728f 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala 
+++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -293,7 +293,7 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => val element = array.elemType.hiBound // T if element <:< defn.AnyRefType - || ctx.mode.is(Mode.SafeNulls) && element.stripNull <:< defn.AnyRefType + || ctx.mode.is(Mode.SafeNulls) && element.stripNull() <:< defn.AnyRefType || element.typeSymbol.isPrimitiveValueClass then array else defn.ArrayOf(TypeBounds.upper(AndType(element, defn.AnyRefType))) // Array[? <: T & AnyRef] diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 44d5caba631a..f809fbd176ce 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -427,7 +427,7 @@ abstract class Recheck extends Phase, SymTransformer: TypeComparer.lub(bodyType :: casesTypes) def recheckSeqLiteral(tree: SeqLiteral, pt: Type)(using Context): Type = - val elemProto = pt.stripNull.elemType match + val elemProto = pt.stripNull().elemType match case NoType => WildcardType case bounds: TypeBounds => WildcardType(bounds) case elemtp => elemtp diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 6d2aedb9b47b..45606b0dbef5 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -252,7 +252,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { // Second constructor of ioob that takes a String argument def filterStringConstructor(s: Symbol): Boolean = s.info match { case m: MethodType if s.isConstructor && m.paramInfos.size == 1 => - m.paramInfos.head.stripNull == defn.StringType + m.paramInfos.head.stripNull() == defn.StringType case _ => false } val constructor = ioob.typeSymbol.info.decls.find(filterStringConstructor _).asTerm diff --git 
a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 74a4845424ea..509461c794f4 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -151,7 +151,8 @@ object TypeTestsCasts { // - T1 & T2 <:< T3 // See TypeComparer#either recur(tp1, P) && recur(tp2, P) - + case tpX: FlexibleType => + recur(tpX.underlying, P) case x => // always false test warnings are emitted elsewhere // provablyDisjoint wants fully applied types as input; because we're in the middle of erasure, we sometimes get raw types here diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 4cf0e6619772..bca832b0bfaf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -971,7 +971,7 @@ trait Applications extends Compatibility { // one can imagine the original signature-polymorphic method as // being infinitely overloaded, with each individual overload only // being brought into existence as needed - val originalResultType = funRef.symbol.info.resultType.stripNull + val originalResultType = funRef.symbol.info.resultType.stripNull() val resultType = if !originalResultType.isRef(defn.ObjectClass) then originalResultType else AvoidWildcardsMap()(proto.resultType.deepenProtoTrans) match diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index 914fc0acb89d..3f071dad2d03 100644 --- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -33,20 +33,24 @@ object Nullables: && hi.isValueType // We cannot check if hi is nullable, because it can cause cyclic reference. 
+ private def nullifiedHi(lo: Type, hi: Type)(using Context): Type = + if needNullifyHi(lo, hi) then + if ctx.flexibleTypes then FlexibleType(hi) else OrNull(hi) + else hi + /** Create a nullable type bound * If lo is `Null`, `| Null` is added to hi */ def createNullableTypeBounds(lo: Type, hi: Type)(using Context): TypeBounds = - val newHi = if needNullifyHi(lo, hi) then OrType(hi, defn.NullType, soft = false) else hi - TypeBounds(lo, newHi) + TypeBounds(lo, nullifiedHi(lo, hi)) /** Create a nullable type bound tree * If lo is `Null`, `| Null` is added to hi */ def createNullableTypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(using Context): TypeBoundsTree = - val hiTpe = hi.typeOpt - val newHi = if needNullifyHi(lo.typeOpt, hiTpe) then TypeTree(OrType(hiTpe, defn.NullType, soft = false)) else hi - TypeBoundsTree(lo, newHi, alias) + val hiTpe = nullifiedHi(lo.typeOpt, hi.typeOpt) + val hiTree = if(hiTpe eq hi.typeOpt) hi else TypeTree(hiTpe) + TypeBoundsTree(lo, hiTree, alias) /** A set of val or var references that are known to be not null, plus a set of * variable references that are not known (anymore) to be not null diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 6b9afab06e33..8c80cdf3100b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -981,17 +981,23 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } if (untpd.isWildcardStarArg(tree)) { - def typedWildcardStarArgExpr = { - // A sequence argument `xs: _*` can be either a `Seq[T]` or an `Array[_ <: T]`, - // irrespective of whether the method we're calling is a Java or Scala method, - // so the expected type is the union `Seq[T] | Array[_ <: T]`. 
- val ptArg = - // FIXME(#8680): Quoted patterns do not support Array repeated arguments + + def fromRepeated(pt: Type): Type = pt match + case pt: FlexibleType => + pt.derivedFlexibleType(fromRepeated(pt.hi)) + case _ => if ctx.mode.isQuotedPattern then + // FIXME(#8680): Quoted patterns do not support Array repeated arguments pt.translateFromRepeated(toArray = false, translateWildcard = true) else pt.translateFromRepeated(toArray = false, translateWildcard = true) - | pt.translateFromRepeated(toArray = true, translateWildcard = true) + | pt.translateFromRepeated(toArray = true, translateWildcard = true) + + def typedWildcardStarArgExpr = { + // A sequence argument `xs: _*` can be either a `Seq[T]` or an `Array[_ <: T]`, + // irrespective of whether the method we're calling is a Java or Scala method, + // so the expected type is the union `Seq[T] | Array[_ <: T]`. + val ptArg = fromRepeated(pt) val expr0 = typedExpr(tree.expr, ptArg) val expr1 = if ctx.explicitNulls && (!ctx.mode.is(Mode.Pattern)) then if expr0.tpe.isNullType then @@ -1079,7 +1085,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * with annotation contructor, as named arguments are not allowed anywhere else in Java. * Under explicit nulls, the pt could be nullable. We need to strip `Null` type first. 
*/ - val arg1 = pt.stripNull match { + val arg1 = pt.stripNull() match { case AppliedType(a, typ :: Nil) if ctx.isJava && a.isRef(defn.ArrayClass) => tryAlternatively { typed(tree.arg, pt) } { val elemTp = untpd.TypedSplice(TypeTree(typ)) @@ -1906,7 +1912,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val case1 = typedCase(cas, sel, wideSelType, tpe)(using caseCtx) caseCtx = Nullables.afterPatternContext(sel, case1.pat) if !alreadyStripped && Nullables.matchesNull(case1) then - wideSelType = wideSelType.stripNull + wideSelType = wideSelType.stripNull() alreadyStripped = true case1 } @@ -1929,7 +1935,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val case1 = typedCase(cas, sel, wideSelType, pt)(using caseCtx) caseCtx = Nullables.afterPatternContext(sel, case1.pat) if !alreadyStripped && Nullables.matchesNull(case1) then - wideSelType = wideSelType.stripNull + wideSelType = wideSelType.stripNull() alreadyStripped = true case1 } @@ -2129,7 +2135,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else res def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(using Context): SeqLiteral = { - val elemProto = pt.stripNull.elemType match { + val elemProto = pt.stripNull().elemType match { case NoType => WildcardType case bounds: TypeBounds => WildcardType(bounds) case elemtp => elemtp diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index a96a4ea09102..542ef1897b74 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -194,22 +194,24 @@ class CompilationTests { @Test def explicitNullsNeg: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsNeg") aggregateTests( - compileFilesInDir("tests/explicit-nulls/neg", defaultOptions and "-Yexplicit-nulls"), - compileFilesInDir("tests/explicit-nulls/unsafe-common", defaultOptions and 
"-Yexplicit-nulls"), + compileFilesInDir("tests/explicit-nulls/neg", explicitNullsOptions), + compileFilesInDir("tests/explicit-nulls/flexible-types-common", explicitNullsOptions and "-Yno-flexible-types"), + compileFilesInDir("tests/explicit-nulls/unsafe-common", explicitNullsOptions and "-Yno-flexible-types"), ) }.checkExpectedErrors() @Test def explicitNullsPos: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsPos") aggregateTests( - compileFilesInDir("tests/explicit-nulls/pos", defaultOptions and "-Yexplicit-nulls"), - compileFilesInDir("tests/explicit-nulls/unsafe-common", defaultOptions and "-Yexplicit-nulls" and "-language:unsafeNulls"), + compileFilesInDir("tests/explicit-nulls/pos", explicitNullsOptions), + compileFilesInDir("tests/explicit-nulls/flexible-types-common", explicitNullsOptions), + compileFilesInDir("tests/explicit-nulls/unsafe-common", explicitNullsOptions and "-language:unsafeNulls" and "-Yno-flexible-types"), ) }.checkCompile() @Test def explicitNullsRun: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsRun") - compileFilesInDir("tests/explicit-nulls/run", defaultOptions and "-Yexplicit-nulls") + compileFilesInDir("tests/explicit-nulls/run", explicitNullsOptions) }.checkRuns() // initialization tests diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index 1defe3f4f53d..f5540304da89 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -89,6 +89,8 @@ object TestConfiguration { val picklingWithCompilerOptions = picklingOptions.withClasspath(withCompilerClasspath).withRunClasspath(withCompilerClasspath) + val explicitNullsOptions = defaultOptions and "-Yexplicit-nulls" + /** Default target of the generated class files */ private def defaultTarget: String = { import scala.util.Properties.isJavaAtLeast diff --git 
a/docs/_docs/reference/experimental/explicit-nulls.md b/docs/_docs/reference/experimental/explicit-nulls.md index 1925b0b3c925..bcbea34dd18d 100644 --- a/docs/_docs/reference/experimental/explicit-nulls.md +++ b/docs/_docs/reference/experimental/explicit-nulls.md @@ -111,17 +111,59 @@ y == x // ok (x: Any) == null // ok ``` -## Java Interoperability +## Java Interoperability and Flexible Types + +When dealing with reference types from Java, it's essential to address the implicit nullability of these types. +The most accurate way to represent them in Scala is to use nullable types, though working with lots of nullable types +directly can be annoying. +To streamline interactions with Java libraries, we introduce the concept of flexible types. + +The flexible type, denoted by `T?`, functions as an abstract type with unique bounds: `T | Null ... T`, +ensuring that `T | Null <: T? <: T`. +The subtyping rule treats a reference type coming from Java as either nullable or non-nullable depending on the context. +This concept draws inspiration from Kotlin's +[platform types](https://kotlinlang.org/docs/java-interop.html#null-safety-and-platform-types). +By relaxing null checks for such types, Scala aligns its safety guarantees with those of Java. +Notably, flexible types are non-denotable, meaning users cannot explicitly write them in the code; +only the compiler can construct or infer these types. + +Consequently, a value with a flexible type can serve as both a nullable and non-nullable value. +Additionally, both nullable and non-nullable values can be passed as parameters with flexible types during function calls. +Invoking the member functions of a flexible type is allowed, but it can trigger a `NullPointerException` +if the value is indeed `null` during runtime. -The Scala compiler can load Java classes in two ways: from source or from bytecode. 
In either case, -when a Java class is loaded, we "patch" the type of its members to reflect that Java types -remain implicitly nullable. - -Specifically, we patch +```scala +// Considering class J is from Java +class J { + // Translates to def f(s: String?): Unit + public void f(String s) { + } -- the type of fields + // Translates to def g(): String? + public String g() { + return ""; + } +} + +// Use J in Scala +def useJ(j: J) = + val x1: String = "" + val x2: String | Null = null + j.f(x1) // Passing String to String? + j.f(x2) // Passing String | Null to String? + j.f(null) // Passing Null to String? + + // Assign String? to String + val y1: String = j.g() + // Assign String? to String | Null + val y2: String | Null = j.g() + + // Calling member functions on flexible types + j.g().trim().length() +``` -- the argument type and return type of methods +Upon loading a Java class, whether from source or bytecode, the Scala compiler dynamically adjusts the type of its members to reflect nullability. +This adjustment involves adding flexible types to the reference types of fields, as well as the argument types and return types of methods We illustrate the rules with following examples: @@ -138,7 +180,7 @@ We illustrate the rules with following examples: ```scala class C: - val s: String | Null + val s: String? val x: Int ``` @@ -151,15 +193,7 @@ We illustrate the rules with following examples: ==> ```scala - class C[T] { def foo(): T | Null } - ``` - - Notice this is rule is sometimes too conservative, as witnessed by - - ```scala - class InScala: - val c: C[Bool] = ??? // C as above - val b: Bool = c.foo() // no longer typechecks, since foo now returns Bool | Null + class C[T] { def foo(): T? } ``` - We can reduce the number of redundant nullable types we need to add. 
Consider @@ -172,21 +206,21 @@ We illustrate the rules with following examples: ==> ```scala - class Box[T] { def get(): T | Null } - class BoxFactory[T] { def makeBox(): Box[T] | Null } + class Box[T] { def get(): T? } + class BoxFactory[T] { def makeBox(): Box[T]? } ``` Suppose we have a `BoxFactory[String]`. Notice that calling `makeBox()` on it returns a - `Box[String] | Null`, not a `Box[String | Null] | Null`. This seems at first + `Box[String]?`, not a `Box[String?]?`. This seems at first glance unsound ("What if the box itself has `null` inside?"), but is sound because calling - `get()` on a `Box[String]` returns a `String | Null`. + `get()` on a `Box[String]` returns a `String?`. Notice that we need to patch _all_ Java-defined classes that transitively appear in the argument or return type of a field or method accessible from the Scala code being compiled. Absent crazy reflection magic, we think that all such Java classes _must_ be visible to the Typer in the first place, so they will be patched. -- We will append `Null` to the type arguments if the generic class is defined in Scala. +- We will patch the type arguments if the generic class is defined in Scala. ```java class BoxFactory { @@ -199,16 +233,16 @@ We illustrate the rules with following examples: ```scala class BoxFactory[T]: - def makeBox(): Box[T | Null] | Null - def makeCrazyBoxes(): java.util.List[Box[java.util.List[T] | Null]] | Null + def makeBox(): Box[T?]? + def makeCrazyBoxes(): java.util.List[Box[java.util.List[T]?]]? ``` - In this case, since `Box` is Scala-defined, we will get `Box[T | Null] | Null`. + In this case, since `Box` is Scala-defined, we will get `Box[T?]?`. This is needed because our nullability function is only applied (modularly) to the Java classes, but not to the Scala ones, so we need a way to tell `Box` that it contains a nullable value. - The `List` is Java-defined, so we don't append `Null` to its type argument. 
But we + The `List` is Java-defined, so we don't patch its type argument. But we still need to nullify its inside. - We don't nullify _simple_ literal constant (`final`) fields, since they are known to be non-null @@ -234,7 +268,7 @@ We illustrate the rules with following examples: val NAME_GENERATED: String | Null = getNewName() ``` -- We don't append `Null` to a field nor to a return type of a method which is annotated with a +- We don't patch a field nor the return type of a method which is annotated with a `NotNull` annotation. ```java @@ -250,8 +284,8 @@ We illustrate the rules with following examples: ```scala class C: val name: String - def getNames(prefix: String | Null): java.util.List[String] // we still need to nullify the paramter types - def getBoxedName(): Box[String | Null] // we don't append `Null` to the outmost level, but we still need to nullify inside + def getNames(prefix: String?): java.util.List[String] // we still need to nullify the parameter types + def getBoxedName(): Box[String?] // we don't append `Null` to the outermost level, but we still need to nullify inside ``` The annotation must be from the list below to be recognized as `NotNull` by the compiler. @@ -280,6 +314,9 @@ We illustrate the rules with following examples: "io.reactivex.annotations.NonNull" :: Nil map PreNamedString) ``` +Flexible types can be disabled by using the `-Yno-flexible-types` flag. +The ordinary union type `| Null` will be used instead. + ### Override check When we check overriding between Scala classes and Java classes, the rules are relaxed for [`Null`](https://scala-lang.org/api/3.x/scala/Null.html) type with this feature, in order to help users to working with Java libraries. 
diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 09feaf11c31d..2146254a9467 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -1,5 +1,7 @@ package scala.runtime.stdLibPatches +import scala.annotation.experimental + object Predef: import compiletime.summonFrom @@ -60,5 +62,4 @@ object Predef: * `eq` or `ne` methods, only `==` and `!=` inherited from `Any`. */ inline def ne(inline y: AnyRef | Null): Boolean = !(x eq y) - end Predef diff --git a/project/Build.scala b/project/Build.scala index 336d576c7207..fef7a2bcb60b 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -908,6 +908,13 @@ object Build { "-Ddotty.tests.classes.dottyTastyInspector=" + jars("scala3-tasty-inspector"), ) }, + // For compatibility at this moment, both the bootstrapped and the non-bootstrapped + // compilers are compiled without flexible types. + // We should move the flag to commonDottyCompilerSettings once the reference + // compiler is updated. + // Then, the next step is to enable flexible types by default and reduce the use of + // `unsafeNulls`. 
+ scalacOptions ++= Seq("-Yno-flexible-types"), packageAll := { (`scala3-compiler` / packageAll).value ++ Seq( "scala3-compiler" -> (Compile / packageBin).value.getAbsolutePath, @@ -1290,6 +1297,10 @@ object Build { .asScala3PresentationCompiler(NonBootstrapped) lazy val `scala3-presentation-compiler-bootstrapped` = project.in(file("presentation-compiler")) .asScala3PresentationCompiler(Bootstrapped) + .settings( + // Add `-Yno-flexible-types` flag for bootstrap, see comments for `bootstrappedDottyCompilerSettings` + Compile / scalacOptions += "-Yno-flexible-types" + ) def scala3PresentationCompiler(implicit mode: Mode): Project = mode match { case NonBootstrapped => `scala3-presentation-compiler` diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index e45e7a81904b..0051d744f787 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -72,6 +72,7 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of tasty core Build.previousDottyVersion -> Seq( + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype") ), // Additions since last LTS diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index e17c98234691..6cd63d0d8f01 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -176,6 +176,7 @@ Standard-Section: "ASTs" TopLevelStat* ORtype Length left_Type right_Type -- lefgt | right MATCHtype Length bound_Type sel_Type case_Type* -- sel match {cases} with optional upper `bound` MATCHCASEtype Length pat_type rhs_Type -- match cases are MATCHCASEtypes or TYPELAMBDAtypes over MATCHCASEtypes + FLEXIBLEtype Length underlying_Type -- (underlying)? 
BIND Length boundName_NameRef bounds_Type Modifier* -- boundName @ bounds, for type-variables defined in a type pattern BYNAMEtype underlying_Type -- => underlying PARAMtype Length binder_ASTRef paramNum_Nat -- A reference to parameter # paramNum in lambda type `binder` @@ -617,6 +618,7 @@ object TastyFormat { final val MATCHtype = 190 final val MATCHtpt = 191 final val MATCHCASEtype = 192 + final val FLEXIBLEtype = 193 final val HOLE = 255 @@ -648,7 +650,7 @@ object TastyFormat { firstNatTreeTag <= tag && tag <= RENAMED || firstASTTreeTag <= tag && tag <= BOUNDED || firstNatASTTreeTag <= tag && tag <= NAMEDARG || - firstLengthTreeTag <= tag && tag <= MATCHCASEtype || + firstLengthTreeTag <= tag && tag <= FLEXIBLEtype || tag == HOLE def isParamTag(tag: Int): Boolean = tag == PARAM || tag == TYPEPARAM @@ -850,6 +852,7 @@ object TastyFormat { case MATCHCASEtype => "MATCHCASEtype" case MATCHtpt => "MATCHtpt" case PARAMtype => "PARAMtype" + case FLEXIBLEtype => "FLEXIBLEtype" case ANNOTATION => "ANNOTATION" case PRIVATEqualified => "PRIVATEqualified" case PROTECTEDqualified => "PROTECTEDqualified" diff --git a/tests/explicit-nulls/flexible-types-common/i7883.scala b/tests/explicit-nulls/flexible-types-common/i7883.scala new file mode 100644 index 000000000000..9ee92553b60d --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/i7883.scala @@ -0,0 +1,9 @@ +import scala.util.matching.Regex + +object Test extends App { + def head(s: String, r: Regex): Option[(String, String)] = + s.trim match { + case r(hd, tl) => Some((hd, tl)) // error // error // error + case _ => None + } +} \ No newline at end of file diff --git a/tests/explicit-nulls/neg/interop-array-src/J.java b/tests/explicit-nulls/flexible-types-common/interop-array-src/J.java similarity index 100% rename from tests/explicit-nulls/neg/interop-array-src/J.java rename to tests/explicit-nulls/flexible-types-common/interop-array-src/J.java diff --git a/tests/explicit-nulls/neg/interop-array-src/S.scala 
b/tests/explicit-nulls/flexible-types-common/interop-array-src/S.scala similarity index 100% rename from tests/explicit-nulls/neg/interop-array-src/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-array-src/S.scala diff --git a/tests/explicit-nulls/flexible-types-common/interop-chain.scala b/tests/explicit-nulls/flexible-types-common/interop-chain.scala new file mode 100644 index 000000000000..27a2d507801e --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-chain.scala @@ -0,0 +1,9 @@ +// With flexible types, we can select a member of its underlying type. + +class Foo { + import java.util.ArrayList + import java.util.Iterator + + val x3 = new ArrayList[ArrayList[ArrayList[String]]]() + val x4: Int = x3.get(0).get(0).get(0).length() // error +} diff --git a/tests/explicit-nulls/pos/interop-enum-src/Day.java b/tests/explicit-nulls/flexible-types-common/interop-enum-src/Day.java similarity index 100% rename from tests/explicit-nulls/pos/interop-enum-src/Day.java rename to tests/explicit-nulls/flexible-types-common/interop-enum-src/Day.java diff --git a/tests/explicit-nulls/neg/interop-enum-src/Planet.java b/tests/explicit-nulls/flexible-types-common/interop-enum-src/Planet.java similarity index 100% rename from tests/explicit-nulls/neg/interop-enum-src/Planet.java rename to tests/explicit-nulls/flexible-types-common/interop-enum-src/Planet.java diff --git a/tests/explicit-nulls/pos/interop-enum-src/S.scala b/tests/explicit-nulls/flexible-types-common/interop-enum-src/S.scala similarity index 60% rename from tests/explicit-nulls/pos/interop-enum-src/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-enum-src/S.scala index 75e4654869a4..ce0935271d11 100644 --- a/tests/explicit-nulls/pos/interop-enum-src/S.scala +++ b/tests/explicit-nulls/flexible-types-common/interop-enum-src/S.scala @@ -3,4 +3,5 @@ class S { val d: Day = Day.MON val p: Planet = Planet.MARS + val p2: Planet = p.next() // error: expected Planet but 
got Planet|Null } diff --git a/tests/explicit-nulls/neg/interop-generics/J.java b/tests/explicit-nulls/flexible-types-common/interop-generics/J.java similarity index 100% rename from tests/explicit-nulls/neg/interop-generics/J.java rename to tests/explicit-nulls/flexible-types-common/interop-generics/J.java diff --git a/tests/explicit-nulls/neg/interop-generics/S.scala b/tests/explicit-nulls/flexible-types-common/interop-generics/S.scala similarity index 100% rename from tests/explicit-nulls/neg/interop-generics/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-generics/S.scala diff --git a/tests/explicit-nulls/flexible-types-common/interop-implicit.scala b/tests/explicit-nulls/flexible-types-common/interop-implicit.scala new file mode 100644 index 000000000000..4bbba8f11cab --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-implicit.scala @@ -0,0 +1,10 @@ +class S { + locally { + // OfType Implicits + + import java.nio.charset.StandardCharsets + import scala.io.Codec + + val c: Codec = StandardCharsets.UTF_8 // error + } +} \ No newline at end of file diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-call/J.java b/tests/explicit-nulls/flexible-types-common/interop-java-call/J.java new file mode 100644 index 000000000000..554b91749889 --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-call/J.java @@ -0,0 +1,17 @@ +public class J { + public String f1() { + return ""; + } + + public int f2() { + return 0; + } + + public T g1() { + return null; + } +} + +class J2 { + public T x = null; +} \ No newline at end of file diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-call/S.scala b/tests/explicit-nulls/flexible-types-common/interop-java-call/S.scala new file mode 100644 index 000000000000..acdbbafc3fab --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-call/S.scala @@ -0,0 +1,37 @@ +// Check Java calls have been cast to non-nullable. 
+ +val j: J = new J + +val s1: String = j.f1() // error + +val s1n: String | Null = j.f1() + +val i1: Int = j.f2() + +val s2: String = j.g1[String]() // error + +val s2n: String | Null = j.g1[String]() + +// val s3: String = j.g1[String | Null]() error + +val s3n: String | Null = j.g1[String | Null]() + +val i2: Int = j.g1[Int]() // error + +val a1: Any = j.g1[Any]() + +val ar1: AnyRef = j.g1[AnyRef]() // error + +val n1: Null = j.g1[Null]() + +// val ar2: AnyRef = j.g1[Null]() error + +def clo1[T]: T = j.g1[T]() // error + +def clo2[T <: AnyRef]: T = j.g1[T]() // error + +def clo3[T >: Null <: AnyRef | Null]: T = j.g1[T]() + +def testJ2[T]: T = + val j2: J2[T] = new J2 + j2.x // error diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-chain/J.java b/tests/explicit-nulls/flexible-types-common/interop-java-chain/J.java new file mode 100644 index 000000000000..bd266bae13d9 --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-chain/J.java @@ -0,0 +1,7 @@ +class J1 { + J2 getJ2() { return new J2(); } +} + +class J2 { + J1 getJ1() { return new J1(); } +} \ No newline at end of file diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-chain/S.scala b/tests/explicit-nulls/flexible-types-common/interop-java-chain/S.scala new file mode 100644 index 000000000000..9fe5aa3f08ce --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-chain/S.scala @@ -0,0 +1,4 @@ +class S { + val j: J2 = new J2() + j.getJ1().getJ2().getJ1().getJ2().getJ1().getJ2() // error +} diff --git a/tests/explicit-nulls/pos/interop-java-varargs-src/Names.java b/tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/Names.java similarity index 100% rename from tests/explicit-nulls/pos/interop-java-varargs-src/Names.java rename to tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/Names.java diff --git a/tests/explicit-nulls/pos/interop-java-varargs-src/S.scala 
b/tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/S.scala similarity index 64% rename from tests/explicit-nulls/pos/interop-java-varargs-src/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/S.scala index e867202e506d..ef0b702b0006 100644 --- a/tests/explicit-nulls/pos/interop-java-varargs-src/S.scala +++ b/tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/S.scala @@ -16,4 +16,14 @@ class S { // Multiple arguments, some null. Names.setNames(null, null, "hello", "world", null) + + val arg1: Array[String] = ??? + val arg2: Array[String | Null] = ??? + val arg3: Array[String] | Null = ??? + val arg4: Array[String | Null] | Null = ??? + + Names.setNames(arg1*) + Names.setNames(arg2*) + Names.setNames(arg3*) // error + Names.setNames(arg4*) // error } diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-varargs.scala b/tests/explicit-nulls/flexible-types-common/interop-java-varargs.scala new file mode 100644 index 000000000000..9ec27cb090a1 --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-varargs.scala @@ -0,0 +1,38 @@ +import java.nio.file.Paths + +def test1 = { + Paths.get("") + Paths.get("", null) + Paths.get("", "") + Paths.get("", "", null) + + val x1: String = ??? + val x2: String | Null = ??? + + Paths.get("", x1) + Paths.get("", x2) +} + +def test2 = { + val xs1: Seq[String] = ??? + val xs2: Seq[String | Null] = ??? + val xs3: Seq[String | Null] | Null = ??? + val xs4: Seq[String] | Null = ??? + + val ys1: Array[String] = ??? + val ys2: Array[String | Null] = ??? + val ys3: Array[String | Null] | Null = ??? + val ys4: Array[String] | Null = ??? 
+ + Paths.get("", xs1*) + Paths.get("", xs2*) + Paths.get("", xs3*) // error + Paths.get("", xs4*) // error + + Paths.get("", ys1*) + Paths.get("", ys2*) + Paths.get("", ys3*) // error + Paths.get("", ys4*) // error + + Paths.get("", null*) // error +} \ No newline at end of file diff --git a/tests/explicit-nulls/neg/interop-method-src/J.java b/tests/explicit-nulls/flexible-types-common/interop-method-src/J.java similarity index 100% rename from tests/explicit-nulls/neg/interop-method-src/J.java rename to tests/explicit-nulls/flexible-types-common/interop-method-src/J.java diff --git a/tests/explicit-nulls/neg/interop-method-src/S.scala b/tests/explicit-nulls/flexible-types-common/interop-method-src/S.scala similarity index 100% rename from tests/explicit-nulls/neg/interop-method-src/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-method-src/S.scala diff --git a/tests/explicit-nulls/flexible-types-common/interop-propagate.scala b/tests/explicit-nulls/flexible-types-common/interop-propagate.scala new file mode 100644 index 000000000000..40eb12dd287c --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-propagate.scala @@ -0,0 +1,18 @@ + class Foo { + import java.util.ArrayList + + // Test that type mapping works with flexible types. 
+ val ll: ArrayList[ArrayList[ArrayList[String]]] = new ArrayList[ArrayList[ArrayList[String]]] + val level1: ArrayList[ArrayList[String]] = ll.get(0) // error + val level2: ArrayList[String] = ll.get(0).get(0) // error + val level3: String = ll.get(0).get(0).get(0) // error + + val lb = new ArrayList[ArrayList[ArrayList[String]]] + val levelA = lb.get(0) + val levelB = lb.get(0).get(0) // error + val levelC = lb.get(0).get(0).get(0) // error + + val x = levelA.get(0) // error + val y = levelB.get(0) + val z: String = levelA.get(0).get(0) // error +} diff --git a/tests/explicit-nulls/flexible-types-common/interop-select-type-member.scala b/tests/explicit-nulls/flexible-types-common/interop-select-type-member.scala new file mode 100644 index 000000000000..ddd402545edb --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-select-type-member.scala @@ -0,0 +1,7 @@ +import java.util.ArrayList + +def f[T]: ArrayList[T] = { + val cz = Class.forName("java.util.ArrayList") + val o = cz.newInstance() // error: T of Class[?] 
| Null + o.asInstanceOf[ArrayList[T]] +} \ No newline at end of file diff --git a/tests/explicit-nulls/neg/i7883.check b/tests/explicit-nulls/neg/i7883.check index e37285332359..f14e5d4e7481 100644 --- a/tests/explicit-nulls/neg/i7883.check +++ b/tests/explicit-nulls/neg/i7883.check @@ -1,19 +1,19 @@ --- [E134] Type Error: tests/explicit-nulls/neg/i7883.scala:6:11 -------------------------------------------------------- -6 | case r(hd, tl) => Some((hd, tl)) // error // error // error +-- [E134] Type Error: tests/explicit-nulls/neg/i7883.scala:8:11 -------------------------------------------------------- +8 | case r(hd, tl) => Some((hd, tl)) // error // error // error | ^ | None of the overloaded alternatives of method unapplySeq in class Regex with types | (m: scala.util.matching.Regex.Match): Option[List[String]] | (c: Char): Option[List[Char]] | (s: CharSequence): Option[List[String]] | match arguments (String | Null) --- [E006] Not Found Error: tests/explicit-nulls/neg/i7883.scala:6:30 --------------------------------------------------- -6 | case r(hd, tl) => Some((hd, tl)) // error // error // error +-- [E006] Not Found Error: tests/explicit-nulls/neg/i7883.scala:8:30 --------------------------------------------------- +8 | case r(hd, tl) => Some((hd, tl)) // error // error // error | ^^ | Not found: hd | | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/explicit-nulls/neg/i7883.scala:6:34 --------------------------------------------------- -6 | case r(hd, tl) => Some((hd, tl)) // error // error // error +-- [E006] Not Found Error: tests/explicit-nulls/neg/i7883.scala:8:34 --------------------------------------------------- +8 | case r(hd, tl) => Some((hd, tl)) // error // error // error | ^^ | Not found: tl | diff --git a/tests/explicit-nulls/neg/i7883.scala b/tests/explicit-nulls/neg/i7883.scala index 7938c92dce1e..10d2a6231dca 100644 --- a/tests/explicit-nulls/neg/i7883.scala +++ 
b/tests/explicit-nulls/neg/i7883.scala @@ -1,3 +1,5 @@ +//> using options -Yno-flexible-types + import scala.util.matching.Regex object Test extends App { diff --git a/tests/explicit-nulls/neg/interop-enum-src/S.scala b/tests/explicit-nulls/neg/interop-enum-src/S.scala deleted file mode 100644 index 99e92cedc68d..000000000000 --- a/tests/explicit-nulls/neg/interop-enum-src/S.scala +++ /dev/null @@ -1,6 +0,0 @@ -// Verify that enum values aren't nullified. - -class S { - val p: Planet = Planet.MARS // ok: accessing static member - val p2: Planet = p.next() // error: expected Planet but got Planet|Null -} diff --git a/tests/explicit-nulls/neg/interop-propagate.scala b/tests/explicit-nulls/neg/interop-propagate.scala deleted file mode 100644 index 6af7ee182cac..000000000000 --- a/tests/explicit-nulls/neg/interop-propagate.scala +++ /dev/null @@ -1,10 +0,0 @@ - class Foo { - import java.util.ArrayList - - // Test that the nullability is propagated to nested containers. - val ll = new ArrayList[ArrayList[ArrayList[String]]] - val level1: ArrayList[ArrayList[String]] = ll.get(0) // error - val level2: ArrayList[String] = ll.get(0).get(0) // error - val level3: String = ll.get(0).get(0).get(0) // error - val ok: String = ll.get(0).get(0).get(0) // error -} diff --git a/tests/explicit-nulls/neg/interop-return.scala b/tests/explicit-nulls/neg/interop-return.scala index 1d6df4da93bc..422d37882179 100644 --- a/tests/explicit-nulls/neg/interop-return.scala +++ b/tests/explicit-nulls/neg/interop-return.scala @@ -1,3 +1,5 @@ +//> using options -Yno-flexible-types + // Test that the return type of Java methods as well as the type of Java fields is marked as nullable. 
class Foo { diff --git a/tests/explicit-nulls/neg/notnull/S.scala b/tests/explicit-nulls/neg/notnull/S.scala index eada60eea6e7..a10bdaabc77c 100644 --- a/tests/explicit-nulls/neg/notnull/S.scala +++ b/tests/explicit-nulls/neg/notnull/S.scala @@ -1,3 +1,5 @@ +//> using options -Yno-flexible-types + // Test that NotNull annotations not in the list are not working in Java files. class S { diff --git a/tests/explicit-nulls/pos/interop-applied-types/J.java b/tests/explicit-nulls/pos/interop-applied-types/J.java new file mode 100644 index 000000000000..c85a921a81b9 --- /dev/null +++ b/tests/explicit-nulls/pos/interop-applied-types/J.java @@ -0,0 +1,3 @@ +public class J { + public J j = this; +} \ No newline at end of file diff --git a/tests/explicit-nulls/pos/interop-applied-types/S.scala b/tests/explicit-nulls/pos/interop-applied-types/S.scala new file mode 100644 index 000000000000..8ff50ab63840 --- /dev/null +++ b/tests/explicit-nulls/pos/interop-applied-types/S.scala @@ -0,0 +1,14 @@ +def test1[T](x: J[T]): J[T] = + x match { + case y: J[_] => y + } + +def test2[T](x: J[T]): J[T] = + x match { + case y: J[_] => y.j + } + +def test3[T](x: J[T]): J[T] = + x.j match { + case y: J[_] => y.j + } \ No newline at end of file diff --git a/tests/explicit-nulls/pos/interop-constructor-src/S.scala b/tests/explicit-nulls/pos/interop-constructor-src/S.scala index 3defd73f3945..be87b6052699 100644 --- a/tests/explicit-nulls/pos/interop-constructor-src/S.scala +++ b/tests/explicit-nulls/pos/interop-constructor-src/S.scala @@ -3,4 +3,5 @@ class S { val x1: J = new J("hello") val x2: J = new J(null) val x3: J = new J(null, null, null) + val x4: J = new J("hello", null, "world") } diff --git a/tests/explicit-nulls/pos/interop-enum-src/Planet.java b/tests/explicit-nulls/pos/interop-enum-src/Planet.java deleted file mode 100644 index 287aed6aecc5..000000000000 --- a/tests/explicit-nulls/pos/interop-enum-src/Planet.java +++ /dev/null @@ -1,19 +0,0 @@ -public enum Planet { - MERCURY 
(3.303e+23, 2.4397e6), - VENUS (4.869e+24, 6.0518e6), - EARTH (5.976e+24, 6.37814e6), - MARS (6.421e+23, 3.3972e6), - JUPITER (1.9e+27, 7.1492e7), - SATURN (5.688e+26, 6.0268e7), - URANUS (8.686e+25, 2.5559e7), - NEPTUNE (1.024e+26, 2.4746e7); - - private final double mass; // in kilograms - private final double radius; // in meters - Planet(double mass, double radius) { - this.mass = mass; - this.radius = radius; - } - private double mass() { return mass; } - private double radius() { return radius; } -} diff --git a/tests/explicit-nulls/pos/interop-generics/J.java b/tests/explicit-nulls/pos/interop-generics/J.java deleted file mode 100644 index 4bbdbd4cf319..000000000000 --- a/tests/explicit-nulls/pos/interop-generics/J.java +++ /dev/null @@ -1,13 +0,0 @@ - -class I {} - -class J { - I foo(T x) { - return new I(); - } - - I[] bar(T x) { - Object[] r = new Object[]{new I()}; - return (I[]) r; - } -} diff --git a/tests/explicit-nulls/pos/interop-generics/S.scala b/tests/explicit-nulls/pos/interop-generics/S.scala deleted file mode 100644 index 10a0572b0edf..000000000000 --- a/tests/explicit-nulls/pos/interop-generics/S.scala +++ /dev/null @@ -1,6 +0,0 @@ -class S { - val j = new J() - // Check that the inside of a Java generic isn't nullified - val x: I[String] | Null = j.foo("hello") - val y: Array[I[String] | Null] | Null = j.bar[String](null) -} diff --git a/tests/explicit-nulls/pos/interop-nn-src/S.scala b/tests/explicit-nulls/pos/interop-nn-src/S.scala index 6250c4c3c961..3f6cddb4731b 100644 --- a/tests/explicit-nulls/pos/interop-nn-src/S.scala +++ b/tests/explicit-nulls/pos/interop-nn-src/S.scala @@ -1,7 +1,7 @@ class S { val j = new J() - // Test that the `nn` extension method can be used to strip away - // nullability from a type. + + // Test that the `nn` extension method should work with flexible types. 
val s: String = j.foo.nn val a: Array[String | Null] = j.bar.nn diff --git a/tests/explicit-nulls/pos/interop-ortype-src/J.java b/tests/explicit-nulls/pos/interop-ortype-src/J.java new file mode 100644 index 000000000000..b0d767bccf3e --- /dev/null +++ b/tests/explicit-nulls/pos/interop-ortype-src/J.java @@ -0,0 +1,3 @@ +class J { + public static T foo(T t) { return null; } +} diff --git a/tests/explicit-nulls/pos/interop-ortype-src/S.scala b/tests/explicit-nulls/pos/interop-ortype-src/S.scala new file mode 100644 index 000000000000..8576ee0895ed --- /dev/null +++ b/tests/explicit-nulls/pos/interop-ortype-src/S.scala @@ -0,0 +1,7 @@ +// Tests that member finding works on (FlexibleType(T) | S) +class S { + def foo(a: J | String) = (a match { + case x: J => J.foo(x: J) + case y: String => "" + }).asInstanceOf[J] +} diff --git a/tests/explicit-nulls/pos/interop-poly-src/S.scala b/tests/explicit-nulls/pos/interop-poly-src/S.scala index 1fea277efe90..8aed9e99b689 100644 --- a/tests/explicit-nulls/pos/interop-poly-src/S.scala +++ b/tests/explicit-nulls/pos/interop-poly-src/S.scala @@ -9,12 +9,29 @@ class Test { // because JavaCat, being a Java class, _already_ nullifies its // fields. val jc: JavaCat[String]|Null = J.getJavaCat[String]() + val jc2: JavaCat[String] = J.getJavaCat[String]() // ScalaCat is Scala-defined, so we need the inner |Null. 
val sc: ScalaCat[String|Null]|Null = J.getScalaCat[String]() + val sc2: ScalaCat[String]|Null = J.getScalaCat[String]() + val sc3: ScalaCat[String|Null] = J.getScalaCat[String]() + val sc4: ScalaCat[String] = J.getScalaCat[String]() import java.util.List val las: List[Array[String|Null]]|Null = J.getListOfStringArray() + val las2: List[Array[String|Null]] = J.getListOfStringArray() + val las3: List[Array[String]]|Null = J.getListOfStringArray() + val las4: List[Array[String]] = J.getListOfStringArray() val als: Array[List[String]|Null]|Null = J.getArrayOfStringList() + val als2: Array[List[String]|Null] = J.getArrayOfStringList() + val als3: Array[List[String]]|Null = J.getArrayOfStringList() + val als4: Array[List[String]] = J.getArrayOfStringList() val css: List[Array[List[Array[String|Null]]|Null]]|Null = J.getComplexStrings() + val css2: List[Array[List[Array[String]]|Null]]|Null = J.getComplexStrings() + val css3: List[Array[List[Array[String|Null]]]]|Null = J.getComplexStrings() + val css4: List[Array[List[Array[String|Null]]|Null]] = J.getComplexStrings() + val css5: List[Array[List[Array[String|Null]]]] = J.getComplexStrings() + val css6: List[Array[List[Array[String]]]]|Null = J.getComplexStrings() + val css7: List[Array[List[Array[String]]|Null]] = J.getComplexStrings() + val css8: List[Array[List[Array[String]]]] = J.getComplexStrings() } diff --git a/tests/explicit-nulls/pos/interop-static-src/S.scala b/tests/explicit-nulls/pos/interop-static-src/S.scala index 3db9c3f6d281..7e0e4a34898e 100644 --- a/tests/explicit-nulls/pos/interop-static-src/S.scala +++ b/tests/explicit-nulls/pos/interop-static-src/S.scala @@ -1,5 +1,7 @@ class S { // Java static methods are also nullified val x: Int = J.foo(null) + val x2: Int = J.foo("hello") val y: String | Null = J.bar(0) + val y2: String = J.bar(0) } diff --git a/tests/explicit-nulls/pos/match-with-applied-types.scala.scala b/tests/explicit-nulls/pos/match-with-applied-types.scala.scala new file mode 100644 index 
000000000000..7b9886ca60ed --- /dev/null +++ b/tests/explicit-nulls/pos/match-with-applied-types.scala.scala @@ -0,0 +1,7 @@ +class A + +def test = + val xs: java.util.LinkedHashMap[String, A | List[A]] = ??? + xs.get("a") match + case a: A => ??? + case as: List[A] => ??? \ No newline at end of file diff --git a/tests/explicit-nulls/pos/sam-parameter-javadefined/injava.java b/tests/explicit-nulls/pos/sam-parameter-javadefined/injava.java new file mode 100644 index 000000000000..28925b3c492a --- /dev/null +++ b/tests/explicit-nulls/pos/sam-parameter-javadefined/injava.java @@ -0,0 +1,6 @@ +class injava { + static void overloaded(Runnable r) {} + static void overloaded(int i) {} + + static void notoverloaded(Runnable r) {} +} diff --git a/tests/explicit-nulls/pos/sam-parameter-javadefined/sam-test.scala b/tests/explicit-nulls/pos/sam-parameter-javadefined/sam-test.scala new file mode 100644 index 000000000000..d3573f590713 --- /dev/null +++ b/tests/explicit-nulls/pos/sam-parameter-javadefined/sam-test.scala @@ -0,0 +1,23 @@ +def foo = { + def unit: Unit = () + + injava.overloaded({ () => unit } : Runnable ) + injava.overloaded({ () => unit } ) + + injava.notoverloaded({ () => unit } : Runnable ) + injava.notoverloaded({ () => unit } ) + + val list = new java.util.Vector[Int]() + java.util.Collections.sort[Int](list, { (a,b) => a - b } : java.util.Comparator[Int] ) + java.util.Collections.sort[Int](list, { (a,b) => a - b }) + + new Thread({ () => unit } : Runnable ) + new Thread({ () => unit } ) + + // See cats.effect.kernel.AsyncPlatform + val cf = new java.util.concurrent.CompletableFuture[String] + cf.handle[Unit]({ + case (string, null) => unit + case (string, throwable) => unit + }) +} diff --git a/tests/explicit-nulls/pos/widen-nullable-union.scala b/tests/explicit-nulls/pos/widen-nullable-union.scala index 9ffa767b84e5..f87b61f781ae 100644 --- a/tests/explicit-nulls/pos/widen-nullable-union.scala +++ b/tests/explicit-nulls/pos/widen-nullable-union.scala @@ 
-39,4 +39,16 @@ class Test { val y = x val _: (A & B) | Null = y } + + def test1(s: String): String = + val ss = if !s.isEmpty() then s.trim() else s + ss + "!" + + def test2(s: String): String = + val ss = if !s.isEmpty() then s.trim().nn else s + ss + "!" + + def test3(s: String): String = + val ss: String = if !s.isEmpty() then s.trim().nn else s + ss + "!" } diff --git a/tests/explicit-nulls/unsafe-common/unsafe-java-varargs-src/S.scala b/tests/explicit-nulls/unsafe-common/unsafe-java-varargs-src/S.scala index e27b0dcaacbf..67fa583a7b66 100644 --- a/tests/explicit-nulls/unsafe-common/unsafe-java-varargs-src/S.scala +++ b/tests/explicit-nulls/unsafe-common/unsafe-java-varargs-src/S.scala @@ -12,8 +12,8 @@ class S { val arg3: Array[String] | Null = ??? val arg4: Array[String | Null] | Null = ??? - j.foo(arg1: _*) - j.foo(arg2: _*) - j.foo(arg3: _*) // error - j.foo(arg4: _*) // error + j.foo(arg1*) + j.foo(arg2*) + j.foo(arg3*) // error + j.foo(arg4*) // error } \ No newline at end of file diff --git a/tests/neg-deep-subtype/interop-polytypes.scala b/tests/neg-deep-subtype/interop-polytypes.scala index 90922b63f7d0..987e4720bf13 100644 --- a/tests/neg-deep-subtype/interop-polytypes.scala +++ b/tests/neg-deep-subtype/interop-polytypes.scala @@ -1,4 +1,4 @@ -//> using options -Yexplicit-nulls +//> using options -Yexplicit-nulls -Yno-flexible-types class Foo { import java.util.ArrayList From b006ef9ec0452a355e94b3bbe472ac8d217f1ed0 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 9 Apr 2024 18:18:41 +0200 Subject: [PATCH 082/465] Add annotations in parameters for exports Co-authored-by: Jan-Pieter van den Heuvel Co-authored-by: Wessel W. 
Bakker --- compiler/src/dotty/tools/dotc/typer/Namer.scala | 11 +++++++++++ tests/neg/i20127.check | 8 ++++++++ tests/neg/i20127.scala | 14 ++++++++++++++ 3 files changed, 33 insertions(+) create mode 100644 tests/neg/i20127.check create mode 100644 tests/neg/i20127.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 15d7885776c5..4831c49f91bb 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1255,6 +1255,8 @@ class Namer { typer: Typer => newSymbol(cls, forwarderName, mbrFlags, mbrInfo, coord = span) forwarder.info = avoidPrivateLeaks(forwarder) + + // Add annotations at the member level forwarder.addAnnotations(sym.annotations.filterConserve { annot => annot.symbol != defn.BodyAnnot && annot.symbol != defn.TailrecAnnot @@ -1290,6 +1292,15 @@ class Namer { typer: Typer => foreachDefaultGetterOf(sym.asTerm, getter => addForwarder( getter.name.asTermName, getter.asSeenFrom(path.tpe), span)) + + // adding annotations at the parameter level + // TODO: This probably needs to be filtered to avoid adding some annotation + // such as MacroAnnotations + if sym.is(Method) then + for (orig, forwarded) <- sym.paramSymss.lazyZip(forwarder.paramSymss) + (origParameter, exportedParameter) <- orig.lazyZip(forwarded) + do + exportedParameter.addAnnotations(origParameter.annotations) end addForwarder def addForwardersNamed(name: TermName, alias: TermName, span: Span): Unit = diff --git a/tests/neg/i20127.check b/tests/neg/i20127.check new file mode 100644 index 000000000000..933dd0437eb5 --- /dev/null +++ b/tests/neg/i20127.check @@ -0,0 +1,8 @@ +-- [E172] Type Error: tests/neg/i20127.scala:13:9 ---------------------------------------------------------------------- +13 | Foo.foo // error + | ^ + | foo! 
+-- [E172] Type Error: tests/neg/i20127.scala:14:14 --------------------------------------------------------------------- +14 | FooClone.foo // error + | ^ + | foo! diff --git a/tests/neg/i20127.scala b/tests/neg/i20127.scala new file mode 100644 index 000000000000..a21e10a13e75 --- /dev/null +++ b/tests/neg/i20127.scala @@ -0,0 +1,14 @@ +import scala.annotation.* + +trait X + +object Foo: + def foo(using @implicitNotFound("foo!") x: X) = "foo" + +object FooClone: + export Foo.foo + +object Main: + val n = 10 + Foo.foo // error + FooClone.foo // error \ No newline at end of file From 98cbe060bc5cdaf241ebb26fbb57ccf2e94d50c3 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Mon, 8 Apr 2024 17:17:16 +0200 Subject: [PATCH 083/465] Drop retained elements and keep the annotation --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 3 ++- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- tests/neg-custom-args/captures/byname.check | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index b0ad8719ccfb..deeb474f018a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -449,7 +449,8 @@ extension (tp: AnnotatedType) class CleanupRetains(using Context) extends TypeMap: def apply(tp: Type): Type = tp match - case RetainingType(tp, _) => tp + case AnnotatedType(tp, annot) if annot.symbol == defn.RetainsAnnot || annot.symbol == defn.RetainsByNameAnnot => + RetainingType(tp, Nil, byName = annot.symbol == defn.RetainsByNameAnnot) case _ => mapOver(tp) /** An extractor for `caps.reachCapability(ref)`, which is used to express a reach diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index b9e25a84fc38..9ab41859f170 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -454,7 +454,7 @@ 
class Setup extends PreRecheck, SymTransformer, SetupAPI: case _ => false def signatureChanges = - (tree.tpt.hasRememberedType || tree.tpt.isInstanceOf[InferredTypeTree]) && !sym.isConstructor || paramSignatureChanges + tree.tpt.hasRememberedType && !sym.isConstructor || paramSignatureChanges // Replace an existing symbol info with inferred types where capture sets of // TypeParamRefs and TermParamRefs put in correspondence by BiTypeMaps with the diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index c54fe7d4208e..e06a3a1f8268 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -9,7 +9,7 @@ | Found: (x$0: Int) ->{cap2} Int | Required: (x$0: Int) -> Int | - | Note that the expected type Int -> Int + | Note that the expected type Int ->{} Int | is the previously inferred result type of method test | which is also the type seen in separately compiled sources. | The new inferred type (x$0: Int) ->{cap2} Int From dd9831b25af3d69982ce8c28906872239538e57c Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 10 Apr 2024 00:12:17 +0200 Subject: [PATCH 084/465] Add tests for #20071 and #20136 Both observe different behaviours match type reductions depending on whether they are compiled together or separately. They both compile only with separate compilation. 
--- tests/neg/{i20071.scala => i20071a.scala} | 0 tests/neg/i20071b/A_1.scala | 13 +++++++++++++ tests/neg/i20071b/B_2.scala | 8 ++++++++ tests/pos/i20136a.scala | 14 ++++++++++++++ tests/pos/i20136b/A_1.scala | 8 ++++++++ tests/pos/i20136b/B_2.scala | 8 ++++++++ 6 files changed, 51 insertions(+) rename tests/neg/{i20071.scala => i20071a.scala} (100%) create mode 100644 tests/neg/i20071b/A_1.scala create mode 100644 tests/neg/i20071b/B_2.scala create mode 100644 tests/pos/i20136a.scala create mode 100644 tests/pos/i20136b/A_1.scala create mode 100644 tests/pos/i20136b/B_2.scala diff --git a/tests/neg/i20071.scala b/tests/neg/i20071a.scala similarity index 100% rename from tests/neg/i20071.scala rename to tests/neg/i20071a.scala diff --git a/tests/neg/i20071b/A_1.scala b/tests/neg/i20071b/A_1.scala new file mode 100644 index 000000000000..ea3aa97c6a6f --- /dev/null +++ b/tests/neg/i20071b/A_1.scala @@ -0,0 +1,13 @@ + +trait Scope +object Scope: + given i: Int = ??? + +type ReferencesScope[S] >: Int <: Int + +type ScopeToInt[Why] = Why match + case Scope => Int + +def foo[T](using d: ReferencesScope[T]): Any = ??? + +def bar[T](using d: ScopeToInt[T]): Any = ??? 
diff --git a/tests/neg/i20071b/B_2.scala b/tests/neg/i20071b/B_2.scala new file mode 100644 index 000000000000..0b5169cf901c --- /dev/null +++ b/tests/neg/i20071b/B_2.scala @@ -0,0 +1,8 @@ + +def test: Unit = + foo[Scope] // ok + bar[Scope] // error + + import Scope.i + bar[Scope] // ok + diff --git a/tests/pos/i20136a.scala b/tests/pos/i20136a.scala new file mode 100644 index 000000000000..5378119f14d2 --- /dev/null +++ b/tests/pos/i20136a.scala @@ -0,0 +1,14 @@ + +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[E <: Expr] = E match + case Expr.Of[v] => v + +trait TC[E <: Expr]: + type Elem = Expr.ExtractValue[E] +class BIExpr extends Expr: + type Value = BigInt +class Foo extends TC[BIExpr]: + val v: Elem = 0 diff --git a/tests/pos/i20136b/A_1.scala b/tests/pos/i20136b/A_1.scala new file mode 100644 index 000000000000..7c8dc3ebbf52 --- /dev/null +++ b/tests/pos/i20136b/A_1.scala @@ -0,0 +1,8 @@ +package a + +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[E <: Expr] = E match + case Expr.Of[v] => v diff --git a/tests/pos/i20136b/B_2.scala b/tests/pos/i20136b/B_2.scala new file mode 100644 index 000000000000..54a3da158f89 --- /dev/null +++ b/tests/pos/i20136b/B_2.scala @@ -0,0 +1,8 @@ +package a + +trait TC[E <: Expr]: + type Elem = Expr.ExtractValue[E] +class BIExpr extends Expr: + type Value = BigInt +class Foo extends TC[BIExpr]: + val v: Elem = 0 From d421f88a64d090493c323626550f93632d66f534 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 10 Apr 2024 00:23:44 +0200 Subject: [PATCH 085/465] Do not flag match types as `Deferred` This already wasn't the case for unpickled match types, which caused varying results for `ImplicitRunInfo#isAnchor`, by not reaching the `isMatchAlias` condition. Ensures both #20071 and #20136 each have the same result, when compiled with a classpath dependency as when merged. 
Note that they both still fail (20071 compiles but shouldn't), but at least do so consistently. Also update TreeUnpickler MATCHtpt doc to align with changes from #19871 Co-authored-by: Guillaume Martres --- compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala | 2 -- compiler/src/dotty/tools/dotc/typer/Namer.scala | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index a75cc6c666d0..25e5aa4fa040 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1542,8 +1542,6 @@ class TreeUnpickler(reader: TastyReader, // as the reduction of the match type definition! // // We also override the type, as that's what Typer does. - // The difference here is that a match type that reduces to a non-match type - // makes the TypeRef for that definition will have a TypeAlias info instead of a MatchAlias. 
tpt.overwriteType(tpt.tpe.normalized) tpt case TYPEBOUNDStpt => diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 15d7885776c5..6586eba9b933 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -268,8 +268,8 @@ class Namer { typer: Typer => else if flags.isAllOf(EnumValue) && ctx.owner.isStaticOwner then flags |= JavaStatic case tree: TypeDef => def analyzeRHS(rhs: Tree): Unit = rhs match - case _: TypeBoundsTree | _: MatchTypeTree => - flags |= Deferred // Typedefs with Match rhs classify as abstract + case _: TypeBoundsTree => + flags |= Deferred case LambdaTypeTree(_, body) => analyzeRHS(body) case _ => From c794eab1bf161c8d97381f9e793b735b5d01aa92 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 10 Apr 2024 00:40:04 +0200 Subject: [PATCH 086/465] Normalize types in `liftToAnchors` instead of in `collectParts` Fixes #20136 Co-authored-by: Guillaume Martres --- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5b233bd98730..5ffc81744d85 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -636,7 +636,7 @@ trait ImplicitRunInfo: else if implicitScopeCache.contains(t) then parts += t else partSeen += t - t.dealias.normalized match + t.dealias match case t: TypeRef => if isAnchor(t.symbol) then parts += t @@ -817,7 +817,7 @@ trait ImplicitRunInfo: else AndType.make(apply(lo), apply(hi)) case u => apply(u) - def apply(t: Type) = t.dealias match + def apply(t: Type) = t.dealias.normalized match case t: TypeRef => if t.symbol.isClass || isAnchor(t.symbol) then t else applyToUnderlying(t) case t: TypeVar => apply(t.underlying) From adf089bd38bd885983e4fc3506ec5f0aef796dd2 Mon 
Sep 17 00:00:00 2001 From: Lucas Nouguier Date: Wed, 10 Apr 2024 08:42:00 +0200 Subject: [PATCH 087/465] Fix active param index for empty param lists (#20142) Fixes #19969 with @mbovel @rochala --------- Co-authored-by: Lucas Nouguier --- .../dotty/tools/dotc/util/Signatures.scala | 7 ++++-- .../pc/base/BaseSignatureHelpSuite.scala | 2 +- .../signaturehelp/SignatureHelpSuite.scala | 25 +++++++++++++++++++ 3 files changed, 31 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 736633e0f6a7..3f7d7dd39531 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -196,7 +196,8 @@ object Signatures { fun: tpd.Tree, isTypeApply: Boolean = false )(using Context): (Int, Int, List[Signature]) = - def treeQualifier(tree: tpd.Tree): tpd.Tree = tree match + def treeQualifier(tree: tpd.Tree): tpd.Tree = + tree match case Apply(qual, _) => treeQualifier(qual) case TypeApply(qual, _) => treeQualifier(qual) case AppliedTypeTree(qual, _) => treeQualifier(qual) @@ -247,7 +248,9 @@ object Signatures { val alternativeSignatures = alternativesWithTypes .flatMap(toApplySignature(_, findOutermostCurriedApply(untpdPath), safeParamssListIndex)) - val finalParamIndex = currentParamsIndex + previousArgs + val finalParamIndex = + if currentParamsIndex == -1 then -1 + else previousArgs + currentParamsIndex (finalParamIndex, alternativeIndex, alternativeSignatures) else (0, 0, Nil) diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala index ca647502fabf..5f73b108e4de 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala @@ -43,7 +43,7 @@ abstract class BaseSignatureHelpSuite extends BasePCSuite: out 
.append(signature.getLabel) .append("\n") - if (result.getActiveSignature == i && result.getActiveParameter != null && signature.getParameters.size() > 0) { + if (result.getActiveSignature == i && result.getActiveParameter != null && result.getActiveParameter() >= 0 && signature.getParameters.size() > 0) { val param = signature.getParameters.get(result.getActiveParameter) val label = param.getLabel.getLeft() /* We need to find the label of the active parameter and show ^ at that spot diff --git a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala index 9e223cb094e3..2b458ced9683 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala @@ -1533,3 +1533,28 @@ class SignatureHelpSuite extends BaseSignatureHelpSuite: |foo(i: Boolean, s: String)(b: Int): Unit |""".stripMargin ) + + @Test def `proper-param-empty-list` = + check( + """ + |object x { + | def foo[K, V](): Unit = ??? + | foo(@@) + |} + |""".stripMargin, + "foo[K, V](): Unit" + ) + + @Test def `proper-param-list-after-param-empty-list` = + check( + """ + |object x { + | def foo[K, V]()(x: Int): Unit = ??? 
+ | foo()(@@) + |} + |""".stripMargin, + """ + |foo[K, V]()(x: Int): Unit + | ^^^^^^ + """.stripMargin + ) From 530f77526d475d49f7ca394e21b70e8fb8c80389 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 10 Apr 2024 10:27:04 +0200 Subject: [PATCH 088/465] Update compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala Co-authored-by: Jan Chyb <48855024+jchyb@users.noreply.github.com> --- compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala index 7172e19184cb..b490d55bb43f 100644 --- a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala @@ -812,7 +812,7 @@ class InlineBytecodeTests extends DottyBytecodeTest { ) assert(instructions == expected, - "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + "`i was not properly inlined in `test`\n" + diffInstructions(instructions, expected)) } } From ab91dfe32e7608b9fe00a5294f237a51b6e4749d Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 9 Apr 2024 16:13:45 +0200 Subject: [PATCH 089/465] detect missing class file for classpath dependency in macro Previously, it was assumed that a missing class (that is valid in current run) during macro evaluation was due to the symbol being defined in the same project. If this condition is met, then compilation is suspended. This assumption breaks when the symbol comes from the classpath, but without a corresponding class file, leading a situation where the same file is always suspended, until it is the only one left, leading to the "cyclic macro dependencies" error. In this case we should assume that the class file will never become available because class path entries are supposed to be immutable. Therefore we should not suspend in this case. 
This commit therefore detects this situation. Instead of suspending the unit, the compiler aborts the macro expansion, reporting an error that the user will have to deal with - likely by changing the build definition --- .../src/dotty/tools/dotc/core/Symbols.scala | 4 ++ .../dotty/tools/dotc/quoted/Interpreter.scala | 58 +++++++++++++------ .../dotc/transform/MacroAnnotations.scala | 4 +- .../build.sbt | 9 +++ .../m/src/main/scala/a/A.scala | 3 + .../m/src/main/scala/b/B.scala | 26 +++++++++ .../m/src/main/scala/c/C.scala | 11 ++++ .../project/DottyInjectedPlugin.scala | 11 ++++ .../pipelining-scala-macro-splice-ok/test | 3 + .../a/src/main/scala/a/A.scala | 3 + .../pipelining-scala-macro-splice/a_alt/.keep | 0 .../pipelining-scala-macro-splice/build.sbt | 32 ++++++++++ .../m/src/main/scala/b/B.scala | 26 +++++++++ .../m/src/main/scala/c/C.scala | 11 ++++ .../pipelining-scala-macro-splice/m_alt/.keep | 0 .../project/DottyInjectedPlugin.scala | 11 ++++ .../pipelining-scala-macro-splice/test | 10 ++++ 17 files changed, 201 insertions(+), 21 deletions(-) create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice-ok/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/c/C.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice-ok/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice-ok/test create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice/a_alt/.keep create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala create mode 
100644 sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/c/C.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice/m_alt/.keep create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-splice/test diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 78c736649605..7f6f13585efd 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -165,6 +165,10 @@ object Symbols extends SymUtils { final def isDefinedInSource(using Context): Boolean = span.exists && isValidInCurrentRun && associatedFileMatches(!_.isScalaBinary) + /** Is this symbol valid in the current run, but comes from the classpath? */ + final def isDefinedInBinary(using Context): Boolean = + isValidInCurrentRun && associatedFileMatches(_.isScalaBinary) + /** Is symbol valid in current run? */ final def isValidInCurrentRun(using Context): Boolean = (lastDenot.validFor.runId == ctx.runId || stillValid(lastDenot)) && diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index bbf6c40cfc1b..e34d35065476 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -166,8 +166,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): val inst = try loadModule(moduleClass) catch - case MissingClassDefinedInCurrentRun(sym) => - suspendOnMissing(sym, pos) + case MissingClassValidInCurrentRun(sym, origin) => + suspendOnMissing(sym, origin, pos) val clazz = inst.getClass val name = fn.name.asTermName val method = getMethod(clazz, name, paramsSig(fn)) @@ -213,8 +213,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): private def loadClass(name: String): Class[?] 
= try classLoader.loadClass(name) catch - case MissingClassDefinedInCurrentRun(sym) => - suspendOnMissing(sym, pos) + case MissingClassValidInCurrentRun(sym, origin) => + suspendOnMissing(sym, origin, pos) private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = @@ -223,8 +223,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): case _: NoSuchMethodException => val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" throw new StopInterpretation(msg, pos) - case MissingClassDefinedInCurrentRun(sym) => - suspendOnMissing(sym, pos) + case MissingClassValidInCurrentRun(sym, origin) => + suspendOnMissing(sym, origin, pos) } private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = @@ -242,8 +242,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): ex.getTargetException match { case ex: scala.quoted.runtime.StopMacroExpansion => throw ex - case MissingClassDefinedInCurrentRun(sym) => - suspendOnMissing(sym, pos) + case MissingClassValidInCurrentRun(sym, origin) => + suspendOnMissing(sym, origin, pos) case targetException => val sw = new StringWriter() sw.write("Exception occurred while executing macro expansion.\n") @@ -348,8 +348,11 @@ object Interpreter: } end Call - object MissingClassDefinedInCurrentRun { - def unapply(targetException: Throwable)(using Context): Option[Symbol] = { + enum ClassOrigin: + case Classpath, Source + + object MissingClassValidInCurrentRun { + def unapply(targetException: Throwable)(using Context): Option[(Symbol, ClassOrigin)] = { if !ctx.compilationUnit.isSuspendable then None else targetException match case _: NoClassDefFoundError | _: ClassNotFoundException => @@ -358,17 +361,34 @@ object Interpreter: else val className = message.replace('/', '.') val sym = - if className.endsWith(str.MODULE_SUFFIX) then staticRef(className.toTermName).symbol.moduleClass - else 
staticRef(className.toTypeName).symbol - // If the symbol does not a a position we assume that it came from the current run and it has an error - if sym.isDefinedInCurrentRun || (sym.exists && !sym.srcPos.span.exists) then Some(sym) - else None + if className.endsWith(str.MODULE_SUFFIX) then + staticRef(className.stripSuffix(str.MODULE_SUFFIX).toTermName).symbol.moduleClass + else + staticRef(className.toTypeName).symbol + if sym.isDefinedInBinary then + // i.e. the associated file is `.tasty`, if the macro classloader is not able to find the class, + // possibly it indicates that it comes from a pipeline-compiled dependency. + Some((sym, ClassOrigin.Classpath)) + else if sym.isDefinedInCurrentRun || (sym.exists && !sym.srcPos.span.exists) then + // If the symbol does not a a position we assume that it came from the current run and it has an error + Some((sym, ClassOrigin.Source)) + else + None case _ => None } } - def suspendOnMissing(sym: Symbol, pos: SrcPos)(using Context): Nothing = - if ctx.settings.YnoSuspendedUnits.value then - throw StopInterpretation(em"suspension triggered by a dependency on missing $sym not allowed with -Yno-suspended-units", pos) + def suspendOnMissing(sym: Symbol, origin: ClassOrigin, pos: SrcPos)(using Context): Nothing = + if origin == ClassOrigin.Classpath then + throw StopInterpretation( + em"""Macro code depends on ${sym.showLocated} found on the classpath, but could not be loaded while evaluating the macro. + | This is likely because class files could not be found in the classpath entry for the symbol. + | + | A possible cause is if the origin of this symbol was built with pipelined compilation; + | in which case, this problem may go away by disabling pipelining for that origin. 
+ | + | $sym is defined in file ${sym.associatedFile}""", pos) + else if ctx.settings.YnoSuspendedUnits.value then + throw StopInterpretation(em"suspension triggered by a dependency on missing ${sym.showLocated} not allowed with -Yno-suspended-units", pos) else - ctx.compilationUnit.suspend(i"suspension triggered by a dependency on missing $sym") // this throws a SuspendException + ctx.compilationUnit.suspend(i"suspension triggered by a dependency on missing ${sym.showLocated}") // this throws a SuspendException diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala index 939497caf31c..c83e4d7b7819 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -107,8 +107,8 @@ class MacroAnnotations(phase: IdentityDenotTransformer): if !ctx.reporter.hasErrors then report.error("Macro expansion was aborted by the macro without any errors reported. 
Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", annot.tree) List(tree) - case Interpreter.MissingClassDefinedInCurrentRun(sym) => - Interpreter.suspendOnMissing(sym, annot.tree) + case Interpreter.MissingClassValidInCurrentRun(sym, origin) => + Interpreter.suspendOnMissing(sym, origin, annot.tree) case NonFatal(ex) => val stack0 = ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.MacroAnnotations") val stack = stack0.take(1 + stack0.lastIndexWhere(_.getMethodName == "transform")) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/build.sbt new file mode 100644 index 000000000000..3162b525fc06 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/build.sbt @@ -0,0 +1,9 @@ +ThisBuild / usePipelining := true + +// m defines a macro depending on b.B, it also tries to use the macro in the same project, +// which will succeed even though B.class is not available when running the macro, +// because compilation can suspend until B is available. 
+lazy val m = project.in(file("m")) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/a/A.scala new file mode 100644 index 000000000000..ded148f5f613 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/a/A.scala @@ -0,0 +1,3 @@ +package a + +class A(val i: Int) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala new file mode 100644 index 000000000000..6b5337f96212 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala @@ -0,0 +1,26 @@ +package b + +import a.A +import scala.quoted.* + +object B { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + // this macro will cause a suspension in compilation of C.scala, because it calls + // transparentPower. This will try to invoke the macro but fail because A.class + // is not yet available until the run for A.scala completes. + + // see sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala + // for a corresponding implementation that uses a class from an upstream project + // instead, and fails because pipelining is turned on for the upstream project. 
+ def impl(x: Double, n: A): Double = + if (n.i == 0) 1.0 + else if (n.i % 2 == 1) x * impl(x, A(n.i - 1)) + else impl(x * x, A(n.i / 2)) + + Expr(impl(x.valueOrError, A(n.valueOrError))) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/c/C.scala b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/c/C.scala new file mode 100644 index 000000000000..c88acf0c2b28 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/c/C.scala @@ -0,0 +1,11 @@ +package c + +import b.B + +object C { + @main def run = { + assert(B.transparentPower(2.0, 2) == 4.0) + assert(B.transparentPower(2.0, 3) == 8.0) + assert(B.transparentPower(2.0, 4) == 16.0) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..1c6c00400f04 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/test b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/test new file mode 100644 index 000000000000..78e8e230e0ef --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/test @@ -0,0 +1,3 @@ +# shows that it is ok to depend on a class, defined in the same project, +# in a macro implementation. Compilation will suspend at typer. 
+> m/run diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-splice/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..ded148f5f613 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/a/src/main/scala/a/A.scala @@ -0,0 +1,3 @@ +package a + +class A(val i: Int) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/a_alt/.keep b/sbt-test/pipelining/pipelining-scala-macro-splice/a_alt/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-splice/build.sbt new file mode 100644 index 000000000000..91186af42ef3 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/build.sbt @@ -0,0 +1,32 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +// same as a, but does not use pipelining +lazy val a_alt = project.in(file("a_alt")) + .settings( + Compile / sources := (a / Compile / sources).value, + Compile / exportPipelining := false, + ) + + +// m defines a macro depending on a, it also tries to use the macro in the same project, +// which will fail because A.class is not available when running the macro, +// because the dependency on a is pipelined. +lazy val m = project.in(file("m")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) + +// same as m, but depends on a_alt, so it will compile +// because A.class will be available when running the macro. 
+lazy val m_alt = project.in(file("m_alt")) + .dependsOn(a_alt) + .settings( + Compile / sources := (m / Compile / sources).value, + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala new file mode 100644 index 000000000000..5da498a27355 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala @@ -0,0 +1,26 @@ +package b + +import a.A +import scala.quoted.* + +object B { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + // this macro is invoked during compilation of C.scala. When project a is pipelined + // This will fail because A.class will never be available, because the classpath entry + // is the early-output jar. The compiler detects this and aborts macro expansion with an error. + + // see sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala + // for a corresponding implementation that uses a class from the same project + // instead, but succeeds because it can suspend compilation until classes become available. 
+ def impl(x: Double, n: A): Double = + if (n.i == 0) 1.0 + else if (n.i % 2 == 1) x * impl(x, A(n.i - 1)) + else impl(x * x, A(n.i / 2)) + + Expr(impl(x.valueOrError, A(n.valueOrError))) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/c/C.scala b/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/c/C.scala new file mode 100644 index 000000000000..c88acf0c2b28 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/c/C.scala @@ -0,0 +1,11 @@ +package c + +import b.B + +object C { + @main def run = { + assert(B.transparentPower(2.0, 2) == 4.0) + assert(B.transparentPower(2.0, 3) == 8.0) + assert(B.transparentPower(2.0, 4) == 16.0) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/m_alt/.keep b/sbt-test/pipelining/pipelining-scala-macro-splice/m_alt/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-splice/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..1c6c00400f04 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/test b/sbt-test/pipelining/pipelining-scala-macro-splice/test new file mode 100644 index 000000000000..db95a0ab56a8 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/test @@ -0,0 +1,10 @@ +# as described in build.sbt, this will fail to compile. 
+# m defines a macro, depending on a.A, defined in upstream project a +# however because m also tries to run the macro in the same project, +# a/A.class is not available yet, so a reflection error will occur. +# This is caught by the compiler and presents a pretty diagnostic to the user, +# suggesting to disable pipelining in the project defining A. +-> m/compile +# This will run, simulating a user following the suggestion to +# disable pipelining in project a. +> m_alt/run From ed9feccf9ae41ea4c965daba4966dec631e91769 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 10 Apr 2024 14:11:53 +0200 Subject: [PATCH 090/465] Add explanation to checkCaseClassInheritanceInvariant error msg (#20141) Closes #18552 which was actually not an error, see: https://github.com/scala/scala3/blob/73882c5b62b8cd96031ad975f7677949433a9f21/compiler/src/dotty/tools/dotc/typer/RefChecks.scala#L889-L893 --------- Co-authored-by: Anna Herlihy Co-authored-by: Natsu Kagami --- .../src/dotty/tools/dotc/typer/RefChecks.scala | 18 +++++++++++------- tests/neg/i18552.check | 13 +++++++++++++ tests/neg/i18552.scala | 9 +++++++++ 3 files changed, 33 insertions(+), 7 deletions(-) create mode 100644 tests/neg/i18552.check create mode 100644 tests/neg/i18552.scala diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 3a4c0dd24acb..e3d78e3c5707 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -891,11 +891,15 @@ object RefChecks { * can assume invariant refinement for case classes in `constrainPatternType`. 
*/ def checkCaseClassInheritanceInvariant() = - for (caseCls <- clazz.info.baseClasses.tail.find(_.is(Case))) - for (baseCls <- caseCls.info.baseClasses.tail) - if (baseCls.typeParams.exists(_.paramVarianceSign != 0)) - for (problem <- variantInheritanceProblems(baseCls, caseCls, "non-variant", "case ")) - report.errorOrMigrationWarning(problem, clazz.srcPos, MigrationVersion.Scala2to3) + for + caseCls <- clazz.info.baseClasses.tail.find(_.is(Case)) + baseCls <- caseCls.info.baseClasses.tail + if baseCls.typeParams.exists(_.paramVarianceSign != 0) + problem <- variantInheritanceProblems(baseCls, caseCls, i"base $baseCls", "case ") + withExplain = problem.appendExplanation: + """Refining a basetype of a case class is not allowed. + |This is a limitation that enables better GADT constraints in case class patterns""".stripMargin + do report.errorOrMigrationWarning(withExplain, clazz.srcPos, MigrationVersion.Scala2to3) checkNoAbstractMembers() if (abstractErrors.isEmpty) checkNoAbstractDecls(clazz) @@ -924,7 +928,7 @@ object RefChecks { for { cls <- clazz.info.baseClasses.tail if cls.paramAccessors.nonEmpty && !mixins.contains(cls) - problem <- variantInheritanceProblems(cls, clazz.asClass.superClass, "parameterized", "super") + problem <- variantInheritanceProblems(cls, clazz.asClass.superClass, i"parameterized base $cls", "super") } report.error(problem, clazz.srcPos) } @@ -947,7 +951,7 @@ object RefChecks { if (combinedBT =:= thisBT) None // ok else Some( - em"""illegal inheritance: $clazz inherits conflicting instances of $baseStr base $baseCls. + em"""illegal inheritance: $clazz inherits conflicting instances of $baseStr. 
| | Direct basetype: $thisBT | Basetype via $middleStr$middle: $combinedBT""") diff --git a/tests/neg/i18552.check b/tests/neg/i18552.check new file mode 100644 index 000000000000..a7a04ed78b47 --- /dev/null +++ b/tests/neg/i18552.check @@ -0,0 +1,13 @@ +-- Error: tests/neg/i18552.scala:9:6 ----------------------------------------------------------------------------------- +9 |class MB(id:Int) extends MA(id) with M[B] // error + | ^ + | illegal inheritance: class MB inherits conflicting instances of base trait M. + | + | Direct basetype: M[B] + | Basetype via case class MA: M[A] + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Refining a basetype of a case class is not allowed. + | This is a limitation that enables better GADT constraints in case class patterns + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i18552.scala b/tests/neg/i18552.scala new file mode 100644 index 000000000000..29f928e1dcfa --- /dev/null +++ b/tests/neg/i18552.scala @@ -0,0 +1,9 @@ +//> using options -explain + +trait A +trait B extends A + +trait M[+T] + +case class MA(id:Int) extends M[A] +class MB(id:Int) extends MA(id) with M[B] // error From 4b711f584e37cd751b813d338a5f65ab09ba9fcb Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Wed, 10 Apr 2024 12:58:51 +0000 Subject: [PATCH 091/465] Optimize comparing types in mergeRefinedOrApplied --- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 4 +++- tests/pos/i19789.scala | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i19789.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala 
b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index d88b61d41e2f..012464f71d9b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -249,7 +249,8 @@ object TypeOps: mergeRefinedOrApplied(tp1, tp21) & mergeRefinedOrApplied(tp1, tp22) case _ => fail - tp1 match { + if tp1 eq tp2 then tp1 + else tp1 match { case tp1 @ RefinedType(parent1, name1, rinfo1) => tp2 match { case RefinedType(parent2, `name1`, rinfo2) => @@ -273,6 +274,7 @@ object TypeOps: } case AndType(tp11, tp12) => mergeRefinedOrApplied(tp11, tp2) & mergeRefinedOrApplied(tp12, tp2) + case tp1: TypeParamRef if tp1 == tp2 => tp1 case _ => fail } } diff --git a/tests/pos/i19789.scala b/tests/pos/i19789.scala new file mode 100644 index 000000000000..24c3bdb1df8f --- /dev/null +++ b/tests/pos/i19789.scala @@ -0,0 +1,5 @@ +type Kinded[F[_]] = F[Any] | F[Nothing] + +def values[F[_]]: Vector[Kinded[F]] = ??? + +def mapValues[F[_], T](f: Kinded[F] => T): Vector[T] = values[F].map { case x => f(x) } From d0fae847e052731a9294ab4e417a0c11654735db Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 10 Apr 2024 16:03:04 +0200 Subject: [PATCH 092/465] Try to avoid timeouts in `scala2-library-from-tasty*.scala` Run these tests on their own to reduce the risk of timeouts. 
--- .../tools/dotc/BootstrappedOnlyCompilationTests.scala | 10 ++++++++++ .../scala2-library-from-tasty-jar.scala | 0 .../scala2-library-from-tasty.scala | 0 3 files changed, 10 insertions(+) rename tests/{run-with-compiler => run-custom-args}/scala2-library-from-tasty-jar.scala (100%) rename tests/{run-with-compiler => run-custom-args}/scala2-library-from-tasty.scala (100%) diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index 5e9a01a77ca7..a40c1ec1e5b2 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -138,6 +138,16 @@ class BootstrappedOnlyCompilationTests { aggregateTests(tests*).checkRuns() } + @Test def runScala2LibraryFromTasty: Unit = { + implicit val testGroup: TestGroup = TestGroup("runScala2LibraryFromTasty") + // These tests recompile the entire scala2-library from TASTy, + // they are resource intensive and should not run alongside other tests to avoid timeouts + aggregateTests( + compileFile("tests/run-custom-args/scala2-library-from-tasty-jar.scala", withCompilerOptions), + compileFile("tests/run-custom-args/scala2-library-from-tasty.scala", withCompilerOptions), + ).limitThreads(2).checkRuns() // TODO reduce to limitThreads(1) if it still causes problems, this would be around 50% slower based on local benchmarking + } + @Test def runBootstrappedOnly: Unit = { implicit val testGroup: TestGroup = TestGroup("runBootstrappedOnly") aggregateTests( diff --git a/tests/run-with-compiler/scala2-library-from-tasty-jar.scala b/tests/run-custom-args/scala2-library-from-tasty-jar.scala similarity index 100% rename from tests/run-with-compiler/scala2-library-from-tasty-jar.scala rename to tests/run-custom-args/scala2-library-from-tasty-jar.scala diff --git a/tests/run-with-compiler/scala2-library-from-tasty.scala 
b/tests/run-custom-args/scala2-library-from-tasty.scala similarity index 100% rename from tests/run-with-compiler/scala2-library-from-tasty.scala rename to tests/run-custom-args/scala2-library-from-tasty.scala From f4ff6e374aa4112b240177e810887fb9f5ecbde3 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 14 Feb 2024 09:16:23 +0100 Subject: [PATCH 093/465] Only allow erased parameters in erased definitions So far, we do not have any use case for them. We could enable them in a later version. The current implementation does not handle correctly the non-erased arguments to erased definitions. These should always be evaluated, but in some cases we can dorp them by mistake. --- .../tools/dotc/transform/PostTyper.scala | 7 ++++ tests/coverage/run/erased/test.scala | 2 +- .../coverage/run/erased/test.scoverage.check | 42 +++++++++---------- tests/neg/erased-1.scala | 17 ++------ tests/neg/erased-2.scala | 21 ++-------- tests/neg/erased-3.scala | 21 ++-------- tests/neg/erased-args-lifted.scala | 2 +- tests/neg/erased-params.scala | 12 ++++++ tests/pos/i7741.scala | 2 +- 9 files changed, 54 insertions(+), 72 deletions(-) create mode 100644 tests/neg/erased-params.scala diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index cc175fc5c222..60e4075e8674 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -553,7 +553,14 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => report.error("classes that extend MacroAnnotation must not be inner/local classes", sym.srcPos) private def checkErasedDef(tree: ValOrDefDef)(using Context): Unit = + def checkOnlyErasedParams(): Unit = tree match + case tree: DefDef => + for params <- tree.paramss; param <- params if !param.symbol.isType && !param.symbol.is(Erased) do + report.error("erased definition can only have erased parameters", param.srcPos) + 
case _ => + if tree.symbol.is(Erased, butNot = Macro) then + checkOnlyErasedParams() val tpe = tree.rhs.tpe if tpe.derivesFrom(defn.NothingClass) then report.error("`erased` definition cannot be implemented with en expression of type Nothing", tree.srcPos) diff --git a/tests/coverage/run/erased/test.scala b/tests/coverage/run/erased/test.scala index 15a067e9ed50..6645020cac80 100644 --- a/tests/coverage/run/erased/test.scala +++ b/tests/coverage/run/erased/test.scala @@ -2,7 +2,7 @@ import scala.language.experimental.erasedDefinitions erased def parameterless: String = "y" -erased def e(x: String): String = "x" +erased def e(erased x: String): String = "x" def foo(erased a: String)(b: String): String = println(s"foo(a)($b)") b diff --git a/tests/coverage/run/erased/test.scoverage.check b/tests/coverage/run/erased/test.scoverage.check index dedf5689c490..aed6338099aa 100644 --- a/tests/coverage/run/erased/test.scoverage.check +++ b/tests/coverage/run/erased/test.scoverage.check @@ -25,8 +25,8 @@ test$package Object .test$package foo -181 -203 +188 +210 7 println Apply @@ -42,8 +42,8 @@ test$package Object .test$package foo -189 -202 +196 +209 7 s Apply @@ -59,8 +59,8 @@ test$package Object .test$package foo -132 139 +146 6 foo DefDef @@ -76,8 +76,8 @@ test$package Object .test$package identity -245 -269 +252 +276 11 println Apply @@ -93,8 +93,8 @@ test$package Object .test$package identity -253 -268 +260 +275 11 s Apply @@ -110,8 +110,8 @@ test$package Object .test$package identity -209 -221 +216 +228 10 identity DefDef @@ -127,8 +127,8 @@ test$package Object .test$package Test -300 -323 +307 +330 16 foo Apply @@ -144,8 +144,8 @@ test$package Object .test$package Test -326 -342 +333 +349 17 foo Apply @@ -161,8 +161,8 @@ test$package Object .test$package Test -345 -374 +352 +381 18 foo Apply @@ -178,8 +178,8 @@ test$package Object .test$package Test -357 -373 +364 +380 18 identity Apply @@ -195,8 +195,8 @@ test$package Object .test$package Test -275 -289 +282 +296 15 
Test DefDef diff --git a/tests/neg/erased-1.scala b/tests/neg/erased-1.scala index 62a1024e80f5..deaa2a6d750e 100644 --- a/tests/neg/erased-1.scala +++ b/tests/neg/erased-1.scala @@ -12,25 +12,14 @@ object Test { }) foo1(a) // OK foo2( // error - a // error - ) - foo3( // error - a + a // Ok ) a // error } - erased def foo2(a: Int): Int = { - foo0(a) // OK - foo1(a) // OK - foo2(a) // OK - foo3(a) // OK - a // OK - } - erased def foo3(erased a: Int): Int = { + erased def foo2(erased a: Int): Int = { foo0(a) // OK foo1(a) // OK foo2(a) // OK - foo3(a) // OK a // OK } -} \ No newline at end of file +} diff --git a/tests/neg/erased-2.scala b/tests/neg/erased-2.scala index 02e4b56e11ac..3b51d9a4a40b 100644 --- a/tests/neg/erased-2.scala +++ b/tests/neg/erased-2.scala @@ -8,39 +8,26 @@ object Test { ) foo1(u) // OK foo2( // error - u // error - ) - foo3( // error - u + u // Ok ) u // error u // error } - erased def foo2(a: Int): Int = { - foo0(u) // OK - foo1(u) // OK - foo2(u) // OK - foo3(u) // OK - u // warn - u // OK - } - erased def foo3(erased a: Int): Int = { + erased def foo2(erased a: Int): Int = { foo0(u) // OK foo1(u) // OK foo2(u) // OK - foo3(u) // OK u // warn u // OK } - erased val foo4: Int = { + erased val foo3: Int = { foo0(u) // OK foo1(u) // OK foo2(u) // OK - foo3(u) // OK u // warn u // OK } erased def u: Int = 42 -} \ No newline at end of file +} diff --git a/tests/neg/erased-3.scala b/tests/neg/erased-3.scala index 5c6a31860b11..7b33794791b6 100644 --- a/tests/neg/erased-3.scala +++ b/tests/neg/erased-3.scala @@ -8,40 +8,27 @@ object Test { ) foo1(u()) // OK foo2( // error - u() // error - ) - foo3( // error - u() + u() // Ok ) u() // error u() // error } - erased def foo2(a: Int): Int = { - foo0(u()) // OK - foo1(u()) // OK - foo2(u()) // OK - foo3(u()) // OK - u() // warn - u() // OK - } - erased def foo3(erased a: Int): Int = { + erased def foo2(erased a: Int): Int = { foo0(u()) // OK foo1(u()) // OK foo2(u()) // OK - foo3(u()) // OK u() // 
warn u() // OK } - erased val foo4: Int = { + erased val foo3: Int = { foo0(u()) // OK foo1(u()) // OK foo2(u()) // OK - foo3(u()) // OK println() u() // warn u() // OK } erased def u(): Int = 42 -} \ No newline at end of file +} diff --git a/tests/neg/erased-args-lifted.scala b/tests/neg/erased-args-lifted.scala index 2deee749ed3d..dfa7b74ee3d4 100644 --- a/tests/neg/erased-args-lifted.scala +++ b/tests/neg/erased-args-lifted.scala @@ -2,7 +2,7 @@ object Test { def foo(a: Int)(b: Int, c: Int) = 42 - erased def bar(i: Int): Int = { + erased def bar(erased i: Int): Int = { println(1) 42 } diff --git a/tests/neg/erased-params.scala b/tests/neg/erased-params.scala new file mode 100644 index 000000000000..2315b6bdf54d --- /dev/null +++ b/tests/neg/erased-params.scala @@ -0,0 +1,12 @@ +import scala.language.experimental.erasedDefinitions + +erased def test1(x: Int): Int = x // error +erased def test2(erased x: Int): Int = x +erased def test3(erased x: Int, erased y: Int): Int = x +erased def test4(erased x: Int, y: Int): Int = x // error +erased def test5(x: Int, erased y: Int): Int = y // error +erased def test6(x: Int, y: Int): Int = y // error // error +erased def test7(erased x: Int)(erased y: Int): Int = x +erased def test8(erased x: Int)(y: Int): Int = x // error +erased def test9(x: Int)(erased y: Int): Int = y // error +erased def test10(x: Int)(y: Int): Int = y // error // error diff --git a/tests/pos/i7741.scala b/tests/pos/i7741.scala index 237616d04d2a..af9912915cc0 100644 --- a/tests/pos/i7741.scala +++ b/tests/pos/i7741.scala @@ -4,7 +4,7 @@ class A1 { @native private def a: Unit } trait A2 { - erased def i(a: Int): Int + erased def i(erased a: Int): Int } trait A3 { erased val a: Int From c34ac1c9c4ad93c780105a3842de0f969b7923cb Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 10 Apr 2024 18:40:05 +0200 Subject: [PATCH 094/465] Make parameter types of synthesized context functions inferred type trees A non-sensical capture reference appeared in the type 
of a synthesized context function literal. We do clean out @retains annotations that can contain such references, but only for inferred type trees. The problem was that context function parameters were treated like explicitly given types before. Fixes #20135 --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- tests/pos/i20135.scala | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i20135.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 7c278505f38b..612bd22ef19d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3271,7 +3271,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val paramTypes = { val hasWildcard = formals.exists(_.existsPart(_.isInstanceOf[WildcardType], StopAt.Static)) if hasWildcard then formals.map(_ => untpd.TypeTree()) - else formals.map(formal => untpd.TypeTree(formal.loBound)) // about loBound, see tests/pos/i18649.scala + else formals.map(formal => untpd.InferredTypeTree(formal.loBound)) // about loBound, see tests/pos/i18649.scala } val erasedParams = pt match { diff --git a/tests/pos/i20135.scala b/tests/pos/i20135.scala new file mode 100644 index 000000000000..6143d642fbbb --- /dev/null +++ b/tests/pos/i20135.scala @@ -0,0 +1,11 @@ +import language.experimental.captureChecking + +class Network + +class Page(using nw: Network^): + def render(client: Page^{nw} ?-> Unit) = client(using this) + +def main(net: Network^) = + var page = Page(using net) + page.render(()) + From db529d9bff6757c4f2fe139de898fb1dc969a0bd Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 10 Apr 2024 18:43:06 +0200 Subject: [PATCH 095/465] Rewrite reaches tests to fix tasty-cc nightly failures The reaches.scala pos test failed the nightly with tasty cc since one line violated the new condition for reaches capabilities. 
The problem was not discoveed in normal testing since it used the `List` class, which is capture checked only in the nightly. We now make the relevant bits of `List` explicit in the test itself, so that it is always capture checked. The failing line got moved to neg. --- tests/neg-custom-args/captures/reaches.check | 5 +++++ tests/neg-custom-args/captures/reaches.scala | 18 ++++++++++++++++++ tests/pos-custom-args/captures/reaches.scala | 12 +++++++++++- 3 files changed, 34 insertions(+), 1 deletion(-) diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index ee628a212ce7..a1c5a56369e9 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -41,3 +41,8 @@ | Required: File^{id*} | | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/reaches.scala:77:5 ------------------------------------------------------------ +77 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * + | ^^^^^^ + | Reach capability cap and universal capability cap cannot both + | appear in the type [B](f: ((box A ->{ps*} A, box A ->{ps*} A)) => B): List[B] of this expression diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index 8ba7f60d6335..de5e4362cdf2 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -59,3 +59,21 @@ def attack2 = val leaked = usingFile[File^{id*}]: f => val f1: File^{id*} = id(f) // error f1 + +class List[+A]: + def head: A = ??? + def tail: List[A] = ??? + def map[B](f: A => B): List[B] = ??? + def nonEmpty: Boolean = ??? + +extension [A](x: A) def :: (xs: List[A]): List[A] = ??? 
+ +object Nil extends List[Nothing] + +def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = + z => g(f(z)) + +def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = + ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * + + diff --git a/tests/pos-custom-args/captures/reaches.scala b/tests/pos-custom-args/captures/reaches.scala index 0f7df02e13b1..f17c25712c39 100644 --- a/tests/pos-custom-args/captures/reaches.scala +++ b/tests/pos-custom-args/captures/reaches.scala @@ -10,6 +10,16 @@ class Ref[T](init: T): def get: T = x def set(y: T) = { x = y } +class List[+A]: + def head: A = ??? + def tail: List[A] = ??? + def map[B](f: A -> B): List[B] = ??? + def nonEmpty: Boolean = ??? + +extension [A](x: A) def :: (xs: List[A]): List[A] = ??? + +object Nil extends List[Nothing] + def runAll(xs: List[Proc]): Unit = var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR while cur.nonEmpty do @@ -36,7 +46,7 @@ def compose2[A, B, C](f: A => B, g: B => C): A => C = z => g(f(z)) def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = - ps.map((x, y) => compose1(x, y)) + ps.map((x, y) => compose1(x, y)) // Does not work if map takes an impure function, see reaches in neg @annotation.capability class IO From cd313fb0dcc2cebaf6d49f16d5e37ed696c3a0d3 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 10 Apr 2024 14:13:27 +0200 Subject: [PATCH 096/465] Make error reporting resilient to exception thrown while reporting Previously the added test failed with `1 error reported` but no actual error message printed, because a stack overflow is thrown while reporting the original error. This is then caught and handled to emit a RecursionOverflow error, but that second error is non-sensical and non-sensical errors are only printed if `hasErrors` returns false. We fix this by deferring incrementing the error count (and therefore having `hasErrors` return true) until after having displayed the error. 
We also defer calling `markReported` otherwise the second error will also be suppressed. A similar change is necessary in our testing infrastructure to keep the error count coherent. --- .../dotty/tools/dotc/reporting/Reporter.scala | 2 +- .../tools/dotc/reporting/TestReporter.scala | 2 +- tests/neg/mt-deskolemize.scala | 16 ++++++++++++++++ 3 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 tests/neg/mt-deskolemize.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 22500cbbaa48..847b34bf236f 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -155,6 +155,7 @@ abstract class Reporter extends interfaces.ReporterResult { addUnreported(key, 1) case _ => if !isHidden(dia) then // avoid isHidden test for summarized warnings so that message is not forced + withMode(Mode.Printing)(doReport(dia)) dia match { case w: Warning => warnings = w :: warnings @@ -168,7 +169,6 @@ abstract class Reporter extends interfaces.ReporterResult { // match error if d is something else } markReported(dia) - withMode(Mode.Printing)(doReport(dia)) end issueUnconfigured def issueIfNotSuppressed(dia: Diagnostic)(using Context): Unit = diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala index a96a2765d56a..3b30742a8d4f 100644 --- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala +++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala @@ -71,8 +71,8 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M } if dia.level >= WARNING then - _diagnosticBuf.append(dia) _consoleReporter.doReport(dia) + _diagnosticBuf.append(dia) printMessageAndPos(dia, extra) } } diff --git a/tests/neg/mt-deskolemize.scala b/tests/neg/mt-deskolemize.scala new file mode 100644 index 
000000000000..0a58d5db7bc4 --- /dev/null +++ b/tests/neg/mt-deskolemize.scala @@ -0,0 +1,16 @@ +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[F <: Expr] = F match + case Expr.Of[v] => v +import Expr.ExtractValue + +class SimpleLoop1 extends Expr: + type Value = ExtractValue[SimpleLoop2] + +class SimpleLoop2 extends Expr: + type Value = ExtractValue[SimpleLoop1] + +object Test1: + val x: ExtractValue[SimpleLoop1] = 1 // error From e7a1f7ba1504ba32b17ac0fbf36835b0fac629ae Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 10 Apr 2024 23:22:58 +0200 Subject: [PATCH 097/465] More explicit handling of exception in error reporting --- compiler/src/dotty/tools/dotc/reporting/Reporter.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 847b34bf236f..237a3f166fe8 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -15,6 +15,7 @@ import java.io.{BufferedReader, PrintWriter} import scala.annotation.internal.sharable import scala.collection.mutable import core.Decorators.em +import core.handleRecursive object Reporter { /** Convert a SimpleReporter into a real Reporter */ @@ -155,7 +156,12 @@ abstract class Reporter extends interfaces.ReporterResult { addUnreported(key, 1) case _ => if !isHidden(dia) then // avoid isHidden test for summarized warnings so that message is not forced - withMode(Mode.Printing)(doReport(dia)) + try + withMode(Mode.Printing)(doReport(dia)) + catch case ex: Throwable => + // #20158: Don't increment the error count, otherwise we might suppress + // the RecursiveOverflow error and not print any error at all. 
+ handleRecursive("error reporting", dia.message, ex) dia match { case w: Warning => warnings = w :: warnings From 1ac72fd8e04d7a42ee17397c3b5dc1765218f23c Mon Sep 17 00:00:00 2001 From: Jisoo Park Date: Thu, 11 Apr 2024 10:58:33 +0900 Subject: [PATCH 098/465] Remove duplicate comma from Matchable selector warning --- compiler/src/dotty/tools/dotc/reporting/messages.scala | 2 +- tests/pos-with-compiler-cc/dotc/reporting/messages.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 63a8ed1e21fe..5fc5b4ae66b0 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -924,7 +924,7 @@ class MatchableWarning(tp: Type, pattern: Boolean)(using Context) extends TypeMsg(MatchableWarningID) { def msg(using Context) = val kind = if pattern then "pattern selector" else "value" - i"""${kind} should be an instance of Matchable,, + i"""${kind} should be an instance of Matchable, |but it has unmatchable type $tp instead""" def explain(using Context) = diff --git a/tests/pos-with-compiler-cc/dotc/reporting/messages.scala b/tests/pos-with-compiler-cc/dotc/reporting/messages.scala index 142ac63af0f3..cd5c5f67ff41 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/messages.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/messages.scala @@ -873,7 +873,7 @@ class MatchableWarning(tp: Type, pattern: Boolean)(using DetachedContext) extends TypeMsg(MatchableWarningID) { def msg(using Context) = val kind = if pattern then "pattern selector" else "value" - i"""${kind} should be an instance of Matchable,, + i"""${kind} should be an instance of Matchable, |but it has unmatchable type $tp instead""" def explain(using Context) = From 43d33a008a233b691c0c6697908424ba35c3c876 Mon Sep 17 00:00:00 2001 From: Lucy Martin Date: Thu, 28 Mar 2024 10:33:20 +0000 Subject: [PATCH 
099/465] Extra check to avoid converting block expressions on the rhs of a symbolic infix expression. Tests added for: * Original cast as per the ticket should not be changed * Similar match statement that should update * Code blocks in this position, as opposed to a partial function, can't update here * Simple change that should apply but in a code position where the op stack is nonempty * Equivalent code, but passing in the partial function as a single parameter, again, not updating --- .../dotty/tools/dotc/parsing/Parsers.scala | 22 ++++++- .../dotty/tools/dotc/CompilationTests.scala | 1 + tests/rewrites/i20002.check | 51 +++++++++++++++ tests/rewrites/i20002.scala | 62 +++++++++++++++++++ 4 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 tests/rewrites/i20002.check create mode 100644 tests/rewrites/i20002.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index addd54df9d69..f7618b623541 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -814,6 +814,8 @@ object Parsers { * 6. the opening brace does not follow a `=>`. The reason for this condition is that * rewriting back to braces does not work after `=>` (since in most cases braces are omitted * after a `=>` it would be annoying if braces were inserted). + 
not a code block being the input to a direct symbolic function call `inst method {\n expr \n}` cannot + * become `inst method :\n expr` for a fully symbolic method */ def bracesToIndented[T](body: => T, rewriteWithColon: Boolean): T = { val underColonSyntax = possibleColonOffset == in.lastOffset @@ -827,10 +829,28 @@ object Parsers { } var canRewrite = allBraces(in.currentRegion) && // test (1) !testChars(in.lastOffset - 3, " =>") // test(6) + + def isStartOfSymbolicFunction: Boolean = + opStack.headOption.exists { x => + val bq = x.operator.isBackquoted + val op = x.operator.name.toSimpleName.decode.forall { + Chars.isOperatorPart + } + val loc = startOpening < x.offset && x.offset < endOpening + val res = !bq && op && loc + res + } val t = enclosed(LBRACE, { canRewrite &= in.isAfterLineEnd // test (2) val curOffset = in.offset - try body + try { + val bodyResolved = body + bodyResolved match + case x:(Match | Block) => + canRewrite &= !isStartOfSymbolicFunction // test (7) + case _ => + bodyResolved + } finally { canRewrite &= in.isAfterLineEnd && in.offset != curOffset // test (3)(4) } diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index a96a4ea09102..52b058032fdc 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -75,6 +75,7 @@ class CompilationTests { compileFile("tests/rewrites/i12340.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i17187.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i17399.scala", unindentOptions.and("-rewrite")), + compileFile("tests/rewrites/i20002.scala", defaultOptions.and("-indent", "-rewrite")), ).checkRewrites() } diff --git a/tests/rewrites/i20002.check b/tests/rewrites/i20002.check new file mode 100644 index 000000000000..70c9411eb4b2 --- /dev/null +++ b/tests/rewrites/i20002.check @@ -0,0 +1,51 @@ +object Reactions: + def main: Unit = 
+ Reactions += { + case 0 => + case 1 => + } + + Reactions run: + case 0 => + case 1 => + + Reactions run_+ : + case 0 => + case 1 => + + Reactions `+=`: + case 0 => + case 1 => + + def bar: Int = ??? + + bar match + case 0 => + case 1 => + + def partPartial(i: Int): PartialFunction[Int, Unit] = + case `i` => + + Reactions += { + val pp1 = partPartial(1) + val pp2 = partPartial(2) + def codeBlock = + ??? + ??? + pp1 orElse pp2 + } + + val partialFunction = partPartial(1) orElse partPartial(2) + Reactions += { + partialFunction + } + + def +=(f: PartialFunction[Int, Unit]) = + ??? + + def run (f: PartialFunction[Int, Unit]) = + ??? + + def run_+ (f: PartialFunction[Int, Unit]) = + ??? + diff --git a/tests/rewrites/i20002.scala b/tests/rewrites/i20002.scala new file mode 100644 index 000000000000..56ea023b63b0 --- /dev/null +++ b/tests/rewrites/i20002.scala @@ -0,0 +1,62 @@ +object Reactions { + def main: Unit = { + Reactions += { + case 0 => + case 1 => + } + + Reactions run { + case 0 => + case 1 => + } + + Reactions run_+ { + case 0 => + case 1 => + } + + Reactions `+=` { + case 0 => + case 1 => + } + + def bar: Int = ??? + + bar match { + case 0 => + case 1 => + } + + def partPartial(i: Int): PartialFunction[Int, Unit] = { + case `i` => + } + + Reactions += { + val pp1 = partPartial(1) + val pp2 = partPartial(2) + def codeBlock = { + ??? + ??? + } + pp1 orElse pp2 + } + + val partialFunction = partPartial(1) orElse partPartial(2) + Reactions += { + partialFunction + } + } + + def +=(f: PartialFunction[Int, Unit]) = { + ??? + } + + def run (f: PartialFunction[Int, Unit]) = { + ??? + } + + def run_+ (f: PartialFunction[Int, Unit]) = { + ??? + } + +} From 2863a29f9e6c7685bd0bf632fa02b322f59ce99f Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 11 Apr 2024 12:31:17 +0200 Subject: [PATCH 100/465] Avoid the TypeVar.inst trap `tvar.inst` gives the _permanent_ instance of a type variable `tvar`. Even if `tvar.isInstantiated` is true its `inst` can still be NoType. 
This is a trap that caused a regression in the code of glb. This commit fixes the regression and introduces different names that will hopefully avoid the trap in the future. Fixes #20154 --- .../tools/dotc/core/OrderingConstraint.scala | 12 +++---- .../tools/dotc/core/SymDenotations.scala | 2 +- .../dotty/tools/dotc/core/TypeComparer.scala | 10 +++--- .../dotty/tools/dotc/core/TyperState.scala | 6 ++-- .../src/dotty/tools/dotc/core/Types.scala | 31 ++++++++++++------- tests/pos/i20154.scala | 15 +++++++++ 6 files changed, 50 insertions(+), 26 deletions(-) create mode 100644 tests/pos/i20154.scala diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index dd2319ed508b..8256a3cdbab1 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -315,7 +315,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, override def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = def tparams(tycon: Type): List[ParamInfo] = tycon match - case tycon: TypeVar if !tycon.inst.exists => tparams(tycon.origin) + case tycon: TypeVar if !tycon.isPermanentlyInstantiated => tparams(tycon.origin) case tycon: TypeParamRef if !hasBounds(tycon) => val entryParams = entry(tycon).typeParams if entryParams.nonEmpty then entryParams @@ -715,7 +715,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, var newDepEntry = newEntry replacedTypeVar match case tvar: TypeVar => - if tvar.inst.exists // `isInstantiated` would use ctx.typerState.constraint rather than the current constraint + if tvar.isPermanentlyInstantiated // `isInstantiated` would use ctx.typerState.constraint rather than the current constraint then // If the type variable has been instantiated, we need to forget about // the instantiation for old dependencies. 
@@ -781,7 +781,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, @tailrec def allRemovable(last: Int): Boolean = if (last < 0) true else typeVar(entries, last) match { - case tv: TypeVar => tv.inst.exists && allRemovable(last - 1) + case tv: TypeVar => tv.isPermanentlyInstantiated && allRemovable(last - 1) case _ => false } allRemovable(paramCount(entries) - 1) @@ -887,7 +887,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val limit = paramCount(entries) while i < limit do typeVar(entries, i) match - case tv: TypeVar if !tv.inst.exists => op(tv) + case tv: TypeVar if !tv.isPermanentlyInstantiated => op(tv) case _ => i += 1 } @@ -896,12 +896,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, /** The uninstantiated typevars of this constraint */ def uninstVars: collection.Seq[TypeVar] = { - if (myUninstVars == null || myUninstVars.uncheckedNN.exists(_.inst.exists)) { + if (myUninstVars == null || myUninstVars.uncheckedNN.exists(_.isPermanentlyInstantiated)) { myUninstVars = new mutable.ArrayBuffer[TypeVar] boundsMap.foreachBinding { (poly, entries) => for (i <- 0 until paramCount(entries)) typeVar(entries, i) match { - case tv: TypeVar if !tv.inst.exists && isBounds(entries(i)) => myUninstVars.uncheckedNN += tv + case tv: TypeVar if !tv.isPermanentlyInstantiated && isBounds(entries(i)) => myUninstVars.uncheckedNN += tv case _ => } } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 7536e4bd76ef..bfaaf78883ae 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1609,7 +1609,7 @@ object SymDenotations { case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo) case tp: RecType => hasSkolems(tp.parent) case tp: TypeBounds => hasSkolems(tp.lo) || hasSkolems(tp.hi) - case tp: TypeVar => hasSkolems(tp.inst) + case tp: TypeVar => 
hasSkolems(tp.permanentInst) case tp: ExprType => hasSkolems(tp.resType) case tp: AppliedType => hasSkolems(tp.tycon) || tp.args.exists(hasSkolems) case tp: LambdaType => tp.paramInfos.exists(hasSkolems) || hasSkolems(tp.resType) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index a9b5a39c2a62..cee1ec7fffa8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1600,7 +1600,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val tycon1 = liftToThis(tp.tycon) if (tycon1 ne tp.tycon) tp.derivedAppliedType(tycon1, tp.args) else tp case tp: TypeVar if tp.isInstantiated => - liftToThis(tp.inst) + liftToThis(tp.instanceOpt) case tp: AnnotatedType => val parent1 = liftToThis(tp.parent) if (parent1 ne tp.parent) tp.derivedAnnotatedType(parent1, tp.annot) else tp @@ -2521,14 +2521,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def isSuperOf(sub: Type): Boolean = sub match case AndType(sub1, sub2) => isSuperOf(sub1) || isSuperOf(sub2) - case sub: TypeVar if sub.isInstantiated => isSuperOf(sub.inst) + case sub: TypeVar if sub.isInstantiated => isSuperOf(sub.instanceOpt) case _ => isSubTypeWhenFrozen(sub, tp) tp match case tp @ AndType(tp1, tp2) => recombine(dropIfSuper(tp1, sub), dropIfSuper(tp2, sub), tp) case tp: TypeVar if tp.isInstantiated => - dropIfSuper(tp.inst, sub) + dropIfSuper(tp.instanceOpt, sub) case _ => if isSuperOf(sub) then NoType else tp end dropIfSuper @@ -2538,14 +2538,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def isSubOf(sup: Type): Boolean = sup match case OrType(sup1, sup2) => isSubOf(sup1) || isSubOf(sup2) - case sup: TypeVar if sup.isInstantiated => isSubOf(sup.inst) + case sup: TypeVar if sup.isInstantiated => isSubOf(sup.instanceOpt) case _ => isSubType(tp, sup, whenFrozen = 
!canConstrain) tp match case tp @ OrType(tp1, tp2) => recombine(dropIfSub(tp1, sup, canConstrain), dropIfSub(tp2, sup, canConstrain), tp) case tp: TypeVar if tp.isInstantiated => - dropIfSub(tp.inst, sup, canConstrain) + dropIfSub(tp.instanceOpt, sup, canConstrain) case _ => if isSubOf(sup) then NoType else tp end dropIfSub diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index ef7329c3698d..160d7749de61 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -231,7 +231,7 @@ class TyperState() { val tvars = tl.paramRefs.map(other.typeVarOfParam(_)).collect { case tv: TypeVar => tv } if this.isCommittable then tvars.foreach(tvar => - if !tvar.inst.exists && !isOwnedAnywhere(this, tvar) then includeVar(tvar)) + if !tvar.isPermanentlyInstantiated && !isOwnedAnywhere(this, tvar) then includeVar(tvar)) typeComparer.addToConstraint(tl, tvars) }) && // Integrate the additional constraints on type variables from `other` @@ -287,10 +287,10 @@ class TyperState() { for tvar <- ownedVars do val tvarState = tvar.owningState.nn.get assert(tvarState eqn this, s"Inconsistent state in $this: it owns $tvar whose owningState is ${tvarState}") - assert(!tvar.inst.exists, s"Inconsistent state in $this: it owns $tvar which is already instantiated") + assert(!tvar.isPermanentlyInstantiated, s"Inconsistent state in $this: it owns $tvar which is already instantiated") val inst = constraint.instType(tvar) if inst.exists then - tvar.setInst(inst) + tvar.setPermanentInst(inst) val tl = tvar.origin.binder if constraint.isRemovable(tl) then toCollect += tl for tl <- toCollect do diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 3c9f7e05b6e2..ba48b6a0f2e6 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -139,7 +139,7 @@ object 
Types extends TypeUtils { case t: AppliedType => t.fold(false, (x, tp) => x || test(tp, theAcc)) case t: TypeVar => - !t.inst.exists || test(t.inst, theAcc) + !t.isPermanentlyInstantiated || test(t.permanentInst, theAcc) case t: LazyRef => !t.completed || test(t.ref, theAcc) case _ => @@ -4934,11 +4934,15 @@ object Types extends TypeUtils { def setOrigin(p: TypeParamRef) = currentOrigin = p /** The permanent instance type of the variable, or NoType is none is given yet */ - private var myInst: Type = NoType + private var inst: Type = NoType - private[core] def inst: Type = myInst - private[core] def setInst(tp: Type): Unit = - myInst = tp + /** The permanent instance type that's stored in the type variable, so it cannot be retracted + * anymore, or NoType if the variable can still be further constrained or a provisional + * instance type in the constraint can be retracted. + */ + private[core] def permanentInst = inst + private[core] def setPermanentInst(tp: Type): Unit = + inst = tp if tp.exists && owningState != null then val owningState1 = owningState.uncheckedNN.get if owningState1 != null then @@ -4946,8 +4950,8 @@ object Types extends TypeUtils { owningState = null // no longer needed; null out to avoid a memory leak private[core] def resetInst(ts: TyperState): Unit = - assert(myInst.exists) - myInst = NoType + assert(inst.exists) + inst = NoType owningState = new WeakReference(ts) /** The state owning the variable. This is at first `creatorState`, but it can @@ -4985,10 +4989,15 @@ object Types extends TypeUtils { /** Is the variable already instantiated? */ def isInstantiated(using Context): Boolean = instanceOpt.exists + /** Is the variable already instantiated so that the instance cannot be + * retracted anymore? 
+ */ + def isPermanentlyInstantiated: Boolean = inst.exists + /** Instantiate variable with given type */ def instantiateWith(tp: Type)(using Context): Type = { assert(tp ne this, i"self instantiation of $origin, constraint = ${ctx.typerState.constraint}") - assert(!myInst.exists, i"$origin is already instantiated to $myInst but we attempted to instantiate it to $tp") + assert(!inst.exists, i"$origin is already instantiated to $inst but we attempted to instantiate it to $tp") typr.println(i"instantiating $this with $tp") if Config.checkConstraintsSatisfiable then @@ -4996,7 +5005,7 @@ object Types extends TypeUtils { i"$origin is constrained to be $currentEntry but attempted to instantiate it to $tp") if ((ctx.typerState eq owningState.nn.get.uncheckedNN) && !TypeComparer.subtypeCheckInProgress) - setInst(tp) + setPermanentInst(tp) ctx.typerState.constraint = ctx.typerState.constraint.replace(origin, tp) tp } @@ -5013,8 +5022,8 @@ object Types extends TypeUtils { */ def instantiate(fromBelow: Boolean)(using Context): Type = val tp = typeToInstantiateWith(fromBelow) - if myInst.exists then // The line above might have triggered instantiation of the current type variable - myInst + if inst.exists then // The line above might have triggered instantiation of the current type variable + inst else instantiateWith(tp) diff --git a/tests/pos/i20154.scala b/tests/pos/i20154.scala new file mode 100644 index 000000000000..17dc41be7011 --- /dev/null +++ b/tests/pos/i20154.scala @@ -0,0 +1,15 @@ +sealed abstract class Kyo[+T, -S] +opaque type <[+T, -S] >: T = T | Kyo[T, S] + +abstract class Effect[+E]: + type Command[_] + +case class Recurse[Command[_], Result[_], E <: Effect[E], T, S, S2]( + h: ResultHandler[Command, Result, E, S], + v: T < (E & S & S2) +) + +abstract class ResultHandler[Command[_], Result[_], E <: Effect[E], S]: + opaque type Handle[T, S2] >: (Result[T] < (S & S2)) = Result[T] < (S & S2) | Recurse[Command, Result, E, T, S, S2] + + def handle[T, S2](h: 
ResultHandler[Command, Result, E, S], v: T < (E & S & S2)): Handle[T, S2] = Recurse(h, v) From 0ee804f5e5ad4e8712bb11c6a9f1bb172ef7586b Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 11 Apr 2024 17:37:57 +0200 Subject: [PATCH 101/465] Restore forgotten fatal-warnings tests In daeee3544a7933736240d1b79ebe81d699d74b0d, support for tests in the directory `tests/pos-special/fatal-warnings` was dropped in favor of magic `//>` comments in the regular `tests/pos` directory, but a few tests were forgotten in the original directory and were thus not run. This commit moves them to the appropriate directory with the correct magic comment. --- compiler/test/dotc/pos-test-pickling.blacklist | 1 + tests/{pos-special/fatal-warnings => pos}/i17735.scala | 4 ++-- tests/{pos-special/fatal-warnings => pos}/i17735a.scala | 2 +- tests/{pos-special/fatal-warnings => pos}/i17741.scala | 4 ++-- tests/{pos-special/fatal-warnings => pos}/nowarnannot.scala | 2 ++ 5 files changed, 8 insertions(+), 5 deletions(-) rename tests/{pos-special/fatal-warnings => pos}/i17735.scala (90%) rename tests/{pos-special/fatal-warnings => pos}/i17735a.scala (90%) rename tests/{pos-special/fatal-warnings => pos}/i17741.scala (90%) rename tests/{pos-special/fatal-warnings => pos}/nowarnannot.scala (66%) diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 81661e87b84e..3ea8b550f160 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -30,6 +30,7 @@ strict-pattern-bindings-3.0-migration.scala i17186b.scala i11982a.scala i17255 +i17735.scala # Tree is huge and blows stack for printing Text i7034.scala diff --git a/tests/pos-special/fatal-warnings/i17735.scala b/tests/pos/i17735.scala similarity index 90% rename from tests/pos-special/fatal-warnings/i17735.scala rename to tests/pos/i17735.scala index f171d4a028f7..17fb31010a8a 100644 --- 
a/tests/pos-special/fatal-warnings/i17735.scala +++ b/tests/pos/i17735.scala @@ -1,4 +1,4 @@ -//> using options -Wvalue-discard +//> using options -Xfatal-warnings -Wvalue-discard import scala.collection.mutable import scala.annotation.nowarn @@ -21,4 +21,4 @@ object Foo: // here @nowarn is effective without -Wfatal-warnings (i.e. no warning) // But with -Wfatal-warnings we get an error messageBuilder.append("\n").append(s): @nowarn("msg=discarded non-Unit value*") - messageBuilder.result() \ No newline at end of file + messageBuilder.result() diff --git a/tests/pos-special/fatal-warnings/i17735a.scala b/tests/pos/i17735a.scala similarity index 90% rename from tests/pos-special/fatal-warnings/i17735a.scala rename to tests/pos/i17735a.scala index fe0ea7e6bc45..b4d91f8d25fc 100644 --- a/tests/pos-special/fatal-warnings/i17735a.scala +++ b/tests/pos/i17735a.scala @@ -1,4 +1,4 @@ -//> using options -Wvalue-discard -Wconf:msg=non-Unit:s +//> using options -Xfatal-warnings -Wvalue-discard -Wconf:msg=non-Unit:s import scala.collection.mutable import scala.annotation.nowarn diff --git a/tests/pos-special/fatal-warnings/i17741.scala b/tests/pos/i17741.scala similarity index 90% rename from tests/pos-special/fatal-warnings/i17741.scala rename to tests/pos/i17741.scala index 7171aab83e4b..aa32e5a573d4 100644 --- a/tests/pos-special/fatal-warnings/i17741.scala +++ b/tests/pos/i17741.scala @@ -1,4 +1,4 @@ -//> using options -Wnonunit-statement +//> using options -Xfatal-warnings -Wnonunit-statement class Node() class Elem( @@ -29,4 +29,4 @@ object Main { ) } }: @annotation.nowarn() -} \ No newline at end of file +} diff --git a/tests/pos-special/fatal-warnings/nowarnannot.scala b/tests/pos/nowarnannot.scala similarity index 66% rename from tests/pos-special/fatal-warnings/nowarnannot.scala rename to tests/pos/nowarnannot.scala index 26e9713d0543..1710ae34b56f 100644 --- a/tests/pos-special/fatal-warnings/nowarnannot.scala +++ b/tests/pos/nowarnannot.scala @@ -1,3 +1,5 @@ +//> 
using options -Xfatal-warnings -Wvalue-discard + case class F(i: Int) object Main { From c7570c81c1eb524ce316360316be0e03fcffde9e Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 11 Apr 2024 17:41:28 +0200 Subject: [PATCH 102/465] Suppress "extension method will never be selected" for overrides When we're overriding an existing extension method, we don't have the liberty of renaming the method, so we shouldn't get warnings we can't do anything about. --- .../dotty/tools/dotc/typer/RefChecks.scala | 53 ++++++++++--------- tests/pos/ext-override.scala | 12 +++++ 2 files changed, 40 insertions(+), 25 deletions(-) create mode 100644 tests/pos/ext-override.scala diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index e3d78e3c5707..1397b05ec3b5 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -1108,6 +1108,8 @@ object RefChecks { * An extension method is hidden if it does not offer a parameter that is not subsumed * by the corresponding parameter of the member with the same name (or of all alternatives of an overload). * + * This check is suppressed if this method is an override. + * * For example, it is not possible to define a type-safe extension `contains` for `Set`, * since for any parameter type, the existing `contains` method will compile and would be used. * @@ -1125,31 +1127,32 @@ object RefChecks { * If the extension method is nullary, it is always hidden by a member of the same name. * (Either the member is nullary, or the reference is taken as the eta-expansion of the member.) 
*/ - def checkExtensionMethods(sym: Symbol)(using Context): Unit = if sym.is(Extension) then - extension (tp: Type) - def strippedResultType = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true).resultType - def firstExplicitParamTypes = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true).firstParamTypes - def hasImplicitParams = tp.stripPoly match { case mt: MethodType => mt.isImplicitMethod case _ => false } - val target = sym.info.firstExplicitParamTypes.head // required for extension method, the putative receiver - val methTp = sym.info.strippedResultType // skip leading implicits and the "receiver" parameter - def hidden = - target.nonPrivateMember(sym.name) - .filterWithPredicate: - member => - val memberIsImplicit = member.info.hasImplicitParams - val paramTps = - if memberIsImplicit then methTp.stripPoly.firstParamTypes - else methTp.firstExplicitParamTypes - - paramTps.isEmpty || memberIsImplicit && !methTp.hasImplicitParams || { - val memberParamTps = member.info.stripPoly.firstParamTypes - !memberParamTps.isEmpty - && memberParamTps.lengthCompare(paramTps) == 0 - && memberParamTps.lazyZip(paramTps).forall((m, x) => x frozen_<:< m) - } - .exists - if !target.typeSymbol.denot.isAliasType && !target.typeSymbol.denot.isOpaqueAlias && hidden - then report.warning(ExtensionNullifiedByMember(sym, target.typeSymbol), sym.srcPos) + def checkExtensionMethods(sym: Symbol)(using Context): Unit = + if sym.is(Extension) && !sym.nextOverriddenSymbol.exists then + extension (tp: Type) + def strippedResultType = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true).resultType + def firstExplicitParamTypes = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true).firstParamTypes + def hasImplicitParams = tp.stripPoly match { case mt: MethodType => mt.isImplicitMethod case _ => false } + val target = sym.info.firstExplicitParamTypes.head // required for extension method, the putative receiver + val methTp = sym.info.strippedResultType // 
skip leading implicits and the "receiver" parameter + def hidden = + target.nonPrivateMember(sym.name) + .filterWithPredicate: + member => + val memberIsImplicit = member.info.hasImplicitParams + val paramTps = + if memberIsImplicit then methTp.stripPoly.firstParamTypes + else methTp.firstExplicitParamTypes + + paramTps.isEmpty || memberIsImplicit && !methTp.hasImplicitParams || { + val memberParamTps = member.info.stripPoly.firstParamTypes + !memberParamTps.isEmpty + && memberParamTps.lengthCompare(paramTps) == 0 + && memberParamTps.lazyZip(paramTps).forall((m, x) => x frozen_<:< m) + } + .exists + if !target.typeSymbol.denot.isAliasType && !target.typeSymbol.denot.isOpaqueAlias && hidden + then report.warning(ExtensionNullifiedByMember(sym, target.typeSymbol), sym.srcPos) end checkExtensionMethods /** Verify that references in the user-defined `@implicitNotFound` message are valid. diff --git a/tests/pos/ext-override.scala b/tests/pos/ext-override.scala new file mode 100644 index 000000000000..d08439e13c9a --- /dev/null +++ b/tests/pos/ext-override.scala @@ -0,0 +1,12 @@ +//> using options -Xfatal-warnings + +trait Foo[T]: + extension (x: T) + def hi: String + +class Bla: + def hi: String = "hi" +object Bla: + given Foo[Bla] with + extension (x: Bla) + def hi: String = x.hi From 8c2970cfd914bfb58bcb8ce039814f97a299edea Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 1 Apr 2024 22:23:34 +0200 Subject: [PATCH 103/465] Fix caching of baseData in SymDenotations Without the setting, the outdated baseData cache will be used which produces incorrect baseClasses --- .../src/dotty/tools/dotc/transform/PostTyper.scala | 7 +++++++ .../src/dotty/tools/dotc/transform/init/Checker.scala | 10 +++++++--- .../src/dotty/tools/dotc/transform/init/Objects.scala | 4 ++-- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 60e4075e8674..cb192e581105 
100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -76,6 +76,13 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => override def changesMembers: Boolean = true // the phase adds super accessors and synthetic members + /** + * Serializable and AbstractFunction are added for scala2-library companion object of case class + * + * Ideally `compilingScala2StdLib` should be used, but it is initialized too late to be effective. + */ + override def changesParents: Boolean = true + override def transformPhase(using Context): Phase = thisPhase.next def newTransformer(using Context): Transformer = diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 692b3177786d..1a4141f3d495 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -36,12 +36,16 @@ class Checker extends Phase: traverser.traverse(unit.tpdTree) override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = - val checkCtx = ctx.fresh.setPhase(this.start) + val checkCtx = ctx.fresh.setPhase(this) val traverser = new InitTreeTraverser() - val unitContexts = units.map(unit => checkCtx.fresh.setCompilationUnit(unit)) val units0 = - for unitContext <- unitContexts if traverse(traverser)(using unitContext) yield unitContext.compilationUnit + for + unit <- units + unitContext = checkCtx.fresh.setCompilationUnit(unit) + if traverse(traverser)(using unitContext) + yield + unitContext.compilationUnit cancellable { val classes = traverser.getClasses() diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index cbbd97e3810e..793d4b41b174 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -11,6 +11,7 @@ import StdNames.* import Names.TermName import NameKinds.OuterSelectName import NameKinds.SuperAccessorName +import Decorators.* import ast.tpd.* import util.{ SourcePosition, NoSourcePosition } @@ -66,12 +67,11 @@ import dotty.tools.dotc.core.Flags.AbstractOrTrait * whole-program analysis. However, the check is not modular in terms of project boundaries. * */ -import Decorators.* class Objects(using Context @constructorOnly): val immutableHashSetBuider: Symbol = requiredClass("scala.collection.immutable.HashSetBuilder") // TODO: this should really be an annotation on the rhs of the field initializer rather than the field itself. val HashSetBuilder_rootNode: Symbol = immutableHashSetBuider.requiredValue("rootNode") - + val whiteList = Set(HashSetBuilder_rootNode) // ----------------------------- abstract domain ----------------------------- From 9bdf3160c9f565d0c52b6ce0ca0e9067ba533746 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 1 Apr 2024 22:29:19 +0200 Subject: [PATCH 104/465] Add test --- tests/init-global/pos/scala2-library.scala | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 tests/init-global/pos/scala2-library.scala diff --git a/tests/init-global/pos/scala2-library.scala b/tests/init-global/pos/scala2-library.scala new file mode 100644 index 000000000000..d4d693b7368a --- /dev/null +++ b/tests/init-global/pos/scala2-library.scala @@ -0,0 +1,2 @@ +//> using options -Ysafe-init-global -Ycompile-scala2-library +case class UninitializedFieldError(msg: String) extends RuntimeException(msg) From ae1a9e5ad315f6299a60ea0b4fe724751f95c001 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 1 Apr 2024 23:10:12 +0200 Subject: [PATCH 105/465] Fix -Ycheck:all error: mismatch of parents --- compiler/src/dotty/tools/dotc/transform/PostTyper.scala | 4 +++- tests/init-global/pos/scala2-library.scala | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git 
a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index cb192e581105..26587a320bf6 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -583,7 +583,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if !sym.hasAnnotation(defn.ExperimentalAnnot) && ctx.settings.experimental.value && isTopLevelDefinitionInSource(sym) then sym.addAnnotation(ExperimentalAnnotation("Added by -experimental", sym.span)) - private def scala2LibPatch(tree: TypeDef)(using Context) = + // It needs to run at the phase of the postTyper --- otherwise, the test of the symbols will use + // the transformed denotation with added `Serializable` and `AbstractFunction`. + private def scala2LibPatch(tree: TypeDef)(using Context) = atPhase(thisPhase): val sym = tree.symbol if compilingScala2StdLib && sym.is(ModuleClass) then // Add Serializable to companion objects of serializable classes, diff --git a/tests/init-global/pos/scala2-library.scala b/tests/init-global/pos/scala2-library.scala index d4d693b7368a..8fa9245aebe0 100644 --- a/tests/init-global/pos/scala2-library.scala +++ b/tests/init-global/pos/scala2-library.scala @@ -1,2 +1,2 @@ -//> using options -Ysafe-init-global -Ycompile-scala2-library +//> using options -Ycompile-scala2-library case class UninitializedFieldError(msg: String) extends RuntimeException(msg) From 9f4188958b28a7e0e7fc8fc1a2167f56ee709932 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Tue, 2 Apr 2024 19:03:02 +0200 Subject: [PATCH 106/465] Only set PostTyper.changesParents to true for scala2-lib --- compiler/src/dotty/tools/dotc/Run.scala | 8 ++++---- .../src/dotty/tools/dotc/core/Contexts.scala | 2 +- .../src/dotty/tools/dotc/core/Phases.scala | 16 +++++++++++++--- .../tools/dotc/transform/PostTyper.scala | 19 ++++++++++++++----- 4 files changed, 32 insertions(+), 13 deletions(-) diff --git 
a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index a7107656889d..ffc54e969b1f 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -293,10 +293,13 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if (ctx.settings.YtestPickler.value) List("pickler") else ctx.settings.YstopAfter.value + val runCtx = ctx.fresh + runCtx.setProfiler(Profiler()) + val pluginPlan = ctx.base.addPluginPhases(ctx.base.phasePlan) val phases = ctx.base.fusePhases(pluginPlan, ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) - ctx.base.usePhases(phases) + ctx.base.usePhases(phases, runCtx) if ctx.settings.YnoDoubleBindings.value then ctx.base.checkNoDoubleBindings = true @@ -340,9 +343,6 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint profiler.finished() } - val runCtx = ctx.fresh - runCtx.setProfiler(Profiler()) - unfusedPhases.foreach(_.initContext(runCtx)) val fusedPhases = runCtx.base.allPhases if ctx.settings.explainCyclic.value then runCtx.setProperty(CyclicReference.Trace, new CyclicReference.Trace()) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 73fea84a640b..47006bdbe561 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -892,7 +892,7 @@ object Contexts { val definitions: Definitions = new Definitions // Set up some phases to get started */ - usePhases(List(SomePhase)) + usePhases(List(SomePhase), FreshContext(this)) /** Initializes the `ContextBase` with a starting context. * This initializes the `platform` and the `definitions`. 
diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 2ce08167e815..89ea4dd6aa50 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -126,7 +126,7 @@ object Phases { * The list should never contain NoPhase. * if fusion is enabled, phases in same subgroup will be fused to single phase. */ - final def usePhases(phasess: List[Phase], fuse: Boolean = true): Unit = { + final def usePhases(phasess: List[Phase], runCtx: FreshContext, fuse: Boolean = true): Unit = { val flatPhases = collection.mutable.ListBuffer[Phase]() @@ -161,11 +161,21 @@ object Phases { phase match { case p: MegaPhase => val miniPhases = p.miniPhases - miniPhases.foreach{ phase => + for phase <- miniPhases do checkRequirements(phase) - phase.init(this, nextPhaseId)} + // Given phases a chance to initialize state based on the run context. + // + // `phase.initContext` should be called before `phase.init` as the later calls abstract methods + // `changesMembers` and `changeParents` which may depend on the run context. + // + // See `PostTyper.changeParents` + phase.initContext(runCtx) + phase.init(this, nextPhaseId) + end for p.init(this, miniPhases.head.id, miniPhases.last.id) case _ => + // See comment above about the ordering of the two calls. 
+ phase.initContext(runCtx) phase.init(this, nextPhaseId) checkRequirements(phase) } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 26587a320bf6..30c994a1777c 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -77,19 +77,28 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => override def changesMembers: Boolean = true // the phase adds super accessors and synthetic members /** - * Serializable and AbstractFunction are added for scala2-library companion object of case class - * - * Ideally `compilingScala2StdLib` should be used, but it is initialized too late to be effective. + * Serializable and AbstractFunction1 are added for companion objects of case classes in scala2-library */ - override def changesParents: Boolean = true + override def changesParents: Boolean = + if !initContextCalled then + throw new Exception("Calling changesParents before initContext, should call initContext first") + compilingScala2StdLib override def transformPhase(using Context): Phase = thisPhase.next def newTransformer(using Context): Transformer = new PostTyperTransformer + /** + * Used to check that `changesParents` is called after `initContext`. + * + * This contract is easy to break and results in subtle bugs. 
+ */ + private var initContextCalled = false + private var compilingScala2StdLib = false override def initContext(ctx: FreshContext): Unit = + initContextCalled = true compilingScala2StdLib = ctx.settings.YcompileScala2Library.value(using ctx) val superAcc: SuperAccessors = new SuperAccessors(thisPhase) @@ -584,7 +593,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => sym.addAnnotation(ExperimentalAnnotation("Added by -experimental", sym.span)) // It needs to run at the phase of the postTyper --- otherwise, the test of the symbols will use - // the transformed denotation with added `Serializable` and `AbstractFunction`. + // the transformed denotation with added `Serializable` and `AbstractFunction1`. private def scala2LibPatch(tree: TypeDef)(using Context) = atPhase(thisPhase): val sym = tree.symbol if compilingScala2StdLib && sym.is(ModuleClass) then From a3d00984e339962566cd6838dc516b4c2dc448be Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 8 Apr 2024 19:37:46 +0200 Subject: [PATCH 107/465] Document -Ycheck:all for scala2-library-bootstrapped Discussed with @nicolasstucki offline --- project/Build.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/project/Build.scala b/project/Build.scala index fef7a2bcb60b..69441d0aaa01 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1053,6 +1053,7 @@ object Build { settings(commonBootstrappedSettings). settings(scala2LibraryBootstrappedSettings). settings(moduleName := "scala2-library") + // -Ycheck:all is set in project/scripts/scala2-library-tasty-mima.sh /** Scala 2 library compiled by dotty using the latest published sources of the library. 
* From 104557127ec82fc9237a0d7949fdcb737552ec18 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 11 Apr 2024 20:41:44 +0200 Subject: [PATCH 108/465] Retain default parameters with `export` Default parameters need to have the `HasDefault` flag set to work properly, it seems that without this flag they were still selected but only under join compilation. While we're at it, we define a full set of flags that might be exported. Note that Given/Implicit/Erased were already set by `tpd.DefDef` and Inlined doesn't seem to make a difference in practice since the body of the exported inline def is manually constructed and doesn't contain proxies either way. --- compiler/src/dotty/tools/dotc/core/Flags.scala | 3 +++ compiler/src/dotty/tools/dotc/typer/Namer.scala | 4 +++- tests/pos/export-param-flags/A_1.scala | 5 +++++ tests/pos/export-param-flags/B_2.scala | 2 ++ tests/printing/export-param-flags.check | 13 +++++++++++++ tests/printing/export-param-flags.scala | 5 +++++ 6 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 tests/pos/export-param-flags/A_1.scala create mode 100644 tests/pos/export-param-flags/B_2.scala create mode 100644 tests/printing/export-param-flags.check create mode 100644 tests/printing/export-param-flags.scala diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 1f38289161cd..8110bc769d4f 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -535,6 +535,9 @@ object Flags { /** Flags retained in term export forwarders */ val RetainedExportTermFlags = Infix | Given | Implicit | Inline | Transparent | Erased | HasDefaultParams | NoDefaultParams | ExtensionMethod + /** Flags retained in parameters of term export forwarders */ + val RetainedExportTermParamFlags = Given | Implicit | Erased | HasDefault | Inline + val MandatoryExportTermFlags = Exported | Method | Final /** Flags retained in type export 
forwarders */ diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 1016fe467a0a..bad78b6714e8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1293,7 +1293,7 @@ class Namer { typer: Typer => getter => addForwarder( getter.name.asTermName, getter.asSeenFrom(path.tpe), span)) - // adding annotations at the parameter level + // adding annotations and flags at the parameter level // TODO: This probably needs to be filtered to avoid adding some annotation // such as MacroAnnotations if sym.is(Method) then @@ -1301,6 +1301,8 @@ class Namer { typer: Typer => (origParameter, exportedParameter) <- orig.lazyZip(forwarded) do exportedParameter.addAnnotations(origParameter.annotations) + if exportedParameter.isTerm then + exportedParameter.setFlag(origParameter.flags & RetainedExportTermParamFlags) end addForwarder def addForwardersNamed(name: TermName, alias: TermName, span: Span): Unit = diff --git a/tests/pos/export-param-flags/A_1.scala b/tests/pos/export-param-flags/A_1.scala new file mode 100644 index 000000000000..1ac8d10ba930 --- /dev/null +++ b/tests/pos/export-param-flags/A_1.scala @@ -0,0 +1,5 @@ +object A: + def defaultParam(x: Int = 1) = x + +object Exported: + export A.* diff --git a/tests/pos/export-param-flags/B_2.scala b/tests/pos/export-param-flags/B_2.scala new file mode 100644 index 000000000000..0387f66d7aa7 --- /dev/null +++ b/tests/pos/export-param-flags/B_2.scala @@ -0,0 +1,2 @@ +object B: + val x = Exported.defaultParam() diff --git a/tests/printing/export-param-flags.check b/tests/printing/export-param-flags.check new file mode 100644 index 000000000000..ffab6f77c93d --- /dev/null +++ b/tests/printing/export-param-flags.check @@ -0,0 +1,13 @@ +[[syntax trees at end of typer]] // tests/printing/export-param-flags.scala +package { + final lazy module val A: A = new A() + final module class A() extends Object() { this: 
A.type => + inline def inlinedParam(inline x: Int): Int = x.+(x):Int + } + final lazy module val Exported: Exported = new Exported() + final module class Exported() extends Object() { this: Exported.type => + export A.* + final inline def inlinedParam(inline x: Int): Int = A.inlinedParam(x) + } +} + diff --git a/tests/printing/export-param-flags.scala b/tests/printing/export-param-flags.scala new file mode 100644 index 000000000000..cad2d3c8fee8 --- /dev/null +++ b/tests/printing/export-param-flags.scala @@ -0,0 +1,5 @@ +object A: + inline def inlinedParam(inline x: Int): Int = x + x + +object Exported: + export A.* From e60158c9f79713d44ab3265612bb1a9b1356eef9 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Wed, 13 Mar 2024 14:53:56 +0100 Subject: [PATCH 109/465] Publish `scala2-library-cc-tasty` --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 69441d0aaa01..41de75ca7c64 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2116,7 +2116,7 @@ object Build { // FIXME: we do not aggregate `bin` because its tests delete jars, thus breaking other tests def asDottyRoot(implicit mode: Mode): Project = project.withCommonSettings. aggregate(`scala3-interfaces`, dottyLibrary, dottyCompiler, tastyCore, `scala3-sbt-bridge`, scala3PresentationCompiler). - bootstrappedAggregate(`scala2-library-tasty`, `scala3-language-server`, `scala3-staging`, + bootstrappedAggregate(`scala2-library-tasty`, `scala2-library-cc-tasty`, `scala3-language-server`, `scala3-staging`, `scala3-tasty-inspector`, `scala3-library-bootstrappedJS`, scaladoc). dependsOn(tastyCore). dependsOn(dottyCompiler). 
From 07622a14108f77cbdf4356a9edfb1ec666392045 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 12 Apr 2024 11:12:58 +0200 Subject: [PATCH 110/465] Allow SAM types to contain multiple refinements --- compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- tests/run/i18315.scala | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ba48b6a0f2e6..47ba9833fc2f 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5965,7 +5965,7 @@ object Types extends TypeUtils { def withRefinements(toType: Type, fromTp: Type): Type = fromTp.dealias match case RefinedType(fromParent, name, info: AliasingBounds) if tp0.member(name).exists => val parent1 = withRefinements(toType, fromParent) - RefinedType(toType, name, info) + RefinedType(parent1, name, info) case _ => toType val tp = withRefinements(tp0, origTp) diff --git a/tests/run/i18315.scala b/tests/run/i18315.scala index 85824920efbd..51a80420632d 100644 --- a/tests/run/i18315.scala +++ b/tests/run/i18315.scala @@ -7,9 +7,16 @@ trait Sam2: type T def apply(x: T): T +trait Sam3: + type T + type U + def apply(x: T): U + object Test: def main(args: Array[String]): Unit = val s1: Sam1 { type T = String } = x => x.trim s1.apply("foo") val s2: Sam2 { type T = Int } = x => x + 1 s2.apply(1) + val s3: Sam3 { type T = Int; type U = String } = x => x.toString + s3.apply(2) From 97313ed5c6399fe0395d6a49877eaf74436e63a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Fri, 12 Apr 2024 12:56:42 +0200 Subject: [PATCH 111/465] Stabilise returned completions by improving deduplication + extra completions for constructors (#19976) This PR doesn't address all issues. 
In the future we need to get rid of https://github.com/scala/scala3/compare/main...rochala:improved-deduplication-and-constructors-search?expand=1#diff-035851592480495dfdb20da6b615ec7dd77b3db70cda46aba56230d8cd690773R157-R167 as it is not working as intended. The future PR of shortened type printer refactor includes a proper way to split extension params. `CompletionValue.Interpolator` should also be removed, and instead we should return workspace / completions as it is now hard to sort those completions. Next refactor is reusing completion affix for other kinds of completions such as case completions, so prefix / suffix is handled in single place. This PR will unblock fuzzy search in the compiler because of stabilizing returned completions. --- .../tools/dotc/interactive/Completion.scala | 90 +-- .../tools/languageserver/CompletionTest.scala | 29 +- .../dotty/tools/pc/MetalsInteractive.scala | 2 +- .../dotty/tools/pc/PcInlayHintsProvider.scala | 1 - .../tools/pc/SignatureHelpProvider.scala | 6 - .../pc/completions/CompletionAffix.scala | 95 +++ .../pc/completions/CompletionProvider.scala | 90 +-- .../pc/completions/CompletionSuffix.scala | 39 -- .../pc/completions/CompletionValue.scala | 66 +- .../tools/pc/completions/Completions.scala | 328 ++++++---- .../completions/InterpolatorCompletions.scala | 35 +- .../pc/completions/MatchCaseCompletions.scala | 2 - .../pc/completions/NamedArgCompletions.scala | 3 +- .../tools/pc/base/BaseCompletionSuite.scala | 7 +- .../tests/completion/CompletionArgSuite.scala | 6 +- .../tests/completion/CompletionDocSuite.scala | 32 +- .../CompletionExtraConstructorSuite.scala | 565 ++++++++++++++++++ .../CompletionInterpolatorSuite.scala | 69 ++- .../completion/CompletionKeywordSuite.scala | 9 +- .../completion/CompletionOverrideSuite.scala | 2 +- .../completion/CompletionSnippetSuite.scala | 70 ++- .../pc/tests/completion/CompletionSuite.scala | 213 ++----- .../completion/CompletionWorkspaceSuite.scala | 13 +- 23 files changed, 1250 
insertions(+), 522 deletions(-) create mode 100644 presentation-compiler/src/main/dotty/tools/pc/completions/CompletionAffix.scala delete mode 100644 presentation-compiler/src/main/dotty/tools/pc/completions/CompletionSuffix.scala create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 025a2022500d..44407daf600c 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -86,26 +86,21 @@ object Completion: * * Otherwise, provide no completion suggestion. */ - def completionMode(path: List[untpd.Tree], pos: SourcePosition): Mode = - - val completionSymbolKind: Mode = - path match - case GenericImportSelector(sel) => - if sel.imported.span.contains(pos.span) then Mode.ImportOrExport // import scala.@@ - else if sel.isGiven && sel.bound.span.contains(pos.span) then Mode.ImportOrExport - else Mode.None // import scala.{util => u@@} - case GenericImportOrExport(_) => Mode.ImportOrExport | Mode.Scope // import TrieMa@@ - case untpd.Literal(Constants.Constant(_: String)) :: _ => Mode.Term | Mode.Scope // literal completions - case (ref: untpd.RefTree) :: _ => - val maybeSelectMembers = if ref.isInstanceOf[untpd.Select] then Mode.Member else Mode.Scope - - if (ref.name.isTermName) Mode.Term | maybeSelectMembers - else if (ref.name.isTypeName) Mode.Type | maybeSelectMembers - else Mode.None - - case _ => Mode.None - - completionSymbolKind + def completionMode(path: List[untpd.Tree], pos: SourcePosition): Mode = path match + case GenericImportSelector(sel) => + if sel.imported.span.contains(pos.span) then Mode.ImportOrExport // import scala.@@ + else if sel.isGiven && sel.bound.span.contains(pos.span) then Mode.ImportOrExport + else Mode.None // import scala.{util => u@@} + case 
GenericImportOrExport(_) => Mode.ImportOrExport | Mode.Scope // import TrieMa@@ + case untpd.Literal(Constants.Constant(_: String)) :: _ => Mode.Term | Mode.Scope // literal completions + case (ref: untpd.RefTree) :: _ => + val maybeSelectMembers = if ref.isInstanceOf[untpd.Select] then Mode.Member else Mode.Scope + + if (ref.name.isTermName) Mode.Term | maybeSelectMembers + else if (ref.name.isTypeName) Mode.Type | maybeSelectMembers + else Mode.None + + case _ => Mode.None /** When dealing with in varios palces we check to see if they are * due to incomplete backticks. If so, we ensure we get the full prefix @@ -130,7 +125,7 @@ object Completion: def completionPrefix(path: List[untpd.Tree], pos: SourcePosition)(using Context): String = def fallback: Int = var i = pos.point - 1 - while i >= 0 && Chars.isIdentifierPart(pos.source.content()(i)) do i -= 1 + while i >= 0 && Character.isUnicodeIdentifierPart(pos.source.content()(i)) do i -= 1 i + 1 path match @@ -278,6 +273,32 @@ object Completion: if denot.isType then denot.symbol.showFullName else denot.info.widenTermRefExpr.show + /** Include in completion sets only symbols that + * 1. is not absent (info is not NoType) + * 2. are not a primary constructor, + * 3. have an existing source symbol, + * 4. are the module class in case of packages, + * 5. are mutable accessors, to exclude setters for `var`, + * 6. symbol is not a package object + * 7. symbol is not an artifact of the compiler + * 8. symbol is not a constructor proxy module when in type completion mode + * 9. 
have same term/type kind as name prefix given so far + */ + def isValidCompletionSymbol(sym: Symbol, completionMode: Mode)(using Context): Boolean = + sym.exists && + !sym.isAbsent() && + !sym.isPrimaryConstructor && + sym.sourceSymbol.exists && + (!sym.is(Package) || sym.is(ModuleClass)) && + !sym.isAllOf(Mutable | Accessor) && + !sym.isPackageObject && + !sym.is(Artifact) && + !(completionMode.is(Mode.Type) && sym.isAllOf(ConstructorProxyModule)) && + ( + (completionMode.is(Mode.Term) && (sym.isTerm || sym.is(ModuleClass)) + || (completionMode.is(Mode.Type) && (sym.isType || sym.isStableMember))) + ) + given ScopeOrdering(using Context): Ordering[Seq[SingleDenotation]] with val order = List(defn.ScalaPredefModuleClass, defn.ScalaPackageClass, defn.JavaLangPackageClass) @@ -531,34 +552,13 @@ object Completion: extMethodsWithAppliedReceiver.groupByName /** Include in completion sets only symbols that - * 1. start with given name prefix, and - * 2. is not absent (info is not NoType) - * 3. are not a primary constructor, - * 4. have an existing source symbol, - * 5. are the module class in case of packages, - * 6. are mutable accessors, to exclude setters for `var`, - * 7. symbol is not a package object - * 8. symbol is not an artifact of the compiler - * 9. have same term/type kind as name prefix given so far + * 1. match the filter method, + * 2. 
satisfy [[Completion.isValidCompletionSymbol]] */ private def include(denot: SingleDenotation, nameInScope: Name)(using Context): Boolean = - val sym = denot.symbol - - nameInScope.startsWith(prefix) && - sym.exists && completionsFilter(NoType, nameInScope) && - !sym.isAbsent() && - !sym.isPrimaryConstructor && - sym.sourceSymbol.exists && - (!sym.is(Package) || sym.is(ModuleClass)) && - !sym.isAllOf(Mutable | Accessor) && - !sym.isPackageObject && - !sym.is(Artifact) && - ( - (mode.is(Mode.Term) && (sym.isTerm || sym.is(ModuleClass)) - || (mode.is(Mode.Type) && (sym.isType || sym.isStableMember))) - ) + isValidCompletionSymbol(denot.symbol, mode) private def extractRefinements(site: Type)(using Context): Seq[SingleDenotation] = site match diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index d0ceb37c07ba..6ef8bee8a5d2 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -954,14 +954,8 @@ class CompletionTest { .noCompletions() @Test def i13624_annotType: Unit = - val expected1 = Set( - ("MyAnnotation", Class, "MyAnnotation"), - ("MyAnnotation", Module, "MyAnnotation"), - ) - val expected2 = Set( - ("MyAnnotation", Class, "Foo.MyAnnotation"), - ("MyAnnotation", Module, "Foo.MyAnnotation"), - ) + val expected1 = Set(("MyAnnotation", Class, "MyAnnotation")) + val expected2 = Set(("MyAnnotation", Class, "Foo.MyAnnotation")) code"""object Foo{ | class MyAnnotation extends annotation.StaticAnnotation |} @@ -984,14 +978,8 @@ class CompletionTest { @Test def i13624_annotation : Unit = code"""@annotation.implicitNot${m1} |@annotation.implicitNotFound @mai${m2}""" - .completion(m1, - ("implicitNotFound", Class, "scala.annotation.implicitNotFound"), - ("implicitNotFound", Module, "scala.annotation.implicitNotFound"), - ) - .completion(m2, - ("main", Class, "main"), - 
("main", Module, "main"), - ) + .completion(m1, ("implicitNotFound", Class, "scala.annotation.implicitNotFound")) + .completion(m2, ("main", Class, "main")) @Test def i13623_annotation : Unit = code"""import annot${m1}""" @@ -1489,7 +1477,6 @@ class CompletionTest { ("xDef", Method, "=> Int"), ("xVal", Field, "Int"), ("xObject", Module, "Foo.xObject"), - ("xClass", Module, "Foo.xClass"), ("xClass", Class, "Foo.xClass"))) } @@ -1557,9 +1544,7 @@ class CompletionTest { |object T: | extension (x: Test.TestSel$m1) |""" - .completion(m1, Set( - ("TestSelect", Module, "Test.TestSelect"), ("TestSelect", Class, "Test.TestSelect") - )) + .completion(m1, Set(("TestSelect", Class, "Test.TestSelect"))) @Test def extensionDefinitionCompletionsSelectNested: Unit = code"""|object Test: @@ -1568,9 +1553,7 @@ class CompletionTest { |object T: | extension (x: Test.Test2.TestSel$m1) |""" - .completion(m1, Set( - ("TestSelect", Module, "Test.Test2.TestSelect"), ("TestSelect", Class, "Test.Test2.TestSelect") - )) + .completion(m1, Set(("TestSelect", Class, "Test.Test2.TestSelect"))) @Test def extensionDefinitionCompletionsSelectInside: Unit = code"""|object Test: diff --git a/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala b/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala index 381e0eaec6a5..648c59725742 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala @@ -5,7 +5,7 @@ import scala.annotation.tailrec import dotc.* import ast.*, tpd.* -import core.*, Contexts.*, Decorators.*, Flags.*, Names.*, Symbols.*, Types.* +import core.*, Contexts.*, Flags.*, Names.*, Symbols.*, Types.* import interactive.* import util.* import util.SourcePosition diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala index 9b40f1e6777a..6d634f56363c 100644 
--- a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala @@ -24,7 +24,6 @@ import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans.Span -import dotty.tools.pc.IndexedContext import org.eclipse.lsp4j.InlayHint import org.eclipse.lsp4j.InlayHintKind diff --git a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala index f7797efbfb27..80317185458b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala @@ -6,20 +6,14 @@ import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver -import dotty.tools.dotc.parsing.Tokens.closingRegionTokens -import dotty.tools.dotc.reporting.ErrorMessageID -import dotty.tools.dotc.reporting.ExpectedTokenButFound import dotty.tools.dotc.util.Signatures import dotty.tools.dotc.util.SourceFile -import dotty.tools.dotc.util.Spans -import dotty.tools.dotc.util.Spans.Span import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam import dotty.tools.pc.utils.MtagsEnrichments.* import org.eclipse.lsp4j as l import scala.jdk.CollectionConverters.* -import scala.jdk.OptionConverters.* import scala.meta.internal.metals.ReportContext import scala.meta.pc.OffsetParams import scala.meta.pc.SymbolDocumentation diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionAffix.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionAffix.scala new file mode 100644 index 000000000000..4ed58c773a7c --- /dev/null +++ 
b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionAffix.scala @@ -0,0 +1,95 @@ +package dotty.tools.pc.completions + +import org.eclipse.lsp4j.Position +import org.eclipse.lsp4j.Range + +/** + * @param suffixes which we should insert + * @param prefixes which we should insert + * @param snippet which suffix should we insert the snippet $0 + */ +case class CompletionAffix( + suffixes: Set[Suffix], + prefixes: Set[Prefix], + snippet: Suffix, + currentPrefix: Option[String], +): + def addLabelSnippet = suffixes.exists(_.kind == SuffixKind.Bracket) + def hasSnippet = snippet.kind != SuffixKind.NoSuffix + def chain(copyFn: CompletionAffix => CompletionAffix) = copyFn(this) + def withNewSuffix(kind: Suffix) = this.copy(suffixes = suffixes + kind) + def withNewPrefix(kind: Prefix) = this.copy(prefixes = prefixes + kind) + def withCurrentPrefix(currentPrefix: String) = this.copy(currentPrefix = Some(currentPrefix)) + def withNewSuffixSnippet(suffix: Suffix) = + this.copy(suffixes = suffixes + suffix, snippet = suffix) + + def nonEmpty: Boolean = suffixes.nonEmpty || prefixes.nonEmpty + + def toSuffix: String = + def loop(suffixes: List[SuffixKind]): String = + def cursor = if suffixes.head == snippet.kind then "$0" else "" + suffixes match + case SuffixKind.Brace :: tail => s"($cursor)" + loop(tail) + case SuffixKind.Bracket :: tail => s"[$cursor]" + loop(tail) + case SuffixKind.Template :: tail => s" {$cursor}" + loop(tail) + case _ => "" + loop(suffixes.toList.map(_.kind)) + + def toSuffixOpt: Option[String] = + val edit = toSuffix + if edit.nonEmpty then Some(edit) else None + + + given Ordering[Position] = Ordering.by(elem => (elem.getLine, elem.getCharacter)) + + def toInsertRange: Option[Range] = + import scala.language.unsafeNulls + + val ranges = prefixes.collect: + case Affix(_, Some(range)) => range + .toList + for + startPos <- ranges.map(_.getStart).minOption + endPos <- ranges.map(_.getEnd).maxOption + yield Range(startPos, endPos) + + 
private def loopPrefix(prefixes: List[PrefixKind]): String = + prefixes match + case PrefixKind.New :: tail => "new " + loopPrefix(tail) + case _ => "" + + /** + * We need to insert previous prefix, but we don't want to display it in the label i.e. + * ```scala + * scala.util.Tr@@ + * ```` + * should return `new Try[T]: Try[T]` + * but insert `new scala.util.Try` + * + */ + def toInsertPrefix: String = + loopPrefix(prefixes.toList.map(_.kind)) + currentPrefix.getOrElse("") + + def toPrefix: String = + loopPrefix(prefixes.toList.map(_.kind)) + +end CompletionAffix + +object CompletionAffix: + val empty = CompletionAffix( + suffixes = Set.empty, + prefixes = Set.empty, + snippet = Affix(SuffixKind.NoSuffix), + currentPrefix = None, + ) + +enum SuffixKind: + case Brace, Bracket, Template, NoSuffix + +enum PrefixKind: + case New + +type Suffix = Affix[SuffixKind] +type Prefix = Affix[PrefixKind] + +private case class Affix[+T](kind: T, insertRange: Option[Range] = None) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 2beb4460db56..7e02c23229e8 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -153,13 +153,36 @@ class CompletionProvider( val printer = ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using indexedContext) + val underlyingCompletion = completion match + case CompletionValue.ExtraMethod(_, underlying) => underlying + case other => other + // For overloaded signatures we get multiple symbols, so we need // to recalculate the description - // related issue https://github.com/scala/scala3/issues/11941 - lazy val kind: CompletionItemKind = completion.completionItemKind - val description = completion.description(printer) - val label = completion.labelWithDescription(printer) 
- val ident = completion.insertText.getOrElse(completion.label) + // related issue https://github.com/lampepfl/scala3/issues/11941 + lazy val kind: CompletionItemKind = underlyingCompletion.completionItemKind + val description = underlyingCompletion.description(printer) + val label = underlyingCompletion.labelWithDescription(printer) + val ident = underlyingCompletion.insertText.getOrElse(underlyingCompletion.label) + + lazy val isInStringInterpolation = + path match + // s"My name is $name" + case (_: Ident) :: (_: SeqLiteral) :: (_: Typed) :: Apply( + Select(Apply(Select(Select(_, name), _), _), _), + _ + ) :: _ => + name == StdNames.nme.StringContext + // "My name is $name" + case Literal(Constant(_: String)) :: _ => + true + case _ => + false + + def wrapInBracketsIfRequired(newText: String): String = + if underlyingCompletion.snippetAffix.nonEmpty && isInStringInterpolation then + "{" + newText + "}" + else newText def mkItem( newText: String, @@ -170,25 +193,25 @@ class CompletionProvider( val editRange = if newText.startsWith(oldText) then completionPos.stripSuffixEditRange else completionPos.toEditRange - val textEdit = new TextEdit(range.getOrElse(editRange), newText) + val textEdit = new TextEdit(range.getOrElse(editRange), wrapInBracketsIfRequired(newText)) val item = new CompletionItem(label) item.setSortText(f"${idx}%05d") item.setDetail(description) - item.setFilterText(completion.filterText.getOrElse(completion.label)) + item.setFilterText(underlyingCompletion.filterText.getOrElse(underlyingCompletion.label)) item.setTextEdit(textEdit) - item.setAdditionalTextEdits((completion.additionalEdits ++ additionalEdits).asJava) - completion.insertMode.foreach(item.setInsertTextMode) + item.setAdditionalTextEdits((underlyingCompletion.additionalEdits ++ additionalEdits).asJava) + underlyingCompletion.insertMode.foreach(item.setInsertTextMode) - val data = completion.completionData(buildTargetIdentifier) + val data = 
underlyingCompletion.completionData(buildTargetIdentifier) item.setData(data.toJson) - item.setTags(completion.lspTags.asJava) + item.setTags(underlyingCompletion.lspTags.asJava) if config.isCompletionSnippetsEnabled() then item.setInsertTextFormat(InsertTextFormat.Snippet) - completion.command.foreach { command => + underlyingCompletion.command.foreach { command => item.setCommand(new Command("", command)) } @@ -196,21 +219,8 @@ class CompletionProvider( item end mkItem - val completionTextSuffix = completion.snippetSuffix.toEdit - - lazy val isInStringInterpolation = - path match - // s"My name is $name" - case (_: Ident) :: (_: SeqLiteral) :: (_: Typed) :: Apply( - Select(Apply(Select(Select(_, name), _), _), _), - _ - ) :: _ => - name == StdNames.nme.StringContext - // "My name is $name" - case Literal(Constant(_: String)) :: _ => - true - case _ => - false + val completionTextSuffix = underlyingCompletion.snippetAffix.toSuffix + val completionTextPrefix = underlyingCompletion.snippetAffix.toInsertPrefix lazy val backtickSoftKeyword = path match case (_: Select) :: _ => false @@ -232,7 +242,7 @@ class CompletionProvider( mkItem(nameEdit.getNewText().nn, other.toList, range = Some(nameEdit.getRange().nn)) case _ => mkItem( - v.insertText.getOrElse( ident.backticked(backtickSoftKeyword) + completionTextSuffix), + v.insertText.getOrElse(completionTextPrefix + ident.backticked(backtickSoftKeyword) + completionTextSuffix), edits.edits, range = v.range ) @@ -242,25 +252,25 @@ class CompletionProvider( case IndexedContext.Result.InScope => mkItem( v.insertText.getOrElse( - ident.backticked( - backtickSoftKeyword - ) + completionTextSuffix + completionTextPrefix + ident.backticked(backtickSoftKeyword) + completionTextSuffix ), range = v.range, ) + // Special case when symbol is out of scope, and there is no auto import. 
+ // It means that it will use fully qualified path case _ if isInStringInterpolation => mkItem( - "{" + sym.fullNameBackticked + completionTextSuffix + "}", + "{" + completionTextPrefix + sym.fullNameBackticked + completionTextSuffix + "}", range = v.range ) case _ if v.isExtensionMethod => mkItem( - ident.backticked(backtickSoftKeyword) + completionTextSuffix, + completionTextPrefix + ident.backticked(backtickSoftKeyword) + completionTextSuffix, range = v.range ) case _ => mkItem( - sym.fullNameBackticked( + completionTextPrefix + sym.fullNameBackticked( backtickSoftKeyword ) + completionTextSuffix, range = v.range @@ -270,18 +280,16 @@ class CompletionProvider( end match end mkItemWithImports - completion match + underlyingCompletion match case v: (CompletionValue.Workspace | CompletionValue.Extension | CompletionValue.ImplicitClass) => mkItemWithImports(v) case v: CompletionValue.Interpolator if v.isWorkspace || v.isExtension => mkItemWithImports(v) case _ => - val insert = - completion.insertText.getOrElse(ident.backticked(backtickSoftKeyword)) - mkItem( - insert + completionTextSuffix, - range = completion.range - ) + val nameText = underlyingCompletion.insertText.getOrElse(ident.backticked(backtickSoftKeyword)) + val nameWithAffixes = completionTextPrefix + nameText + completionTextSuffix + mkItem(nameWithAffixes, range = underlyingCompletion.range) + end match end completionItems end CompletionProvider diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionSuffix.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionSuffix.scala deleted file mode 100644 index 580d65089737..000000000000 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionSuffix.scala +++ /dev/null @@ -1,39 +0,0 @@ -package dotty.tools.pc.completions - -/** - * @param suffixes which we should insert - * @param snippet which suffix should we insert the snippet $0 - */ -case class CompletionSuffix( - suffixes: 
Set[SuffixKind], - snippet: SuffixKind, -): - def addLabelSnippet = suffixes.contains(SuffixKind.Bracket) - def hasSnippet = snippet != SuffixKind.NoSuffix - def chain(copyFn: CompletionSuffix => CompletionSuffix) = copyFn(this) - def withNewSuffix(kind: SuffixKind) = - CompletionSuffix(suffixes + kind, snippet) - def withNewSuffixSnippet(kind: SuffixKind) = - CompletionSuffix(suffixes + kind, kind) - def toEdit: String = - def loop(suffixes: List[SuffixKind]): String = - def cursor = if suffixes.head == snippet then "$0" else "" - suffixes match - case SuffixKind.Brace :: tail => s"($cursor)" + loop(tail) - case SuffixKind.Bracket :: tail => s"[$cursor]" + loop(tail) - case SuffixKind.Template :: tail => s" {$cursor}" + loop(tail) - case _ => "" - loop(suffixes.toList) - def toEditOpt: Option[String] = - val edit = toEdit - if edit.nonEmpty then Some(edit) else None -end CompletionSuffix - -object CompletionSuffix: - val empty = CompletionSuffix( - suffixes = Set.empty, - snippet = SuffixKind.NoSuffix, - ) - -enum SuffixKind: - case Brace, Bracket, Template, NoSuffix diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala index 2810fe728b9a..e1877a1a9c88 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala @@ -40,7 +40,7 @@ enum CompletionSource: sealed trait CompletionValue: def label: String def insertText: Option[String] = None - def snippetSuffix: CompletionSuffix = CompletionSuffix.empty + def snippetAffix: CompletionAffix = CompletionAffix.empty def additionalEdits: List[TextEdit] = Nil def range: Option[Range] = None def filterText: Option[String] = None @@ -66,7 +66,6 @@ object CompletionValue: sealed trait Symbolic extends CompletionValue: def denotation: Denotation val symbol = denotation.symbol - def 
isFromWorkspace: Boolean = false override def completionItemDataKind = CompletionItemData.None def isExtensionMethod: Boolean = false @@ -80,6 +79,9 @@ object CompletionValue: ) def importSymbol: Symbol = symbol + override def range: Option[Range] = + snippetAffix.toInsertRange + def completionItemKind(using Context): CompletionItemKind = val symbol = this.symbol if symbol.is(Package) || symbol.is(Module) then @@ -97,20 +99,18 @@ object CompletionValue: override def labelWithDescription( printer: ShortenedTypePrinter )(using Context): String = - if symbol.is(Method) then s"${label}${description(printer)}" - else if symbol.isConstructor then label + if symbol.isConstructor then s"${snippetAffix.toPrefix}${label}${description(printer)}" + else if symbol.is(Method) then s"${label}${description(printer)}" else if symbol.is(Mutable) then s"$label: ${description(printer)}" else if symbol.is(Package) || symbol.is(Module) || symbol.isClass then - if isFromWorkspace then - s"${labelWithSuffix(printer)} -${description(printer)}" - else s"${labelWithSuffix(printer)}${description(printer)}" + s"${labelWithSuffix(printer)}${description(printer)}" else if symbol.isType then labelWithSuffix(printer) else if symbol.isTerm && symbol.info.typeSymbol.is(Module) then s"${label}${description(printer)}" else s"$label: ${description(printer)}" protected def labelWithSuffix(printer: ShortenedTypePrinter)(using Context): String = - if snippetSuffix.addLabelSnippet + if snippetAffix.addLabelSnippet then val printedParams = symbol.info.typeParams.map(p => p.paramName.decoded ++ printer.tpe(p.paramInfo) @@ -126,29 +126,64 @@ object CompletionValue: case class Compiler( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix + override val snippetAffix: CompletionAffix ) extends Symbolic: override def completionItemDataKind: Integer = CompletionSource.CompilerKind.ordinal + /** + * We need to access original completion in sorting phase. 
+ * This class is only a wrapper to hold both new completion and original completion. + * + * All methods are proxied to @param extraMethod + * + * FIXME Refactor this file to different architercture. At least to somethhing that is easier to modifiy and scale. + * One solution may be a migration to flag based solution. + */ + case class ExtraMethod( + owner: Denotation, + extraMethod: Symbolic, + ) extends Symbolic: + override def additionalEdits: List[TextEdit] = extraMethod.additionalEdits + override def command: Option[String] = extraMethod.command + override def completionData(buildTargetIdentifier: String)(using Context): CompletionItemData = extraMethod.completionData((buildTargetIdentifier)) + override def completionItemKind(using Context): CompletionItemKind = extraMethod.completionItemKind + override def description(printer: ShortenedTypePrinter)(using Context): String = extraMethod.description(printer) + override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = extraMethod.labelWithDescription(printer) + override def range: Option[Range] = extraMethod.range + override def denotation: Denotation = extraMethod.denotation + override def label: String = extraMethod.label + override def filterText: Option[String] = extraMethod.filterText + override def importSymbol: Symbol = extraMethod.importSymbol + override def lspTags(using Context): List[CompletionItemTag] = extraMethod.lspTags + override def insertText: Option[String] = extraMethod.insertText + override def isExtensionMethod: Boolean = extraMethod.isExtensionMethod + override def snippetAffix: CompletionAffix = extraMethod.snippetAffix + override def insertMode: Option[InsertTextMode] = extraMethod.insertMode + override val symbol: Symbol = extraMethod.symbol + override def completionItemDataKind: Integer = extraMethod.completionItemDataKind + case class Scope( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix, + override val 
snippetAffix: CompletionAffix, ) extends Symbolic: override def completionItemDataKind: Integer = CompletionSource.ScopeKind.ordinal case class Workspace( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix, + override val snippetAffix: CompletionAffix, override val importSymbol: Symbol ) extends Symbolic: - override def isFromWorkspace: Boolean = true override def completionItemDataKind: Integer = CompletionSource.WorkspaceKind.ordinal override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = - if symbol.is(Method) && symbol.name != nme.apply then + if symbol.isConstructor || symbol.name == nme.apply then + s"${snippetAffix.toPrefix}${label}${description(printer)} - ${printer.fullNameString(importSymbol.effectiveOwner)}" + else if symbol.is(Method) then s"${labelWithSuffix(printer)} - ${printer.fullNameString(symbol.effectiveOwner)}" + else if symbol.is(Package) || symbol.is(Module) || symbol.isClass then + s"${labelWithSuffix(printer)} -${description(printer)}" else super.labelWithDescription(printer) /** @@ -157,7 +192,7 @@ object CompletionValue: case class ImplicitClass( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix, + override val snippetAffix: CompletionAffix, override val importSymbol: Symbol, ) extends Symbolic: override def completionItemKind(using Context): CompletionItemKind = @@ -172,7 +207,7 @@ object CompletionValue: case class Extension( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix + override val snippetAffix: CompletionAffix ) extends Symbolic: override def completionItemKind(using Context): CompletionItemKind = CompletionItemKind.Method @@ -257,6 +292,7 @@ object CompletionValue: override def completionItemKind(using Context): CompletionItemKind = CompletionItemKind.Folder + // TODO remove this type and return `Compiler`, `Workspace` instead case class Interpolator( denotation: Denotation, label: 
String, diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index 81a543701817..7a10c9e4804d 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -8,12 +8,11 @@ import scala.collection.mutable import scala.meta.internal.metals.Fuzzy import scala.meta.internal.metals.ReportContext import scala.meta.internal.mtags.CoursierComplete -import scala.meta.internal.pc.{IdentifierComparator, MemberOrdering} +import scala.meta.internal.pc.{IdentifierComparator, MemberOrdering, CompletionFuzzy} import scala.meta.pc.* import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.ast.untpd -import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.core.Comments.Comment import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.* @@ -34,7 +33,7 @@ import dotty.tools.pc.completions.OverrideCompletions.OverrideExtractor import dotty.tools.pc.buildinfo.BuildInfo import dotty.tools.pc.utils.MtagsEnrichments.* import dotty.tools.dotc.core.Denotations.SingleDenotation -import dotty.tools.dotc.interactive.Interactive + class Completions( text: String, @@ -69,10 +68,9 @@ class Completions( false case (_: (Import | Export)) :: _ => false case _ :: (_: (Import | Export)) :: _ => false - case (_: Ident) :: (_: SeqLiteral) :: _ => false case _ => true - private lazy val allowTemplateSuffix: Boolean = + private lazy val isNew: Boolean = path match case _ :: New(selectOrIdent: (Select | Ident)) :: _ => true case _ => false @@ -88,7 +86,6 @@ class Completions( val generalExclude = isUninterestingSymbol(sym) || !isNotLocalForwardReference(sym) || - sym.isPackageObject || hasSyntheticCursorSuffix def isWildcardParam(sym: Symbol) = @@ -105,14 +102,23 @@ class Completions( end if end includeSymbol + def 
enrichedCompilerCompletions(qualType: Type): (List[CompletionValue], SymbolSearch.Result) = + val compilerCompletions = Completion + .rawCompletions(completionPos.originalCursorPosition, completionMode, completionPos.query, path, adjustedPath) + + compilerCompletions + .toList + .flatMap(toCompletionValues) + .filterInteresting(qualType) + def completions(): (List[CompletionValue], SymbolSearch.Result) = val (advanced, exclusive) = advancedCompletions(path, completionPos) val (all, result) = if exclusive then (advanced, SymbolSearch.Result.COMPLETE) else - val keywords = - KeywordsCompletions.contribute(path, completionPos, comments) + val keywords = KeywordsCompletions.contribute(path, completionPos, comments) val allAdvanced = advanced ++ keywords + path match // should not show completions for toplevel case Nil | (_: PackageDef) :: _ if !completionPos.originalCursorPosition.source.file.ext.isScalaScript => @@ -120,18 +126,10 @@ class Completions( case Select(qual, _) :: _ if qual.typeOpt.isErroneous => (allAdvanced, SymbolSearch.Result.COMPLETE) case Select(qual, _) :: _ => - val compilerCompletions = Completion.rawCompletions(completionPos.originalCursorPosition, completionMode, completionPos.query, path, adjustedPath) - val (compiler, result) = compilerCompletions - .toList - .flatMap(toCompletionValues) - .filterInteresting(qual.typeOpt.widenDealias) + val (compiler, result) = enrichedCompilerCompletions(qual.typeOpt.widenDealias) (allAdvanced ++ compiler, result) case _ => - val compilerCompletions = Completion.rawCompletions(completionPos.originalCursorPosition, completionMode, completionPos.query, path, adjustedPath) - val (compiler, result) = compilerCompletions - .toList - .flatMap(toCompletionValues) - .filterInteresting() + val (compiler, result) = enrichedCompilerCompletions(defn.AnyType) (allAdvanced ++ compiler, result) end match @@ -147,7 +145,7 @@ class Completions( denots: Seq[SingleDenotation] ): List[CompletionValue] = denots.toList.flatMap: 
denot => - completionsWithSuffix( + completionsWithAffix( denot, completion.show, (label, denot, suffix) => CompletionValue.Compiler(label, denot, suffix) @@ -157,13 +155,17 @@ class Completions( inline private def undoBacktick(label: String): String = label.stripPrefix("`").stripSuffix("`") + // TODO This has to be refactored to properly split extension methods + // This method has to be fixed even further. The similar problem will be present in shortened type printer. private def getParams(symbol: Symbol) = lazy val extensionParam = symbol.extensionParam if symbol.is(Flags.Extension) then symbol.paramSymss.filterNot( _.contains(extensionParam) ) - else symbol.paramSymss + else if symbol.isConstructor then + symbol.owner.paramSymss + else symbol.paramSymss.filter(!_.exists(_.isTypeParam)) private def isAbstractType(symbol: Symbol) = (symbol.info.typeSymbol.is(Trait) // trait A{ def doSomething: Int} @@ -184,20 +186,19 @@ class Completions( ) end isAbstractType - private def findSuffix(symbol: Symbol): CompletionSuffix = - CompletionSuffix.empty + private def findSuffix(symbol: Symbol): CompletionAffix = + CompletionAffix.empty .chain { suffix => // for [] suffix - if shouldAddSnippet && symbol.info.typeParams.nonEmpty - then suffix.withNewSuffixSnippet(SuffixKind.Bracket) + if shouldAddSnippet && symbol.info.typeParams.nonEmpty then + suffix.withNewSuffixSnippet(Affix(SuffixKind.Bracket)) else suffix } .chain { suffix => // for () suffix - if shouldAddSnippet && symbol.is(Flags.Method) - then + if shouldAddSnippet && symbol.is(Flags.Method) then val paramss = getParams(symbol) paramss match case Nil => suffix - case List(Nil) => suffix.withNewSuffix(SuffixKind.Brace) + case List(Nil) => suffix.withNewSuffix(Affix(SuffixKind.Brace)) case _ if config.isCompletionSnippetsEnabled() => val onlyParameterless = paramss.forall(_.isEmpty) lazy val onlyImplicitOrTypeParams = paramss.forall( @@ -205,58 +206,93 @@ class Completions( sym.isType || sym.is(Implicit) || 
sym.is(Given) } ) - if onlyParameterless then suffix.withNewSuffix(SuffixKind.Brace) + if onlyParameterless then suffix.withNewSuffix(Affix(SuffixKind.Brace)) else if onlyImplicitOrTypeParams then suffix - else if suffix.hasSnippet then - suffix.withNewSuffix(SuffixKind.Brace) - else suffix.withNewSuffixSnippet(SuffixKind.Brace) + else if suffix.hasSnippet then suffix.withNewSuffix(Affix(SuffixKind.Brace)) + else suffix.withNewSuffixSnippet(Affix(SuffixKind.Brace)) case _ => suffix end match else suffix } .chain { suffix => // for {} suffix - if shouldAddSnippet && allowTemplateSuffix - && isAbstractType(symbol) - then - if suffix.hasSnippet then suffix.withNewSuffix(SuffixKind.Template) - else suffix.withNewSuffixSnippet(SuffixKind.Template) + if shouldAddSnippet && isNew && isAbstractType(symbol) then + if suffix.hasSnippet then suffix.withNewSuffix(Affix(SuffixKind.Template)) + else suffix.withNewSuffixSnippet(Affix(SuffixKind.Template)) else suffix } end findSuffix - def completionsWithSuffix( + def completionsWithAffix( denot: SingleDenotation, label: String, - toCompletionValue: (String, SingleDenotation, CompletionSuffix) => CompletionValue + toCompletionValue: (String, SingleDenotation, CompletionAffix) => CompletionValue.Symbolic ): List[CompletionValue] = val sym = denot.symbol - // find the apply completion that would need a snippet - val methodDenots: List[SingleDenotation] = - if shouldAddSnippet && completionMode.is(Mode.Term) && - (sym.is(Flags.Module) || sym.isField || sym.isClass && !sym.is(Flags.Trait)) && !sym.is(Flags.JavaDefined) - then - val info = - /* Companion will be added even for normal classes now, - * but it will not show up from classpath. We can suggest - * constructors based on those synthetic applies. 
- */ - if sym.isClass && sym.companionModule.exists then sym.companionModule.info - else denot.info - val applyDenots = info.member(nme.apply).allSymbols.map(_.asSeenFrom(info).asSingleDenotation) - denot :: applyDenots - else denot :: Nil - - methodDenots.map { methodDenot => - val suffix = findSuffix(methodDenot.symbol) + val hasNonSyntheticConstructor = sym.name.isTypeName && sym.isClass + && !sym.is(ModuleClass) && !sym.is(Trait) && !sym.is(Abstract) && !sym.is(Flags.JavaDefined) + + val (extraMethodDenots, skipOriginalDenot): (List[SingleDenotation], Boolean) = + if shouldAddSnippet && isNew && hasNonSyntheticConstructor then + val constructors = sym.info.member(nme.CONSTRUCTOR).allSymbols.map(_.asSingleDenotation) + .filter(_.symbol.isAccessibleFrom(denot.info)) + constructors -> true + + else if shouldAddSnippet && completionMode.is(Mode.Term) && sym.name.isTermName && + !sym.is(Flags.JavaDefined) && (sym.isClass || sym.is(Module) || (sym.isField && denot.info.isInstanceOf[TermRef])) then + + val constructors = if sym.isAllOf(ConstructorProxyModule) then + sym.companionClass.info.member(nme.CONSTRUCTOR).allSymbols + else + val companionApplies = denot.info.member(nme.apply).allSymbols + val classConstructors = if sym.companionClass.exists && !sym.companionClass.isOneOf(AbstractOrTrait) then + sym.companionClass.info.member(nme.CONSTRUCTOR).allSymbols + else Nil + + if companionApplies.exists(_.is(Synthetic)) then + companionApplies ++ classConstructors.filter(!_.isPrimaryConstructor) + else + companionApplies ++ classConstructors + + val result = constructors.map(_.asSeenFrom(denot.info).asSingleDenotation) + .filter(_.symbol.isAccessibleFrom(denot.info)) + + result -> (sym.isAllOf(ConstructorProxyModule) || sym.is(Trait)) + else Nil -> false + + val extraCompletionValues = + val existsApply = extraMethodDenots.exists(_.symbol.name == nme.apply) + + extraMethodDenots.map { methodDenot => + val suffix = findSuffix(methodDenot.symbol) + val affix = if 
methodDenot.symbol.isConstructor && existsApply then + adjustedPath match + case (select @ Select(qual, _)) :: _ => + val start = qual.span.start + val insertRange = select.sourcePos.startPos.withEnd(completionPos.queryEnd).toLsp + + suffix + .withCurrentPrefix(qual.show + ".") + .withNewPrefix(Affix(PrefixKind.New, insertRange = Some(insertRange))) + case _ => + suffix.withNewPrefix(Affix(PrefixKind.New)) + else suffix + val name = undoBacktick(label) + + CompletionValue.ExtraMethod( + owner = denot, + extraMethod = toCompletionValue(name, methodDenot, affix) + ) + } + + if skipOriginalDenot then extraCompletionValues + else + val suffix = findSuffix(denot.symbol) val name = undoBacktick(label) - toCompletionValue( - name, - methodDenot, - suffix - ) - } - end completionsWithSuffix + val denotCompletionValue = toCompletionValue(name, denot, suffix) + denotCompletionValue :: extraCompletionValues + + end completionsWithAffix /** * @return Tuple of completionValues and flag. If the latter boolean value is true @@ -495,13 +531,22 @@ class Completions( val query = completionPos.query if completionMode.is(Mode.Scope) && query.nonEmpty then val visitor = new CompilerSearchVisitor(sym => - if !(sym.is(Flags.ExtensionMethod) || - (sym.maybeOwner.is(Flags.Implicit) && sym.maybeOwner.isClass)) + if Completion.isValidCompletionSymbol(sym, completionMode) && + !(sym.is(Flags.ExtensionMethod) || (sym.maybeOwner.is(Flags.Implicit) && sym.maybeOwner.isClass)) then indexedContext.lookupSym(sym) match case IndexedContext.Result.InScope => false + case _ if completionMode.is(Mode.ImportOrExport) => + visit( + CompletionValue.Workspace( + label = undoBacktick(sym.decodedName), + denotation = sym, + snippetAffix = CompletionAffix.empty, + importSymbol = sym + ) + ) case _ => - completionsWithSuffix( + completionsWithAffix( sym, sym.decodedName, CompletionValue.Workspace(_, _, _, sym) @@ -534,13 +579,13 @@ class Completions( && !sym.isConstructor && !isDefaultVariableSetter if 
isExtensionMethod then - completionsWithSuffix( + completionsWithAffix( sym, sym.decodedName, CompletionValue.Extension(_, _, _) ).map(visit).forall(_ == true) else if isImplicitClassMember then - completionsWithSuffix( + completionsWithAffix( sym, sym.decodedName, CompletionValue.ImplicitClass(_, _, _, sym.maybeOwner), @@ -569,13 +614,36 @@ class Completions( sym.showFullName + sigString else sym.fullName.stripModuleClassSuffix.show + /** If we try to complete TypeName, we should favor types over terms with same name value and without suffix. + */ + def deduplicateCompletions(completions: List[CompletionValue]): List[CompletionValue] = + val (symbolicCompletions, rest) = completions.partition: + _.isInstanceOf[CompletionValue.Symbolic] + + val symbolicCompletionsMap = symbolicCompletions + .collect { case symbolic: CompletionValue.Symbolic => symbolic } + .groupBy(_.symbol.fullName) // we somehow have to ignore proxy type + + val filteredSymbolicCompletions = symbolicCompletionsMap.filter: (name, denots) => + lazy val existsTypeWithoutSuffix: Boolean = !symbolicCompletionsMap + .get(name.toTypeName) + .forall(_.forall(sym => sym.snippetAffix.suffixes.nonEmpty)) + + (completionMode.is(Mode.Term) && !completionMode.is(Mode.ImportOrExport)) || + // show non synthetic symbols + // companion test should not result TrieMap[K, V] + (name.isTermName && !existsTypeWithoutSuffix) || + name.isTypeName + .toList.unzip._2.flatten + + filteredSymbolicCompletions ++ rest + extension (l: List[CompletionValue]) def filterInteresting( qualType: Type = ctx.definitions.AnyType, enrich: Boolean = true ): (List[CompletionValue], SymbolSearch.Result) = - - val isSeen = mutable.Set.empty[String] + val alreadySeen = mutable.Set.empty[String] val buf = List.newBuilder[CompletionValue] def visit(head: CompletionValue): Boolean = val (id, include) = @@ -585,15 +653,13 @@ class Completions( case ck: CompletionValue.CaseKeyword => (ck.label, true) case symOnly: CompletionValue.Symbolic => val 
sym = symOnly.symbol - val name = SemanticdbSymbols.symbolName(sym) - val nameId = - if sym.isClass || sym.is(Module) then - // drop #|. at the end to avoid duplication - name.substring(0, name.length() - 1).nn - else name + val name = symOnly match + case CompletionValue.ExtraMethod(owner, extraMethod) => + SemanticdbSymbols.symbolName(owner.symbol) + SemanticdbSymbols.symbolName(extraMethod.symbol) + case _ => SemanticdbSymbols.symbolName(sym) val suffix = - if symOnly.snippetSuffix.addLabelSnippet then "[]" else "" - val id = nameId + suffix + if symOnly.snippetAffix.addLabelSnippet then "[]" else "" + val id = name + suffix val include = includeSymbol(sym) (id, include) case kw: CompletionValue.Keyword => (kw.label, true) @@ -604,8 +670,8 @@ class Completions( (fileSysMember.label, true) case ii: CompletionValue.IvyImport => (ii.label, true) - if !isSeen(id) && include then - isSeen += id + if !alreadySeen(id) && include then + alreadySeen += id buf += head true else false @@ -615,12 +681,9 @@ class Completions( if enrich then val searchResult = - enrichWithSymbolSearch(visit, qualType).getOrElse( - SymbolSearch.Result.COMPLETE - ) - (buf.result, searchResult) - else (buf.result, SymbolSearch.Result.COMPLETE) - + enrichWithSymbolSearch(visit, qualType).getOrElse(SymbolSearch.Result.COMPLETE) + (deduplicateCompletions(buf.result), searchResult) + else (deduplicateCompletions(buf.result), SymbolSearch.Result.COMPLETE) end filterInteresting end extension @@ -704,18 +767,24 @@ class Completions( relevance end symbolRelevance + def computeRelevance(sym: Symbol, completionValue: CompletionValue.Symbolic) = + completionValue match + case _: CompletionValue.Override => + var penalty = symbolRelevance(sym) + // show the abstract members first + if !sym.is(Deferred) then penalty |= MemberOrdering.IsNotAbstract + penalty + case _: CompletionValue.Workspace => + symbolRelevance(sym) | (IsWorkspaceSymbol + sym.name.show.length()) + case _ => symbolRelevance(sym) + 
completion match - case ov: CompletionValue.Override => - var penalty = symbolRelevance(ov.symbol) - // show the abstract members first - if !ov.symbol.is(Deferred) then penalty |= MemberOrdering.IsNotAbstract - penalty - case CompletionValue.Workspace(_, denot, _, _) => - symbolRelevance(denot.symbol) | (IsWorkspaceSymbol + denot.name.show.length()) + case CompletionValue.ExtraMethod(owner, extraMethod) => + computeRelevance(owner.symbol, extraMethod) case sym: CompletionValue.Symbolic => - symbolRelevance(sym.symbol) - case _ => - Int.MaxValue + computeRelevance(sym.symbol, sym) + case _ => Int.MaxValue + end computeRelevancePenalty private lazy val isEvilMethod: Set[Name] = Set[Name]( @@ -823,6 +892,7 @@ class Completions( def priority(v: CompletionValue): Int = v match case _: CompletionValue.Compiler => 0 + case CompletionValue.ExtraMethod(_, _: CompletionValue.Compiler) => 0 case _ => 1 priority(o1) - priority(o2) @@ -862,6 +932,23 @@ class Completions( prioritizeCaseKeyword || prioritizeNamed end compareCompletionValue + def methodScore(v: CompletionValue.Symbolic)(using Context): Int = + val sym = v.symbol + val workspacePenalty = v match + case CompletionValue.ExtraMethod(_, _: CompletionValue.Workspace) => 5 + case _: CompletionValue.Workspace => 5 + case _ => 0 + + val isExtraMethod = v.isInstanceOf[CompletionValue.ExtraMethod] + val methodPenalty = + if isNew && sym.isConstructor then -1 + else if isExtraMethod && !sym.isConstructor then 1 + else if isExtraMethod then 2 + else if !sym.isAllOf(SyntheticModule) then 3 + else 4 + + workspacePenalty + methodPenalty + override def compare(o1: CompletionValue, o2: CompletionValue): Int = (o1, o2) match case (o1: CompletionValue.NamedArg, o2: CompletionValue.NamedArg) => @@ -881,32 +968,39 @@ class Completions( val byLocalSymbol = compareLocalSymbols(s1, s2) if byLocalSymbol != 0 then byLocalSymbol else - val byRelevance = compareByRelevance(o1, o2) - if byRelevance != 0 then byRelevance + val byFuzzy = 
Integer.compare( + fuzzyScore(sym1), + fuzzyScore(sym2) + ) + if byFuzzy != 0 then byFuzzy else - val byFuzzy = Integer.compare( - fuzzyScore(sym1), - fuzzyScore(sym2) - ) - if byFuzzy != 0 then byFuzzy + val byRelevance = compareByRelevance(o1, o2) + if byRelevance != 0 then byRelevance else - val byIdentifier = IdentifierComparator.compare( - s1.name.show, - s2.name.show + val byMethodScore = Integer.compare( + methodScore(sym1), + methodScore(sym2) ) - if byIdentifier != 0 then byIdentifier + if byMethodScore != 0 then byMethodScore else - val byOwner = - s1.owner.fullName.toString - .compareTo(s2.owner.fullName.toString) - if byOwner != 0 then byOwner + val byIdentifier = IdentifierComparator.compare( + s1.name.show, + s2.name.show + ) + if byIdentifier != 0 then byIdentifier else - val byParamCount = Integer.compare( - s1.paramSymss.flatten.size, - s2.paramSymss.flatten.size - ) - if byParamCount != 0 then byParamCount - else s1.detailString.compareTo(s2.detailString) + val byOwner = + s1.owner.fullName.toString + .compareTo(s2.owner.fullName.toString) + if byOwner != 0 then byOwner + else + val byParamCount = Integer.compare( + s1.paramSymss.flatten.size, + s2.paramSymss.flatten.size + ) + if byParamCount != 0 then byParamCount + else s1.detailString.compareTo(s2.detailString) + end if end if end if end if diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala index 2a8ead70ea33..9c973e6e63e0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala @@ -12,9 +12,7 @@ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Symbols.Symbol -import dotty.tools.dotc.util.Spans import 
dotty.tools.dotc.core.Types.Type -import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.CompilerSearchVisitor import dotty.tools.pc.IndexedContext import dotty.tools.pc.utils.MtagsEnrichments.* @@ -112,18 +110,17 @@ object InterpolatorCompletions: buildTargetIdentifier: String )(using Context, ReportContext): List[CompletionValue] = def newText( - name: String, - suffix: Option[String], + label: String, + affix: CompletionAffix , identOrSelect: Ident | Select ): String = - val snippetCursor = suffixEnding(suffix, areSnippetsSupported) + val snippetCursor = suffixEnding(affix.toSuffixOpt, areSnippetsSupported) new StringBuilder() .append('{') - .append( - text.substring(identOrSelect.span.start, identOrSelect.span.end) - ) + .append(affix.toPrefix) // we use toPrefix here, because previous prefix is added in the next step + .append(text.substring(identOrSelect.span.start, identOrSelect.span.end)) .append('.') - .append(name.backticked) + .append(label.backticked) .append(snippetCursor) .append('}') .toString @@ -155,14 +152,14 @@ object InterpolatorCompletions: sym.name.toString() ) => val label = sym.name.decoded - completions.completionsWithSuffix( + completions.completionsWithAffix( sym, label, - (name, denot, suffix) => + (name, denot, affix) => CompletionValue.Interpolator( denot.symbol, label, - Some(newText(name, suffix.toEditOpt, identOrSelect)), + Some(newText(name, affix, identOrSelect)), Nil, Some(completionPos.originalCursorPosition.withStart(identOrSelect.span.start).toLsp), // Needed for VS Code which will not show the completion otherwise @@ -252,16 +249,18 @@ object InterpolatorCompletions: interpolatorEdit ++ dollarEdits end additionalEdits - def newText(symbolName: String, suffix: Option[String]): String = + def newText(symbolName: String, affix: CompletionAffix): String = val out = new StringBuilder() val identifier = symbolName.backticked val symbolNeedsBraces = interpolator.needsBraces || identifier.startsWith("`") || - 
suffix.isDefined + affix.toSuffixOpt.isDefined || + affix.toPrefix.nonEmpty if symbolNeedsBraces && !hasOpeningBrace then out.append('{') + out.append(affix.toInsertPrefix) out.append(identifier) - out.append(suffixEnding(suffix, areSnippetsSupported)) + out.append(suffixEnding(affix.toSuffixOpt, areSnippetsSupported)) if symbolNeedsBraces && !hasClosingBrace then out.append('}') out.toString end newText @@ -286,14 +285,14 @@ object InterpolatorCompletions: sym.name.decoded ) && !sym.isType => val label = sym.name.decoded - completions.completionsWithSuffix( + completions.completionsWithAffix( sym, label, - (name, denot, suffix) => + (name, denot, affix) => CompletionValue.Interpolator( denot.symbol, label, - Some(newText(name, suffix.toEditOpt)), + Some(newText(name, affix)), additionalEdits(), Some(nameRange), None, diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala index 7f1d92305309..908865124f58 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala @@ -15,7 +15,6 @@ import dotty.tools.toOption import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Definitions import dotty.tools.dotc.core.Denotations.Denotation import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Flags.* @@ -24,7 +23,6 @@ import dotty.tools.dotc.core.Symbols.NoSymbol import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.Types.AndType import dotty.tools.dotc.core.Types.ClassInfo -import dotty.tools.dotc.core.Types.NoType import dotty.tools.dotc.core.Types.OrType import dotty.tools.dotc.core.Types.Type import dotty.tools.dotc.core.Types.TypeRef diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala index 6f244d9a3414..8ac5ef64af10 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala @@ -2,7 +2,6 @@ package dotty.tools.pc.completions import scala.util.Try -import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.ast.Trees.ValDef import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.ast.untpd @@ -439,4 +438,4 @@ case class JustSymbol(symbol: Symbol)(using Context) extends ParamSymbol: def info: Type = symbol.info case class RefinedSymbol(symbol: Symbol, name: Name, info: Type) - extends ParamSymbol \ No newline at end of file + extends ParamSymbol diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala index 964f6a6894a2..bfb31906bce1 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala @@ -207,7 +207,8 @@ abstract class BaseCompletionSuite extends BasePCSuite: includeDetail: Boolean = true, filename: String = "A.scala", filter: String => Boolean = _ => true, - enablePackageWrap: Boolean = true + enablePackageWrap: Boolean = true, + includeCompletionKind: Boolean = false, ): Unit = val out = new StringBuilder() val withPkg = @@ -221,13 +222,14 @@ abstract class BaseCompletionSuite extends BasePCSuite: filteredItems.foreach { item => val label = TestCompletions.getFullyQualifiedLabel(item) val commitCharacter = - if (includeCommitCharacter) + if includeCommitCharacter then Option(item.getCommitCharacters) .getOrElse(Collections.emptyList()) .asScala .mkString(" (commit: '", " ", "')") else "" val documentation = doc(item.getDocumentation) 
+ val completionKind = Option.when(includeCompletionKind)(s" (${item.getKind.toString})").getOrElse("") if (includeDocs && documentation.nonEmpty) { out.append("> ").append(documentation).append("\n") } @@ -244,6 +246,7 @@ abstract class BaseCompletionSuite extends BasePCSuite: "" }) .append(commitCharacter) + .append(completionKind) .append("\n") } val completionSources = filteredItems diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala index 61239b535e1c..f4bfc806dbb3 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala @@ -96,7 +96,7 @@ class CompletionArgSuite extends BaseCompletionSuite: """|age = : Int |followers = : Int |Main test - |User test + |User(name: String = ..., age: Int = ..., address: String = ..., followers: Int = ...): User |""".stripMargin, topLines = Option(4) ) @@ -130,7 +130,7 @@ class CompletionArgSuite extends BaseCompletionSuite: """|age = : Int |followers = : Int |Main test - |User test + |User(name: String = ..., age: Int = ..., address: String = ..., followers: Int = ...): User |""".stripMargin, topLines = Option(4) ) @@ -1119,4 +1119,4 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """x: Int |x = : Any""".stripMargin, - ) \ No newline at end of file + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala index 45f07b5fb7b1..b487611b9ea1 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala @@ -182,10 +182,10 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """ 
- |> Found documentation for scala/util/Try. - |Try scala.util |> Found documentation for scala/util/Try.apply(). |Try[T](r: => T): Try[T] + |> Found documentation for scala/util/Try. + |Try scala.util |""".stripMargin, includeDocs = true ) @@ -199,7 +199,7 @@ class CompletionDocSuite extends BaseCompletionSuite: """.stripMargin, """ |> Found documentation for scala/collection/mutable/StringBuilder. - |StringBuilder scala.collection.mutable + |StringBuilder(): StringBuilder |""".stripMargin, includeDocs = true, topLines = Some(1) @@ -213,9 +213,9 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """ + |Vector[A](elems: A*): Vector[A] |> Found documentation for scala/package.Vector. |Vector scala.collection.immutable - |Vector[A](elems: A*): Vector[A] |""".stripMargin, includeDocs = true ) @@ -228,11 +228,8 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """ - |> ### class Catch - |Found documentation for scala/util/control/Exception.Catch# - |### object Catch - |Found documentation for scala/util/control/Exception.Catch. 
- |Catch[T] - scala.util.control.Exception + |> Found documentation for scala/util/control/Exception.Catch# + |Catch[T](pf: Catcher[T], fin: Option[Finally] = ..., rethrow: Throwable => Boolean = ...): Catch[T] - scala.util.control.Exception |> ### class Catch |Found documentation for scala/util/control/Exception.Catch# |### object Catch @@ -249,8 +246,8 @@ class CompletionDocSuite extends BaseCompletionSuite: | scala.util.Failure@@ |} """.stripMargin, - """|Failure scala.util - |Failure[T](exception: Throwable): Failure[T] + """|Failure[T](exception: Throwable): Failure[T] + |Failure scala.util |""".stripMargin, includeDocs = true ) @@ -264,16 +261,8 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """ - |> ### class DynamicVariable - |Found documentation for scala/util/DynamicVariable# - |### object DynamicVariable - |Found documentation for scala/util/DynamicVariable. - |DynamicVariable[T] scala.util - |> ### class DynamicVariable - |Found documentation for scala/util/DynamicVariable# - |### object DynamicVariable - |Found documentation for scala/util/DynamicVariable. 
- |DynamicVariable scala.util + |> Found documentation for scala/util/DynamicVariable# + |DynamicVariable[T](init: T): DynamicVariable[T] |""".stripMargin, includeDocs = true ) @@ -317,6 +306,5 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """|myNumbers: Vector[Int] - |myNumbers(i: Int): Int |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala new file mode 100644 index 000000000000..010d0b14fa90 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala @@ -0,0 +1,565 @@ +package dotty.tools.pc.tests.completion + +import scala.meta.pc.SymbolDocumentation +import scala.language.unsafeNulls + +import dotty.tools.pc.base.BaseCompletionSuite +import dotty.tools.pc.utils.MockEntries + +import org.junit.Test +import org.junit.Ignore +import scala.collection.immutable.ListMapBuilder + +class CompletionExtraConstructorSuite extends BaseCompletionSuite: + + @Test def `no-extra-new-completions-class-1` = + check( + """|object Wrapper: + | class TestClass(x: Int) + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-class-2` = + check( + """|object Wrapper: + | class TestClass() + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-class-3` = + check( + """|object Wrapper: + | class TestClass[T](x: T) + | TestCla@@ + |""".stripMargin, + """|TestClass[T](x: T): TestClass[T] (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-case-class-1` = + check( + """|object Wrapper: + | case class TestClass(x: Int) + | TestCla@@ + 
|""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-case-class-2` = + check( + """|object Wrapper: + | case class TestClass() + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-case-class-3` = + check( + """|object Wrapper: + | case class TestClass[T](x: T) + | TestCla@@ + |""".stripMargin, + """|TestClass[T](x: T): TestClass[T] (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-1` = + check( + """|object Wrapper: + | abstract class TestClass(x: Int) + | TestCla@@ + |""".stripMargin, + """| + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-2` = + check( + """|object Wrapper: + | abstract class TestClass() + | TestCla@@ + |""".stripMargin, + """| + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass[T](x: T): TestClass[T] (Constructor) + @Test def `extra-new-completions-abstract-class-3` = + check( + """|object Wrapper: + | abstract class TestClass[T](x: T) + | TestCla@@ + |""".stripMargin, + """| + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass (Constructor) + @Test def `extra-new-completions-trait-1` = + check( + """|object Wrapper: + 
| trait TestClass + | TestCla@@ + |""".stripMargin, + """| + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `extra-new-completions-class-1` = + check( + """|object Wrapper: + | class TestClass(x: Int) + | object TestClass: + | def apply(x: Int, y: Int): TestClass = TestClass(x + y) + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int, y: Int): TestClass (Method) + |new TestClass(x: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `extra-new-completions-class-2` = + check( + """|object Wrapper: + | class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = TestClass(x) + | TestCla@@ + |} + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |new TestClass(x: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `extra-new-completions-class-3` = + check( + """|object Wrapper: + | class TestClass() + | object TestClass: + | def apply(): TestClass = TestClass(1) + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |new TestClass(): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-with-companion-1` = + check( + """|object Wrapper: + | abstract class TestClass(x: Int) + | object TestClass: + | def apply(x: Int, y: Int): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int, y: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-with-companion-2` = + check( + """|object Wrapper: + | abstract class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-with-companion-3` = + check( + """|object Wrapper: + | abstract class TestClass() + | object TestClass: + | def apply(): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-trait-with-companion-1` = + check( + """|object Wrapper: + | trait TestClass(x: Int) + | object TestClass: + | def apply(x: Int, y: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int, y: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-trait-with-companion-2` = + check( + """|object Wrapper: + | trait TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(): TestClass (Constructor) + @Test def `extra-new-completions-trait-with-companion-3` = + check( + """|object Wrapper: + | trait TestClass() + | object TestClass: + | def apply(): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // This test should have new TestClass completion without parentheses. The actual issue is with printer, edit text is correct + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(): TestClass (Constructor) + @Test def `extra-new-completions-trait-with-companion-4` = + check( + """|object Wrapper: + | trait TestClass + | object TestClass: + | def apply(): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + checkSnippet( + """|object Wrapper: + | trait TestClass + | object TestClass: + | def apply(): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass() + |TestClass + |""".stripMargin, + ) + + @Test def `multiple-extra-new-constructors-class-1` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Constructor) + |TestClass(x: Int): TestClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-class-2` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Constructor) + |TestClass(x: Int): TestClass (Constructor) + |TestClass(x: Int, y: Int): TestClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-2` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(z: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(z: Int): TestClass (Method) + |new TestClass(): TestClass (Constructor) + |new TestClass(x: Int): TestClass (Constructor) + |new TestClass(x: Int, y: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-3` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(z: Int): TestClass = ??? + | def apply(z: Int, w: Int): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(z: Int): TestClass (Method) + |TestClass(z: Int, w: Int): TestClass (Method) + |new TestClass(): TestClass (Constructor) + |new TestClass(x: Int): TestClass (Constructor) + |new TestClass(x: Int, y: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-same-signature-class` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |new TestClass(): TestClass (Constructor) + |new TestClass(x: Int): TestClass (Constructor) + |new TestClass(x: Int, y: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-same-signature-case-class` = + check( + """|object Wrapper: + | case class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass(x: Int): TestClass (Method) + |new TestClass(x: Int): TestClass (Constructor) + |new TestClass(x: Int, y: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-same-signature-trait` = + check( + """|object Wrapper: + | trait TestClass + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + + // TODO We first need to detect support when to add additional braces / colon + // missing: + // new TestClass(): TestClass (Constructor) + // new TestClass(x: Int): TestClass (Constructor) + // new TestClass(x: Int, y: Int): TestClass (Constructor) + @Test def `multiple-extra-new-constructors-with-companion-same-signature-abstract` = + check( + """|object Wrapper: + | abstract class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-completions-in-type-mode-1` = + check( + """|object Wrapper: + | class TestClass() + | val x: TestCla@@ + |""".stripMargin, + """|TestClass test.Wrapper (Class) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-completions-in-type-mode-2` = + check( + """|object Wrapper: + | class TestClass() + | val x: TestCla@@ + |""".stripMargin, + """|TestClass test.Wrapper (Class) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-completions-in-type-mode-3` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? + | val x: TestCla@@ + |""".stripMargin, + """|TestClass test.Wrapper (Class) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `workspace-no-extra-completions-in-type-mode-4` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ |object M { + | val x: TestCla@@ + |} + |""".stripMargin, + """|TestClass - test.Wrapper (Class) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `workspace-multiple-extra-new-constructors` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? + |object M { + | TestCla@@ + |} + |""".stripMargin, + """|TestClass(x: Int): TestClass - test.Wrapper (Method) + |new TestClass(): TestClass - test.Wrapper (Constructor) + |new TestClass(x: Int): TestClass - test.Wrapper (Constructor) + |new TestClass(x: Int, y: Int): TestClass - test.Wrapper (Constructor) + |TestClass - test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `prepend-new` = + checkSnippet( + """|object Wrapper: + | case class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main { + | TestClas@@ + |} + |""".stripMargin, + """|TestClass($0) + |new TestClass + |TestClass + |""".stripMargin + ) + + @Test def `prepend-new-fully-qualified-path` = + checkSnippet( + """|object Wrapper: + | case class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ |object Main { + | Wrapper.Test@@ + |} + |""".stripMargin, + """|TestClass($0) + |new Wrapper.TestClass + |TestClass + |""".stripMargin + ) + + @Test def `dont-include-private-members` = + check( + """|object TestObject: + | private def apply(i: Int) = i + |object Main: + | TestObject@@ + |""".stripMargin, + """|TestObject test + |""".stripMargin + ) + diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala index d9dc635ce21a..08cc1535fd56 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala @@ -4,6 +4,7 @@ import dotty.tools.pc.base.BaseCompletionSuite import org.junit.runners.MethodSorters import org.junit.{FixMethodOrder, Test} +import org.junit.Ignore @FixMethodOrder(MethodSorters.NAME_ASCENDING) class CompletionInterpolatorSuite extends BaseCompletionSuite: @@ -542,7 +543,7 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: |} |""".stripMargin, """s"Hello $hello@@"""".stripMargin, - """s"Hello $helloMethod"""".stripMargin, + """s"Hello ${helloMethod($0)}"""".stripMargin, filter = _.contains("a: Int") ) @@ -627,10 +628,10 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: |} |""".stripMargin, assertSingleItem = false, - // Scala 3 has an additional Paths() completion - itemIndex = 2 + filter = _.contains("java.nio.file") ) + @Test def `auto-imports-prefix-with-interpolator` = checkEdit( """| @@ -644,7 +645,6 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: | s"this is an interesting ${java.nio.file.Paths}" |} |""".stripMargin, - // Scala 3 has an additional Paths object completion itemIndex = 1, assertSingleItem = false ) @@ -745,7 +745,7 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: |object Main { | 
val a = s"${ListBuffer($0)}"" |}""".stripMargin, - filter = _.contains("[A]") + assertSingleItem = false, ) @Test def `dont-show-when-writing-before-dollar` = @@ -780,3 +780,62 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: |""".stripMargin, "host: String" ) + + @Test def `prepend-new-missing-interpolator` = + checkSnippet( + """|case class TestClass(x: Int) + |object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main: + | "$TestClas@@" + |""".stripMargin, + """|{TestClass($0)} + |{new TestClass$0} + |TestClass$0 + |""".stripMargin + ) + + @Ignore("This case is not yet supported by metals") + @Test def `prepend-new-missing-interpolator-with-prefix` = + checkSnippet( + """|object Wrapper: + | case class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main: + | "$Wrapper.TestClas@@" + |""".stripMargin, + """|{Wrapper.TestClass($0)} + |{new Wrapper.TestClass$0} + |{Wrapper.TestClass$0} + |""".stripMargin + ) + + @Test def `prepend-new-with-prefix` = + checkSnippet( + """|object Wrapper: + | case class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main: + | s"$Wrapper.TestClas@@" + |""".stripMargin, + """|{Wrapper.TestClass($0)} + |{new Wrapper.TestClass$0} + |{Wrapper.TestClass$0} + |""".stripMargin + ) + + @Test def `prepend-new-interpolator` = + checkSnippet( + """|case class TestClass(x: Int) + |object TestClass: + | def apply(x: Int): TestClass = ??? 
+ |object Main: + | s"$TestClas@@" + |""".stripMargin, + """|{TestClass($0)} + |{new TestClass} + |TestClass + |""".stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala index 15c449904074..cc6751454d4f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala @@ -151,8 +151,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: """|value: Int |val |var - |varargs(): varargs - |varargs - scala.annotation + |varargs(): varargs - scala.annotation |""".stripMargin ) @@ -169,8 +168,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: |""".stripMargin, """|val |var - |varargs(): varargs - |varargs - scala.annotation + |varargs(): varargs - scala.annotation |""".stripMargin ) @@ -203,8 +201,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: |} |""".stripMargin, """|value: Int - |varargs(): varargs - |varargs - scala.annotation""".stripMargin + |varargs(): varargs - scala.annotation""".stripMargin ) @Test def `val-trailing-space` = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala index 8bc45d344244..b3abc1474375 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala @@ -926,8 +926,8 @@ class CompletionOverrideSuite extends BaseCompletionSuite: |} |""".stripMargin, """|def hello1: Int - |override val hello2: Int |override def equals(x$0: Any): Boolean + |override def hashCode(): Int |""".stripMargin, includeDetail = false, topLines = Some(3) diff --git 
a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala index c3e3f374c23d..b601a63ff234 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala @@ -172,7 +172,6 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |ArrayDequeOps[$0] |ArrayDeque |ArrayDeque - |ArrayDequeOps |""".stripMargin ) @@ -305,15 +304,35 @@ class CompletionSnippetSuite extends BaseCompletionSuite: @Test def `case-class2` = checkSnippet( - s"""|object Main { - | scala.util.Tr@@ + s"""|object wrapper: + | case class Test2(x: Int) + | object Test2: + | def apply(x: Int): Test2 = ??? + |object Main { + | wrapper.Test@@ |} |""".stripMargin, - """|Try - |Try($0) + """|Test2($0) + |new wrapper.Test2 + |Test2 |""".stripMargin ) + @Test def `case-class2-edit` = + checkEditLine( + s"""|object wrapper: + | case class Test2(x: Int) + | object Test2: + | def apply(x: Int): Test2 = ??? + |object Main { + | ___ + |} + |""".stripMargin, + "wrapper.Test@@", + "new wrapper.Test2", + filter = _.contains("new Test2") + ) + @Test def `case-class3` = checkSnippet( s"""|object Main { @@ -322,9 +341,10 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |""".stripMargin, // Note: the class and trait items in here are invalid. So // they are filtered out. 
- """|Try - |Try($0) - |""".stripMargin + """|Try($0) - [T](r: => T): Try[T] + |Try - scala.util + |""".stripMargin, + includeDetail = true ) @Test def `symbol` = @@ -352,10 +372,10 @@ class CompletionSnippetSuite extends BaseCompletionSuite: | Wi@@ |} |""".stripMargin, - """|Widget - example - |Widget($0) - (name: String): Widget + """|Widget($0) - (name: String): Widget |Widget($0) - (age: Int): Widget |Widget($0) - (name: String, age: Int): Widget + |Widget - example |""".stripMargin, includeDetail = true, topLines = Some(4) @@ -365,18 +385,34 @@ class CompletionSnippetSuite extends BaseCompletionSuite: checkSnippet( s"""|package example | - |object Widget{} + |object TestObject {} |object Main { - | Wi@@ + | TestObjec@@ |} |""".stripMargin, - """|Widget - example - |Window - java.awt - |WindowPeer - java.awt.peer - |WithFilter - scala.collection + """|TestObject - example + |""".stripMargin, + includeDetail = true, + ) + + @Test def `dont-enter-empty-paramlist` = + checkSnippet( + s"""|package example + | + |object Main { + | ListMa@@ + |} + |""".stripMargin, + """|ListMap($0) - [K, V](elems: (K, V)*): ListMap[K, V] + |new ListMap - [K, V]: ListMap[K, V] + |ListMap - scala.collection.immutable + |ListMap($0) - [K, V](elems: (K, V)*): ListMap[K, V] + |new ListMap - [K, V]: ListMap[K, V] + |ListMap - scala.collection.mutable + |ListMapBuilder - [K, V]: ListMapBuilder[K, V] + |ConcurrentSkipListMap - java.util.concurrent |""".stripMargin, includeDetail = true, - topLines = Some(4) ) // https://github.com/scalameta/metals/issues/4004 diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index eadadd484089..ebca80dc0717 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -26,13 +26,12 @@ class CompletionSuite 
extends BaseCompletionSuite: | Lis@@ |}""".stripMargin, """ - |List scala.collection.immutable |List[A](elems: A*): List[A] + |List scala.collection.immutable |List - java.awt |List - java.util - |ListMap[K, V](elems: (K, V)*): ListMap[K, V] |""".stripMargin, - topLines = Some(5) + topLines = Some(4) ) @Test def member = @@ -179,8 +178,24 @@ class CompletionSuite extends BaseCompletionSuite: |object A { | TrieMap@@ |}""".stripMargin, - """|TrieMap scala.collection.concurrent - |TrieMap[K, V](elems: (K, V)*): TrieMap[K, V] + """|TrieMap[K, V](elems: (K, V)*): TrieMap[K, V] + |new TrieMap[K, V]: TrieMap[K, V] + |new TrieMap[K, V](hashf: Hashing[K], ef: Equiv[K]): TrieMap[K, V] + |TrieMap scala.collection.concurrent + |""".stripMargin + ) + + @Test def `no-companion-apply-in-new`= + check( + """ + |import scala.collection.concurrent._ + |object A { + | new TrieMap@@ + |}""".stripMargin, + // TrieMap should be filtered if it doesn't contain any types that can be constructed in `new` keyword context. 
+ """|TrieMap[K, V]: TrieMap[K, V] + |TrieMap[K, V](hashf: Hashing[K], ef: Equiv[K]): TrieMap[K, V] + |TrieMap scala.collection.concurrent |""".stripMargin ) @@ -216,16 +231,13 @@ class CompletionSuite extends BaseCompletionSuite: """ |import JavaCon@@ |""".stripMargin, - """|AsJavaConverters - scala.collection.convert - |JavaConverters - scala.collection + """|JavaConverters - scala.collection |JavaConversions - scala.concurrent |AsJavaConsumer - scala.jdk.FunctionWrappers + |AsJavaConverters - scala.collection.convert |FromJavaConsumer - scala.jdk.FunctionWrappers |AsJavaBiConsumer - scala.jdk.FunctionWrappers |AsJavaIntConsumer - scala.jdk.FunctionWrappers - |AsJavaLongConsumer - scala.jdk.FunctionWrappers - |FromJavaBiConsumer - scala.jdk.FunctionWrappers - |FromJavaIntConsumer - scala.jdk.FunctionWrappers |""".stripMargin ) @@ -473,8 +485,7 @@ class CompletionSuite extends BaseCompletionSuite: | |} """.stripMargin, - """|DelayedLazyVal scala.concurrent - |DelayedLazyVal[T](f: () => T, body: => Unit)(exec: ExecutionContext): DelayedLazyVal[T]""".stripMargin + "DelayedLazyVal[T](f: () => T, body: => Unit)(implicit exec: ExecutionContext): DelayedLazyVal[T]" ) @Test def local2 = @@ -618,8 +629,8 @@ class CompletionSuite extends BaseCompletionSuite: |} |""".stripMargin, """|Some(value) scala - |Some scala |Some[A](value: A): Some[A] + |Some scala |""".stripMargin ) @@ -630,8 +641,8 @@ class CompletionSuite extends BaseCompletionSuite: | case List(Som@@) |} |""".stripMargin, - """|Some scala - |Some[A](value: A): Some[A] + """|Some[A](value: A): Some[A] + |Some scala |""".stripMargin ) @@ -656,8 +667,8 @@ class CompletionSuite extends BaseCompletionSuite: |} |""".stripMargin, """|Some(value) scala - |Seq scala.collection.immutable - |Set scala.collection.immutable + |Set[A](elems: A*): Set[A] + |Seq[A](elems: A*): Seq[A] |""".stripMargin, topLines = Some(3) ) @@ -1154,8 +1165,7 @@ class CompletionSuite extends BaseCompletionSuite: |def main = | Testin@@ 
|""".stripMargin, - """|Testing a - |Testing(): Testing + """|Testing(): Testing |""".stripMargin ) @@ -1168,8 +1178,7 @@ class CompletionSuite extends BaseCompletionSuite: |def main = | Testin@@ |""".stripMargin, - """|Testing a - |Testing(a: Int, b: String): Testing + """|Testing(a: Int, b: String): Testing |""".stripMargin ) @@ -1314,28 +1323,28 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin, """|AClass[A <: Int] test.O |AClass test.O - |AbstractTypeClassManifest - scala.reflect.ClassManifestFactory """.stripMargin ) + val extensionResult = + """|Foo test + |Found - scala.collection.Searching + |Font - java.awt + |Form - java.text.Normalizer + |Format - java.text + |FontPeer - java.awt.peer + |FormView - javax.swing.text.html + |Formatter - java.util + |Formatter - java.util.logging + |FocusEvent - java.awt.event""".stripMargin + @Test def `extension-definition-scope` = check( """|trait Foo |object T: | extension (x: Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @Test def `extension-definition-symbol-search` = @@ -1354,18 +1363,7 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A <: Fo@@] |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @Test def `extension-definition-type-parameter-symbol-search` = @@ -1384,18 +1382,7 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | 
extension (using Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @@ -1405,18 +1392,7 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (x: Int)(using Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @Test def `extension-definition-mix-2` = @@ -1425,18 +1401,7 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (using Fo@@)(x: Int)(using Foo) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @Test def `extension-definition-mix-3` = @@ -1445,18 +1410,7 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (using Foo)(x: Int)(using Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @Test def `extension-definition-mix-4` = @@ -1465,18 +1419,7 @@ class 
CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](x: Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @Test def `extension-definition-mix-5` = @@ -1485,18 +1428,7 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Fo@@)(x: Int) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @Test def `extension-definition-mix-6` = @@ -1505,18 +1437,7 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Foo)(x: Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult ) @Test def `extension-definition-mix-7` = @@ -1525,18 +1446,7 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Foo)(x: Fo@@)(using Foo) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - 
|""".stripMargin + extensionResult ) @Test def `extension-definition-select` = @@ -1569,7 +1479,6 @@ class CompletionSuite extends BaseCompletionSuite: | extension [T](x: Test.TestSel@@) |""".stripMargin, """|TestSelect[T] test.Test - |TestSelect test.Test |""".stripMargin ) @@ -1665,11 +1574,11 @@ class CompletionSuite extends BaseCompletionSuite: check( """import scala.collection.{AbstractMap, @@} |""".stripMargin, - """GenIterable scala.collection - |GenMap scala.collection - |GenSeq scala.collection - |GenSet scala.collection - |GenTraversable scala.collection + """+: scala.collection + |:+ scala.collection + |AbstractIndexedSeqView scala.collection + |AbstractIterable scala.collection + |AbstractIterator scala.collection |""".stripMargin, topLines = Some(5) ) @@ -1719,7 +1628,6 @@ class CompletionSuite extends BaseCompletionSuite: | foo@@ |""".stripMargin, """|fooBar: List[Int] - |fooBar(n: Int): Int |""".stripMargin ) @@ -1729,7 +1637,12 @@ class CompletionSuite extends BaseCompletionSuite: | List@@ |""".stripMargin, """|List[A](elems: A*): List[A] - |ListMap[K, V](elems: (K, V)*): ListMap[K, V] + |ListSet[A](elems: A*): ListSet[A] - scala.collection.immutable + |ListMap[K, V](elems: (K, V)*): ListMap[K, V] - scala.collection.immutable + |new ListMap[K, V]: ListMap[K, V] - scala.collection.immutable + |new ListSet[A]: ListSet[A] - scala.collection.immutable + |ListMap[K, V](elems: (K, V)*): ListMap[K, V] - scala.collection.mutable + |new ListMap[K, V]: ListMap[K, V] - scala.collection.mutable |""".stripMargin, filter = _.contains("[") ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala index 52e565a5a78b..c8cfbd178f32 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala @@ 
-700,7 +700,7 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |object Main { | val a = ListBuffer($0) |}""".stripMargin, - filter = _.contains("[A]") + filter = _.startsWith("ListBuffer[A]") ) @Test def `type-import` = @@ -811,7 +811,6 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |""".stripMargin, """|fooBar: String |fooBar: List[Int] - |fooBar(n: Int): Int |""".stripMargin, ) @@ -827,8 +826,9 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: | |val j = MyTy@@ |""".stripMargin, - """|MyType(m: Long): MyType - |MyType - demo.other""".stripMargin, + """|MyType(m: Long): MyType - demo.other + |MyType - demo.other + """.stripMargin, ) @Test def `type-apply2` = @@ -843,8 +843,9 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: | |val j = MyTy@@ |""".stripMargin, - """|MyType(m: Long): MyType - |MyType - demo.other""".stripMargin, + """|MyType(m: Long): MyType - demo.other + |MyType - demo.other + """.stripMargin, ) @Test def `method-name-conflict` = From acfc62159da3609bc4a196bcb88af7549e5d802d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Fri, 12 Apr 2024 19:42:48 +0200 Subject: [PATCH 112/465] Add custom matchers for completions (fuzzy search for presentation compiler) (#19850) Fixes https://github.com/scalameta/metals/issues/4656 Fixes https://github.com/lampepfl/dotty/issues/5507 Fixes https://github.com/lampepfl/dotty/issues/17706 To ensure CI output will be the same on all runners, I'm scheduling runs on all jvm versions + windows. 
[test_windows_full] [test_java8] [test_java11] [test_java15] [test_java17] [test_java18] [test_java19] --- .../tools/dotc/interactive/Completion.scala | 20 ++-- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- .../tools/pc/completions/Completions.scala | 14 ++- .../tools/pc/base/BaseCompletionSuite.scala | 2 +- .../tests/completion/CompletionDocSuite.scala | 4 +- .../completion/CompletionExtensionSuite.scala | 35 ++++-- .../completion/CompletionKeywordSuite.scala | 30 +++-- .../completion/CompletionOverrideSuite.scala | 5 +- .../CompletionSnippetNegSuite.scala | 12 +- .../completion/CompletionSnippetSuite.scala | 7 +- .../pc/tests/completion/CompletionSuite.scala | 110 ++++++++++++++---- 11 files changed, 165 insertions(+), 76 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 44407daf600c..2ff8ad1c6535 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -71,10 +71,11 @@ object Completion: mode: Mode, rawPrefix: String, tpdPath: List[tpd.Tree], - untpdPath: List[untpd.Tree] + untpdPath: List[untpd.Tree], + customMatcher: Option[Name => Boolean] = None )(using Context): CompletionMap = val adjustedPath = typeCheckExtensionConstructPath(untpdPath, tpdPath, pos) - computeCompletions(pos, mode, rawPrefix, adjustedPath) + computeCompletions(pos, mode, rawPrefix, adjustedPath, customMatcher) /** * Inspect `path` to determine what kinds of symbols should be considered. 
@@ -193,11 +194,12 @@ object Completion: .flatten.getOrElse(tpdPath) private def computeCompletions( - pos: SourcePosition, mode: Mode, rawPrefix: String, adjustedPath: List[tpd.Tree] + pos: SourcePosition, mode: Mode, rawPrefix: String, adjustedPath: List[tpd.Tree], matches: Option[Name => Boolean] )(using Context): CompletionMap = val hasBackTick = rawPrefix.headOption.contains('`') val prefix = if hasBackTick then rawPrefix.drop(1) else rawPrefix - val completer = new Completer(mode, prefix, pos) + val matches0 = matches.getOrElse(_.startsWith(prefix)) + val completer = new Completer(mode, pos, matches0) val result = adjustedPath match // Ignore synthetic select from `This` because in code it was `Ident` @@ -209,7 +211,6 @@ object Completion: case _ => completer.scopeCompletions interactiv.println(i"""completion info with pos = $pos, - | prefix = ${completer.prefix}, | term = ${completer.mode.is(Mode.Term)}, | type = ${completer.mode.is(Mode.Type)}, | scope = ${completer.mode.is(Mode.Scope)}, @@ -311,13 +312,13 @@ object Completion: /** Computes code completions depending on the context in which completion is requested * @param mode Should complete names of terms, types or both - * @param prefix The prefix that all suggested completions should start with * @param pos Cursor position where completion was requested + * @param matches Function taking name used to filter completions * * For the results of all `xyzCompletions` methods term names and type names are always treated as different keys in the same map * and they never conflict with each other. */ - class Completer(val mode: Mode, val prefix: String, pos: SourcePosition): + class Completer(val mode: Mode, pos: SourcePosition, matches: Name => Boolean): /** Completions for terms and types that are currently in scope: * the members of the current class, local definitions and the symbols that have been imported, * recursively adding completions from outer scopes. 
@@ -524,7 +525,7 @@ object Completion: // There are four possible ways for an extension method to be applicable // 1. The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. - val termCompleter = new Completer(Mode.Term, prefix, pos) + val termCompleter = new Completer(Mode.Term, pos, matches) val extMethodsInScope = termCompleter.scopeCompletions.toList.flatMap: case (name, denots) => denots.collect: case d: SymDenotation if d.isTerm && d.termRef.symbol.is(Extension) => (d.termRef, name.asTermName) @@ -556,7 +557,7 @@ object Completion: * 2. satisfy [[Completion.isValidCompletionSymbol]] */ private def include(denot: SingleDenotation, nameInScope: Name)(using Context): Boolean = - nameInScope.startsWith(prefix) && + matches(nameInScope) && completionsFilter(NoType, nameInScope) && isValidCompletionSymbol(denot.symbol, mode) @@ -605,7 +606,6 @@ object Completion: private def implicitConversionTargets(qual: tpd.Tree)(using Context): Set[SearchSuccess] = { val typer = ctx.typer val conversions = new typer.ImplicitSearch(defn.AnyType, qual, pos.span).allImplicits - conversions.map(_.tree.typeOpt) interactiv.println(i"implicit conversion targets considered: ${conversions.toList}%, %") conversions diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5ffc81744d85..ffd9d7fd8515 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -531,7 +531,7 @@ object Implicits: |must be more specific than $target""" :: Nil override def msg(using Context) = - super.msg.append("\nThe expected type $target is not specific enough, so no search was attempted") + super.msg.append(i"\nThe expected type $target is not specific enough, so no search was attempted") override def toString = s"TooUnspecific" end TooUnspecific diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index 7a10c9e4804d..3f2d89a15b72 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -16,6 +16,7 @@ import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Comments.Comment import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Denotations.SingleDenotation import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.NameOps.* @@ -26,14 +27,13 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.Completion.Mode +import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.SrcPos import dotty.tools.pc.AutoImports.AutoImportsGenerator -import dotty.tools.pc.completions.OverrideCompletions.OverrideExtractor import dotty.tools.pc.buildinfo.BuildInfo +import dotty.tools.pc.completions.OverrideCompletions.OverrideExtractor import dotty.tools.pc.utils.MtagsEnrichments.* -import dotty.tools.dotc.core.Denotations.SingleDenotation - class Completions( text: String, @@ -102,9 +102,13 @@ class Completions( end if end includeSymbol + lazy val fuzzyMatcher: Name => Boolean = name => + if completionMode.is(Mode.Member) then CompletionFuzzy.matchesSubCharacters(completionPos.query, name.toString) + else CompletionFuzzy.matches(completionPos.query, name.toString) + def enrichedCompilerCompletions(qualType: Type): (List[CompletionValue], SymbolSearch.Result) = val compilerCompletions = Completion - .rawCompletions(completionPos.originalCursorPosition, completionMode, completionPos.query, path, adjustedPath) + 
.rawCompletions(completionPos.originalCursorPosition, completionMode, completionPos.query, path, adjustedPath, Some(fuzzyMatcher)) compilerCompletions .toList @@ -421,7 +425,7 @@ class Completions( // class Fo@@ case (td: TypeDef) :: _ - if Fuzzy.matches( + if CompletionFuzzy.matches( td.symbol.name.decoded.replace(Cursor.value, "").nn, filename ) => diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala index bfb31906bce1..776aab8bc2f7 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala @@ -123,7 +123,7 @@ abstract class BaseCompletionSuite extends BasePCSuite: if (assertSingleItem && items.length != 1) then fail( - s"expected single completion item, obtained ${items.length} items.\n${items}" + s"expected single completion item, obtained ${items.length} items.\n${items.map(_.getLabel.nn + "\n")}" ) if (items.size <= itemIndex) then diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala index b487611b9ea1..ec0b6dc20688 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala @@ -156,8 +156,8 @@ class CompletionDocSuite extends BaseCompletionSuite: |Found documentation for scala/collection/Iterator. 
|Iterator scala.collection |""".stripMargin, - - includeDocs = true + includeDocs = true, + topLines = Some(1) ) @Test def `scala5` = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtensionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtensionSuite.scala index f48ba06f699c..e67c31329c1c 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtensionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtensionSuite.scala @@ -17,12 +17,14 @@ class CompletionExtensionSuite extends BaseCompletionSuite: |def main = 100.inc@@ |""".stripMargin, """|incr: Int (extension) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @Test def `simple-old-syntax` = check( - """|package example + """package example | |object Test: | implicit class TestOps(a: Int): @@ -30,8 +32,9 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | |def main = 100.test@@ |""".stripMargin, - """|testOps(b: Int): String (implicit) - |""".stripMargin + """testOps(b: Int): String (implicit) + |""".stripMargin, + topLines = Some(1) ) @Test def `simple2` = @@ -93,8 +96,10 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | |def main = "foo".iden@@ |""".stripMargin, - """|identity: String (implicit) - |""".stripMargin // identity2 won't be available + """|identity: String (implicit) + |""".stripMargin, // identity2 won't be available + filter = _.contains("(implicit)") + ) @Test def `filter-by-type-subtype` = @@ -152,7 +157,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def incr: Int = num + 1 | |def main = 100.incr - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) @Test def `simple-edit-old` = @@ -174,7 +180,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def incr: Int = num + 1 | |def main = 100.incr - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) @Test 
def `simple-edit-suffix` = @@ -262,6 +269,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def main = 100.inc@@ |""".stripMargin, """|incr: Int (extension) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -276,6 +285,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def main = 100.inc@@ |""".stripMargin, """|incr: Int (implicit) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -290,6 +301,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def main = 100.inc@@ |""".stripMargin, """|incr: Int (extension) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -304,6 +317,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def main = 100.inc@@ |""".stripMargin, """|incr: Int (implicit) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -391,7 +406,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: |testVal: Int (implicit) |testVar: Int (implicit) |testOps(b: Int): String (implicit) - |""".stripMargin + |""".stripMargin, + topLines = Some(4) ) @Test def `implicit-val-edit` = @@ -413,5 +429,6 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | val testVal: Int = 42 | |def main = 100.testVal - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala index cc6751454d4f..bf7077d47b3f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala @@ -691,28 +691,26 @@ class CompletionKeywordSuite extends BaseCompletionSuite: @Test def `derives-with-extends` = check( - """ - |package foo - | - |trait Bar {} - |trait Baz {} - | - |class Foo(x: Int) extends Bar with 
Baz der@@ - """.stripMargin, + """|package foo + | + |trait Bar {} + |trait Baz {} + | + |class Foo(x: Int) extends Bar with Baz der@@ + |""".stripMargin, """|derives |""".stripMargin ) @Test def `derives-with-constructor-extends` = check( - """ - |package foo - | - |trait Bar {} - |class Baz(b: Int) {} - | - |class Foo(x: Int) extends Bar with Baz(1) der@@ - """.stripMargin, + """|package foo + | + |trait Bar {} + |class Baz(b: Int) {} + | + |class Foo(x: Int) extends Bar with Baz(1) der@@ + |""".stripMargin, """|derives |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala index b3abc1474375..94c444b0feb9 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala @@ -925,12 +925,15 @@ class CompletionOverrideSuite extends BaseCompletionSuite: | def@@ |} |""".stripMargin, + """|def hello1: Int |override def equals(x$0: Any): Boolean |override def hashCode(): Int + |override def toString(): String + |override val hello2: Int |""".stripMargin, includeDetail = false, - topLines = Some(3) + topLines = Some(5) ) @Test def `path-dependent` = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetNegSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetNegSuite.scala index ccd989d811b5..8cbbad0e6ef2 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetNegSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetNegSuite.scala @@ -16,12 +16,12 @@ class CompletionSnippetNegSuite extends BaseCompletionSuite: @Test def `member` = checkSnippet( - """ - |object Main { - | List.appl@@ - |} - |""".stripMargin, - "apply" + """|object Main { + | 
List.appl@@ + |} + |""".stripMargin, + """|apply + |unapplySeq""".stripMargin ) @Test def `scope` = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala index b601a63ff234..5769304919ca 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala @@ -15,6 +15,7 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |} |""".stripMargin, """|apply($0) + |unapplySeq($0) |""".stripMargin ) @@ -429,7 +430,8 @@ class CompletionSnippetSuite extends BaseCompletionSuite: | extension (s: String) | def bar = 0 | val bar = "abc".bar - """.stripMargin + """.stripMargin, + filter = _.contains("bar: Int") ) // https://github.com/scalameta/metals/issues/4004 @@ -446,5 +448,6 @@ class CompletionSnippetSuite extends BaseCompletionSuite: | extension (s: String) | def bar() = 0 | val bar = "abc".bar() - """.stripMargin + """.stripMargin, + filter = _.contains("bar: Int") ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index ebca80dc0717..10a57f705ceb 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -404,6 +404,7 @@ class CompletionSuite extends BaseCompletionSuite: |Function20 scala |Function21 scala |Function22 scala + |PartialFunction scala |""".stripMargin, topLines = Some(25) ) @@ -531,7 +532,6 @@ class CompletionSuite extends BaseCompletionSuite: |until(end: Long): Exclusive[Long] |until(end: Long, step: Long): Exclusive[Long] |""".stripMargin, - postProcessObtained = _.replace("Float", "Double"), stableOrder = false ) @@ -795,6 +795,10 @@ class 
CompletionSuite extends BaseCompletionSuite: |} |""".stripMargin, """|intNumber: Int + |toInt: Int + |instance: Int + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -1105,7 +1109,8 @@ class CompletionSuite extends BaseCompletionSuite: |} |""".stripMargin, """|first: java.util.List[Int] - |""".stripMargin + |""".stripMargin, + topLines = Some(1) ) @Test def `object-at-type-pos` = @@ -1329,14 +1334,7 @@ class CompletionSuite extends BaseCompletionSuite: val extensionResult = """|Foo test |Found - scala.collection.Searching - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event""".stripMargin + """.stripMargin @Test def `extension-definition-scope` = check( @@ -1344,7 +1342,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (x: Fo@@) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-symbol-search` = @@ -1363,7 +1362,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A <: Fo@@] |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-type-parameter-symbol-search` = @@ -1382,7 +1382,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (using Fo@@) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @@ -1392,7 +1393,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (x: Int)(using Fo@@) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-2` = @@ -1401,7 +1403,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (using Fo@@)(x: Int)(using Foo) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def 
`extension-definition-mix-3` = @@ -1410,7 +1413,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (using Foo)(x: Int)(using Fo@@) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-4` = @@ -1419,7 +1423,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](x: Fo@@) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-5` = @@ -1428,7 +1433,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Fo@@)(x: Int) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-6` = @@ -1437,7 +1443,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Foo)(x: Fo@@) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-7` = @@ -1446,7 +1453,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Foo)(x: Fo@@)(using Foo) |""".stripMargin, - extensionResult + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-select` = @@ -1490,6 +1498,7 @@ class CompletionSuite extends BaseCompletionSuite: """|object O: | val a = List.apply($0) |""".stripMargin, + assertSingleItem = false ) @Test def `multiline-comment` = @@ -1550,13 +1559,21 @@ class CompletionSuite extends BaseCompletionSuite: assertSingleItem = false ) - @Test def `multi-export` = check( """export scala.collection.{AbstractMap, Set@@} |""".stripMargin, """Set scala.collection |SetOps scala.collection + |AbstractSet scala.collection + |BitSet scala.collection + |BitSetOps scala.collection + |SortedSet scala.collection + |SortedSetFactoryDefaults scala.collection + |SortedSetOps scala.collection + |StrictOptimizedSetOps scala.collection + |StrictOptimizedSortedSetOps scala.collection + |GenSet = 
scala.collection.Set[X] |""".stripMargin ) @@ -1566,6 +1583,15 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin, """Set scala.collection |SetOps scala.collection + |AbstractSet scala.collection + |BitSet scala.collection + |BitSetOps scala.collection + |SortedSet scala.collection + |SortedSetFactoryDefaults scala.collection + |SortedSetOps scala.collection + |StrictOptimizedSetOps scala.collection + |StrictOptimizedSortedSetOps scala.collection + |GenSet = scala.collection.Set[X] |""".stripMargin, ) @@ -1606,7 +1632,8 @@ class CompletionSuite extends BaseCompletionSuite: | List(1,2,3).tes@@ |""".stripMargin, """|test(p: Int => Boolean): List[Int] - |""".stripMargin + |""".stripMargin, + topLines = Some(1) ) @Test def `old-style-extension-type-variable-inference` = @@ -1618,7 +1645,8 @@ class CompletionSuite extends BaseCompletionSuite: | List(1,2,3).tes@@ |""".stripMargin, """|test(p: Int => Boolean): List[Int] - |""".stripMargin + |""".stripMargin, + topLines = Some(1) ) @Test def `instantiate-type-vars-in-extra-apply-completions` = @@ -1643,6 +1671,7 @@ class CompletionSuite extends BaseCompletionSuite: |new ListSet[A]: ListSet[A] - scala.collection.immutable |ListMap[K, V](elems: (K, V)*): ListMap[K, V] - scala.collection.mutable |new ListMap[K, V]: ListMap[K, V] - scala.collection.mutable + |LazyList[A](elems: A*): LazyList[A] |""".stripMargin, filter = _.contains("[") ) @@ -1774,3 +1803,38 @@ class CompletionSuite extends BaseCompletionSuite: filter = _ == "Override java.lang" ) + @Test def `fuzzy-search-test` = + check( + """| + |object MyInterface { + | def someMethod(x: Int): Int = ??? + |} + |object Test { + | MyInterface.m@@ + |} + |""".stripMargin, + """|someMethod(x: Int): Int + |""".stripMargin, + topLines = Some(1) + ) + + @Test def `fuzzy-search-test-multiple` = + check( + """| + |trait MyInterface { + | def someMethod(x: Int): Int = ??? 
+ |} + |object Test { + | extension (interface: MyInterface) def someExtMethod(x: Int): Int = ??? + | implicit class MyInterfaceExtension(interface: MyInterface): + | def someOldExtMethod(x: Int): Int = ??? + | val x: MyInterface = ??? + | x.m@@ + |} + |""".stripMargin, + """|someMethod(x: Int): Int + |someExtMethod(x: Int): Int + |someOldExtMethod(x: Int): Int + |""".stripMargin, + topLines = Some(3) + ) From 6731181056d1e0ad38fdb83a899545bfaa17d443 Mon Sep 17 00:00:00 2001 From: Stephane Bersier Date: Fri, 12 Apr 2024 14:43:10 -0400 Subject: [PATCH 113/465] Update Quotes.scala --- library/src/scala/quoted/Quotes.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index b49763c38221..2c2d35d6cf44 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -589,8 +589,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => trait DefDefModule { this: DefDef.type => /** Create a method definition `def f[..](...)` with the signature defined in the symbol. * - * The `rhsFn` is a function that receives references to its parameters and should return - * `Some` containing the implementation of the method. Returns `None` the method has no implementation. + * The `rhsFn` is a function that receives references to its parameters, and should return + * `Some` containing the implementation of the method, or `None` if the method has no implementation. * Any definition directly inside the implementation should have `symbol` as owner. * * Use `Symbol.asQuotes` to create the rhs using quoted code. @@ -666,8 +666,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => trait ValDefModule { this: ValDef.type => /** Create a value definition `val x`, `var x` or `lazy val x` with the signature defined in the symbol. * - * The `rhs` should return be `Some` containing the implementation of the method. 
- * Returns `None` the method has no implementation. + * The `rhs` should return `Some` containing the implementation of the method, + * or `None` if the method has no implementation. * Any definition directly inside the implementation should have `symbol` as owner. * * Use `Symbol.asQuotes` to create the rhs using quoted code. From f58cbf99fc38d698ae52a5f11b4e780225eec342 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 12 Apr 2024 13:37:37 +0200 Subject: [PATCH 114/465] Improve ConstraintHandling of SkolemTypes by retaining instantiated type vars in LevelAvoidMap when possible. Fixes #19955 Consider pos/i19955a as an example. We try to adapt the given_IsInt_U for skolems of the form (?2 : Int) and (?7 : ?8.Out) where ?8 is an unknown value of type given_IsWrapOfInt_R[Int, Wrap[Int]], but only the former succeeds, even though ?8.Out is trivially within the bounds of U. The typing trace from the two implicit search results includes: ```scala [log typer] ==> typedImplicit(Cand(given_IsInt_U L4), IsInt[(?2 : Int)], , <399..399>)? [log typer] ==> isSubType(IsInt[U], IsInt[(?2 : Int)])? [log typer] ==> isSameType((?2 : Int), U)? [log typer] ==> isSubType((?2 : Int), U)? [log typer] <== isSubType((?2 : Int), U) = true [log typer] ==> isSubType(U, (?2 : Int))? [log typer] <== isSubType(U, (?2 : Int)) = true [log typer] <== isSameType((?2 : Int), U) = true [log typer] <== isSubType(IsInt[U], IsInt[(?2 : Int)]) = true [log typer] <== typedImplicit(Cand(given_IsInt_U L4), IsInt[(?2 : Int)], , <399..399>) = SearchSuccess: (given_IsInt_U : [U <: Int]: IsInt[U]) via given_IsInt_U[(?2 : Int)] [log typer] ==> typedImplicit(Cand(given_IsInt_U L4), IsInt[(?7 : ?8.Out)], , <423..423>)? [log typer] ==> isSubType(IsInt[U], IsInt[(?7 : ?8.Out)])? [log typer] ==> isSameType((?7 : ?8.Out), U)? [log typer] ==> isSubType((?7 : ?8.Out), U)? [log typer] <== isSubType((?7 : ?8.Out), U) = true [log typer] ==> isSubType(Int, (?7 : ?8.Out))? 
[log typer] <== isSubType(Int, (?7 : ?8.Out)) = false [log typer] <== isSameType((?7 : ?8.Out), U) = false [log typer] <== isSubType(IsInt[U], IsInt[(?7 : ?8.Out)]) = false [log typer] <== typedImplicit(Cand(given_IsInt_U L4), IsInt[(?7 : ?8.Out)], , <423..423>) = Search Failure: given_IsInt_U[U] ``` The difference in the failing case from the passing case is that the type variable U has been instantiated to Int by the first direction of isSameType before attempting the second direction. If we look closer at the ConstraintHandling: ``` [log typer] ==> addConstraint(U, (?2 : Int), true)? [log typer] ==> legalBound(U, (?2 : Int), false)? [log typer] ==> ApproximatingTypeMap#derivedSkolemType((?2 : Int), Int)? [log typer] <== ApproximatingTypeMap#derivedSkolemType((?2 : Int), Int) = (?2 : Int) [log typer] <== legalBound(U, (?2 : Int), false) = (?2 : Int) [log typer] ==> isSubType((?2 : Int), Int)? [log typer] <== isSubType((?2 : Int), Int) = true [log typer] <== addConstraint(U, (?2 : Int), true) = true [log typer] ==> addConstraint(U, (?7 : ?8.Out), true)? [log typer] ==> legalBound(U, (?7 : ?8.Out), false)? [log typer] ==> ApproximatingTypeMap#derivedSkolemType((?8 : given_IsWrapOfInt_R[Int, Wrap[Int]]), given_IsWrapOfInt_R[Int, Wrap[Int]])? [log typer] <== ApproximatingTypeMap#derivedSkolemType((?8 : given_IsWrapOfInt_R[Int, Wrap[Int]]), given_IsWrapOfInt_R[Int, Wrap[Int]]) = given_IsWrapOfInt_R[Int, Wrap[Int]] [log typer] ==> ApproximatingTypeMap#derivedSkolemType((?7 : ?8.Out), Int)? [log typer] <== ApproximatingTypeMap#derivedSkolemType((?7 : ?8.Out), Int) = Int [log typer] <== legalBound(U, (?7 : ?8.Out), false) = Int [log typer] <== addConstraint(U, (?7 : ?8.Out), true) = true ``` we can see that the issue lies in the approximation in the LevelAvoidMap used to obtain the legalBound. 
Modifying `ApproximatingTypeMap#derivedSkolemType` from `if info eq tp.info then tp`, to `if info frozen_=:= tp.info then tp.derivedSkolem(info)`, allows each direction of the subtyping checks in `isSameType` to obtain the more precise skolem as legal bound. But it does not solve the issue, since they obtain distinct skolems even if they equivalently-shaped, the constraints are still unsatisfiable. We can instead try to make `info eq tp.info` be true. It was not the case in the above example because `given_IsWrapOfInt_R[Int, Wrap[Int]]` contained a type variable `R := Wrap[Int]` which was substituted by the map. We can modify TypeMap to keep type variables rather than replace them by their instance when possible, i.e. when the instance is itself not transformed by the map. This solves the issue but breaks other places which assumed the stripping of type vars in TypeMaps. That problem is avoided by doing the changes in LevelAvoidMap only. --- .../tools/dotc/core/ConstraintHandling.scala | 13 ++++++++- .../test/dotc/pos-test-pickling.blacklist | 3 +++ tests/pos/i19955a.scala | 27 +++++++++++++++++++ tests/pos/i19955b.scala | 17 ++++++++++++ tests/pos/i20053b.scala | 22 +++++++++++++++ 5 files changed, 81 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i19955a.scala create mode 100644 tests/pos/i19955b.scala create mode 100644 tests/pos/i20053b.scala diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 109929f0c6f5..93f8066d6b9f 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -248,7 +248,18 @@ trait ConstraintHandling { override def apply(tp: Type): Type = tp match case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel, maxLevel) => legalVar(tp) - // TypeParamRef can occur in tl bounds + // TypeParamRef can occur in tl bounds + case tp: TypeVar if tp.isInstantiated => 
+ /* `TypeMap` always strips instantiated type variables in `mapOver`. + * We can keep the original type var if its instance is not transformed + * by the LevelAvoidMap. This allows for simpler bounds and for + * derived skolems (see ApproximatingTypeMap#derivedSkolemType) to + * remain the same by keeping their info unchanged. Loosing skolems + * in the legalBound computation prevented type vars from being + * instantiated with theses skolems, even if they were within the bounds. + */ + val res = apply(tp.instanceOpt) + if res eq tp.instanceOpt then tp else res case tp: TypeParamRef => constraint.typeVarOfParam(tp) match case tvar: TypeVar => diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 3ea8b550f160..638455e7f2de 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -119,4 +119,7 @@ i7445b.scala # more aggresive reduce projection makes a difference i15525.scala +i19955a.scala +i19955b.scala +i20053b.scala diff --git a/tests/pos/i19955a.scala b/tests/pos/i19955a.scala new file mode 100644 index 000000000000..b8ea95d41d24 --- /dev/null +++ b/tests/pos/i19955a.scala @@ -0,0 +1,27 @@ + +trait Summon[R, T <: R]: + type Out +object Summon: + given [R, T <: R]: Summon[R, T] with + type Out = R + +trait DFTypeAny +trait DFBits[W <: Int] extends DFTypeAny +class DFVal[+T <: DFTypeAny] +type DFValAny = DFVal[DFTypeAny] +type DFValOf[+T <: DFTypeAny] = DFVal[T] +trait Candidate[R]: + type OutW <: Int +object Candidate: + type Aux[R, O <: Int] = Candidate[R] { type OutW = O } + given [W <: Int, R <: DFValOf[DFBits[W]]]: Candidate[R] with + type OutW = W + +extension [L](lhs: L) def foo(using es: Summon[L, lhs.type]): Unit = ??? +extension [L <: DFValAny](lhs: L)(using icL: Candidate[L]) def baz: DFValOf[DFBits[icL.OutW]] = ??? +extension [L <: DFValAny, W <: Int](lhs: L)(using icL: Candidate.Aux[L, W]) + def bazAux: DFValOf[DFBits[W]] = ??? 
+ +val x = new DFVal[DFBits[4]] +val works = x.bazAux.foo +val fails = x.baz.foo \ No newline at end of file diff --git a/tests/pos/i19955b.scala b/tests/pos/i19955b.scala new file mode 100644 index 000000000000..99e101b312b1 --- /dev/null +++ b/tests/pos/i19955b.scala @@ -0,0 +1,17 @@ + +trait Wrap[W] + +trait IsWrapOfInt[R]: + type Out <: Int +given [W <: Int, R <: Wrap[W]]: IsWrapOfInt[R] with + type Out = Int + +trait IsInt[U <: Int] +given [U <: Int]: IsInt[U] = ??? + +extension [L](lhs: L) def get(using ev: IsWrapOfInt[L]): ev.Out = ??? +extension (lhs: Int) def isInt(using IsInt[lhs.type]): Unit = ??? + +val x: Wrap[Int] = ??? +val works = (x.get: Int).isInt +val fails = x.get.isInt diff --git a/tests/pos/i20053b.scala b/tests/pos/i20053b.scala new file mode 100644 index 000000000000..25180d56bbae --- /dev/null +++ b/tests/pos/i20053b.scala @@ -0,0 +1,22 @@ + +trait Sub[R, T >: R] +given [R, T >: R]: Sub[R, T] with {} + +trait Candidate[-R]: + type OutP +given [P]: Candidate[Option[P]] with + type OutP = P + +extension [L](lhs: L) + def ^^^[P](rhs: Option[P]) + (using es: Sub[lhs.type, Any]) + (using c: Candidate[L]) + (using check: c.type <:< Any): Option[c.OutP] = ??? + +val x: Option[Boolean] = ??? 
+ +val z1 = x ^^^ x // Ok +val z2 = z1 ^^^ x // Ok +val zz = ^^^[Option[Boolean]](x ^^^ x)(x) // Ok + +val zzz = x ^^^ x ^^^ x // Error before changes From 6e5f540ec72a22d5f96419fcc0fd7ec7e2d30f49 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Sat, 13 Apr 2024 11:03:17 +0200 Subject: [PATCH 115/465] Move override of mapOverTypeVar logic in ApproximatingTypeMap --- .../tools/dotc/core/ConstraintHandling.scala | 13 +------------ compiler/src/dotty/tools/dotc/core/Types.scala | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 93f8066d6b9f..109929f0c6f5 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -248,18 +248,7 @@ trait ConstraintHandling { override def apply(tp: Type): Type = tp match case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel, maxLevel) => legalVar(tp) - // TypeParamRef can occur in tl bounds - case tp: TypeVar if tp.isInstantiated => - /* `TypeMap` always strips instantiated type variables in `mapOver`. - * We can keep the original type var if its instance is not transformed - * by the LevelAvoidMap. This allows for simpler bounds and for - * derived skolems (see ApproximatingTypeMap#derivedSkolemType) to - * remain the same by keeping their info unchanged. Loosing skolems - * in the legalBound computation prevented type vars from being - * instantiated with theses skolems, even if they were within the bounds. 
- */ - val res = apply(tp.instanceOpt) - if res eq tp.instanceOpt then tp else res + // TypeParamRef can occur in tl bounds case tp: TypeParamRef => constraint.typeVarOfParam(tp) match case tvar: TypeVar => diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ba48b6a0f2e6..7fb4b15f801e 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -6180,6 +6180,10 @@ object Types extends TypeUtils { variance = saved derivedLambdaType(tp)(ptypes1, this(restpe)) + protected def mapOverTypeVar(tp: TypeVar) = + val inst = tp.instanceOpt + if (inst.exists) apply(inst) else tp + def isRange(tp: Type): Boolean = tp.isInstanceOf[Range] protected def mapCapturingType(tp: Type, parent: Type, refs: CaptureSet, v: Int): Type = @@ -6217,8 +6221,7 @@ object Types extends TypeUtils { derivedTypeBounds(tp, lo1, this(tp.hi)) case tp: TypeVar => - val inst = tp.instanceOpt - if (inst.exists) apply(inst) else tp + mapOverTypeVar(tp) case tp: ExprType => derivedExprType(tp, this(tp.resultType)) @@ -6632,6 +6635,16 @@ object Types extends TypeUtils { tp.derivedLambdaType(tp.paramNames, formals, restpe) } + override protected def mapOverTypeVar(tp: TypeVar) = + val inst = tp.instanceOpt + if !inst.exists then tp + else + // We can keep the original type var if its instance is not transformed + // by the ApproximatingTypeMap. This allows for simpler bounds and for + // derivedSkolemType to retain more skolems, by keeping the info unchanged. + val res = apply(inst) + if res eq inst then tp else res + protected def reapply(tp: Type): Type = apply(tp) } From 4cea282d8153cd96cc1b5261ac3ddfb166b34b9f Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 14 Apr 2024 13:38:25 +0200 Subject: [PATCH 116/465] Discard poly-functions when trying to resolve overloading Discard poly-functions when trying to resolve overloading using subsequent parameter lists. 
Polyfunctions don't have a symbol, so the logic of remapping arguments does not work for them. --- .../src/dotty/tools/dotc/typer/Applications.scala | 4 ++-- tests/pos/i20176.scala | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i20176.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index abe69d36cb69..184b250e94fb 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -2267,13 +2267,13 @@ trait Applications extends Compatibility { case _ => (Nil, 0) /** Resolve overloading by mapping to a different problem where each alternative's - * type is mapped with `f`, alternatives with non-existing types are dropped, and the + * type is mapped with `f`, alternatives with non-existing types or symbols are dropped, and the * expected type is `pt`. Map the results back to the original alternatives. */ def resolveMapped(alts: List[TermRef], f: TermRef => Type, pt: Type)(using Context): List[TermRef] = val reverseMapping = alts.flatMap { alt => val t = f(alt) - if t.exists then + if t.exists && alt.symbol.exists then val (trimmed, skipped) = trimParamss(t.stripPoly, alt.symbol.rawParamss) val mappedSym = alt.symbol.asTerm.copy(info = t) mappedSym.rawParamss = trimmed diff --git a/tests/pos/i20176.scala b/tests/pos/i20176.scala new file mode 100644 index 000000000000..df0c6cc1e8a7 --- /dev/null +++ b/tests/pos/i20176.scala @@ -0,0 +1,12 @@ +type Accumulator[A] + +object Accumulator { + + val usage = + use[Int]: + "asd" + + inline def use[A](using DummyImplicit): [B] => Any => Any = ??? + + inline def use[A]: [B] => Any => Any = ??? 
+} From 2afcf397a8143f94e9f0a2686ed0a3e8d57d01d6 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Sat, 13 Apr 2024 23:13:13 +0200 Subject: [PATCH 117/465] Add regression tests Closes #19749 Closes #16208 --- tests/pos/i16208.scala | 12 ++++++++++++ tests/pos/i19749.scala | 15 +++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 tests/pos/i16208.scala create mode 100644 tests/pos/i19749.scala diff --git a/tests/pos/i16208.scala b/tests/pos/i16208.scala new file mode 100644 index 000000000000..18455f2e698c --- /dev/null +++ b/tests/pos/i16208.scala @@ -0,0 +1,12 @@ + +class Ann(x: Any) extends annotation.Annotation +object Message: + implicit def toNoExplanation(str: String): Message @Ann(str) = ??? +class Message + +object report: + def error(x: Message): Unit = ??? + +def test = + report.error("a") // works + report.error("a".stripMargin) // was an error diff --git a/tests/pos/i19749.scala b/tests/pos/i19749.scala new file mode 100644 index 000000000000..367c5f61bdce --- /dev/null +++ b/tests/pos/i19749.scala @@ -0,0 +1,15 @@ +import scala.deriving.Mirror + +case class A(x: Int, y: String) + +trait SomeTrait[T] + +object SomeTrait: + given [T]: SomeTrait[T] with {} + +def f1[T](using p: Mirror.ProductOf[T]): Tuple.Elem[p.MirroredElemTypes, 0] = ??? + +def f2[T, R](f: T => R)(using SomeTrait[R]) = ??? + +// Scala3.3 is fine, 3.4 has compilation errors, p MirroredElemTypes type is missing and has been changed to Nothing +val x = f2(_ => f1[A]) From 822e792894753ded1d6ce06a94592c3ba48c7eb6 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Mar 2024 16:54:17 +0100 Subject: [PATCH 118/465] Avoid crash when superType does not exist after erasure Fixes #19929 Two main changes: - In TypeErasure, throw a TypeError instead of a FatalError if a supertype of an applied type does not exist. That way, we get a proper error with a position. - Move some catch-and-rethrow logic from ReTyper to TreeChecker. 
ReTyper alreayd had special exceptions that disabled the logic for all uses of ReTyper except TreeChecker. Unfortunately the ReTyper override also disabled the special TypeError handling in Typer. --- .../dotty/tools/dotc/core/TypeErasure.scala | 8 +-- .../dotty/tools/dotc/core/TypeErrors.scala | 3 +- .../tools/dotc/transform/TreeChecker.scala | 54 ++++++++++--------- .../src/dotty/tools/dotc/typer/ReTyper.scala | 7 --- tests/neg/i19929.check | 5 ++ tests/neg/i19929.scala | 5 ++ 6 files changed, 45 insertions(+), 37 deletions(-) create mode 100644 tests/neg/i19929.check create mode 100644 tests/neg/i19929.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 0474aff4087a..48fb1bab2da1 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -751,12 +751,12 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst private def checkedSuperType(tp: TypeProxy)(using Context): Type = val tp1 = tp.translucentSuperType if !tp1.exists then - val msg = tp.typeConstructor match + val typeErr = tp.typeConstructor match case tycon: TypeRef => - MissingType(tycon.prefix, tycon.name).toMessage.message + MissingType(tycon.prefix, tycon.name) case _ => - i"Cannot resolve reference to $tp" - throw FatalError(msg) + TypeError(em"Cannot resolve reference to $tp") + throw typeErr tp1 /** Widen term ref, skipping any `()` parameter of an eventual getter. Used to erase a TermRef. 
diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 240bc4eebd84..13fe02b712bc 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -46,7 +46,8 @@ abstract class TypeError(using creationContext: Context) extends Exception(""): def toMessage(using Context): Message /** Uses creationContext to produce the message */ - override def getMessage: String = toMessage.message + override def getMessage: String = + try toMessage.message catch case ex: Throwable => "TypeError" object TypeError: def apply(msg: Message)(using Context) = new TypeError: diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index f1f703fb07ee..0515a6978a47 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -418,31 +418,35 @@ object TreeChecker { } override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = { - val res = tree match { - case _: untpd.TypedSplice | _: untpd.Thicket | _: EmptyValDef[?] 
=> - super.typedUnadapted(tree, pt, locked) - case _ if tree.isType => - promote(tree) - case _ => - val tree1 = super.typedUnadapted(tree, pt, locked) - def isSubType(tp1: Type, tp2: Type) = - (tp1 eq tp2) || // accept NoType / NoType - (tp1 <:< tp2) - def divergenceMsg(tp1: Type, tp2: Type) = - s"""Types differ - |Original type : ${tree.typeOpt.show} - |After checking: ${tree1.tpe.show} - |Original tree : ${tree.show} - |After checking: ${tree1.show} - |Why different : - """.stripMargin + core.TypeComparer.explained(_.isSubType(tp1, tp2)) - if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted - assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) - tree1 - } - checkNoOrphans(res.tpe) - phasesToCheck.foreach(_.checkPostCondition(res)) - res + try + val res = tree match + case _: untpd.TypedSplice | _: untpd.Thicket | _: EmptyValDef[?] => + super.typedUnadapted(tree, pt, locked) + case _ if tree.isType => + promote(tree) + case _ => + val tree1 = super.typedUnadapted(tree, pt, locked) + def isSubType(tp1: Type, tp2: Type) = + (tp1 eq tp2) || // accept NoType / NoType + (tp1 <:< tp2) + def divergenceMsg(tp1: Type, tp2: Type) = + s"""Types differ + |Original type : ${tree.typeOpt.show} + |After checking: ${tree1.tpe.show} + |Original tree : ${tree.show} + |After checking: ${tree1.show} + |Why different : + """.stripMargin + core.TypeComparer.explained(_.isSubType(tp1, tp2)) + if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted + assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) + tree1 + checkNoOrphans(res.tpe) + phasesToCheck.foreach(_.checkPostCondition(res)) + res + catch case NonFatal(ex) if !ctx.run.enrichedErrorMessage => + val treeStr = tree.show(using ctx.withPhase(ctx.phase.prev.megaPhase)) + println(ctx.run.enrichErrorMessage(s"exception 
while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) + throw ex } def checkNotRepeated(tree: Tree)(using Context): tree.type = { diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index e152b5e6b9c7..9741a366da89 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -170,13 +170,6 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def addCanThrowCapabilities(expr: untpd.Tree, cases: List[CaseDef])(using Context): untpd.Tree = expr - override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = - try super.typedUnadapted(tree, pt, locked) - catch case NonFatal(ex) if ctx.phase != Phases.typerPhase && ctx.phase != Phases.inliningPhase && !ctx.run.enrichedErrorMessage => - val treeStr = tree.show(using ctx.withPhase(ctx.phase.prev.megaPhase)) - println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) - throw ex - override def inlineExpansion(mdef: DefDef)(using Context): List[Tree] = mdef :: Nil override def inferView(from: Tree, to: Type)(using Context): Implicits.SearchResult = diff --git a/tests/neg/i19929.check b/tests/neg/i19929.check new file mode 100644 index 000000000000..6782cd2133ef --- /dev/null +++ b/tests/neg/i19929.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i19929.scala:5:6 ----------------------------------------------------------------------------------- +5 | val _: a.M = ??? // error was crash + | ^ + | cannot resolve reference to type (a : A).M + | the classfile defining the type might be missing from the classpath diff --git a/tests/neg/i19929.scala b/tests/neg/i19929.scala new file mode 100644 index 000000000000..af88d950dc4f --- /dev/null +++ b/tests/neg/i19929.scala @@ -0,0 +1,5 @@ +trait A: + private type M + +def foo(a: A{type M = Int}) = + val _: a.M = ??? 
// error was crash From 4920146f554d418808da97ec537a356ac2562f35 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 15 Apr 2024 13:45:37 +0200 Subject: [PATCH 119/465] Reclassify test --- tests/{neg => pos}/i19929.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename tests/{neg => pos}/i19929.scala (60%) diff --git a/tests/neg/i19929.scala b/tests/pos/i19929.scala similarity index 60% rename from tests/neg/i19929.scala rename to tests/pos/i19929.scala index af88d950dc4f..2e1c691af8f5 100644 --- a/tests/neg/i19929.scala +++ b/tests/pos/i19929.scala @@ -2,4 +2,4 @@ trait A: private type M def foo(a: A{type M = Int}) = - val _: a.M = ??? // error was crash + val _: a.M = ??? // was crash From 6841c7565244ae914589e68ac974e3928666529d Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 15 Apr 2024 15:35:21 +0200 Subject: [PATCH 120/465] Fix handling of AppliedType aliases in outerPrefix Fixes #10184 The overlooked case was an AppliedType where we don't dealias correctly (i.e. at phase at most erasure). --- compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index 55a692780b85..b851db26f5c8 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -360,7 +360,7 @@ object ExplicitOuter { // macro generated code to be able to pinpoint the root problem. 
} case tpe: TypeProxy => - outerPrefix(tpe.underlying) + atPhaseNoLater(erasurePhase)(outerPrefix(tpe.superType)) } /** It's possible (i1755.scala gives an example) that the type From ab4b12ccb958672fa039768f77fdea34ea96ac12 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 15 Apr 2024 15:37:58 +0200 Subject: [PATCH 121/465] Add test --- tests/pos/i20184.scala | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 tests/pos/i20184.scala diff --git a/tests/pos/i20184.scala b/tests/pos/i20184.scala new file mode 100644 index 000000000000..2c9f6ed62be0 --- /dev/null +++ b/tests/pos/i20184.scala @@ -0,0 +1,12 @@ +object Outer: + def Test = + object Inner: + var x: Int = 2 + class Rgb(): + def f = x + + type Id[X] = X + type TRgb = Id[Inner.Rgb] + + val ok = new Inner.Rgb() + val crash = new Id[Inner.Rgb] \ No newline at end of file From 0cc98527c5ff8bdc1c828b8cff051fb5cce87271 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 13:44:39 +0000 Subject: [PATCH 122/465] Bump peaceiris/actions-gh-pages from 3 to 4 Bumps [peaceiris/actions-gh-pages](https://github.com/peaceiris/actions-gh-pages) from 3 to 4. - [Release notes](https://github.com/peaceiris/actions-gh-pages/releases) - [Changelog](https://github.com/peaceiris/actions-gh-pages/blob/main/CHANGELOG.md) - [Commits](https://github.com/peaceiris/actions-gh-pages/compare/v3...v4) --- updated-dependencies: - dependency-name: peaceiris/actions-gh-pages dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 32bdb6f42b8f..12e90eb9d653 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -712,7 +712,7 @@ jobs: ./project/scripts/genDocs -doc-snapshot - name: Deploy Website to dotty-website - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: personal_token: ${{ env.DOTTY_WEBSITE_BOT_TOKEN }} publish_dir: docs/_site From a98e5c7ac5cd00910fc8e041d5a5314b88a09788 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 13:44:41 +0000 Subject: [PATCH 123/465] Bump VirtusLab/scala-cli-setup from 1.2.1 to 1.2.2 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.2.1 to 1.2.2. - [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.2.1...v1.2.2) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 7136d6bb2f7d..641e24835c31 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.2.1 + - uses: VirtusLab/scala-cli-setup@v1.2.2 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From 44d9da14637538f5c908e786ddc82c6504b44084 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 15 Apr 2024 15:46:52 +0200 Subject: [PATCH 124/465] Streamline outerPrefix --- .../tools/dotc/transform/ExplicitOuter.scala | 21 ++++--------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index b851db26f5c8..15dfda845389 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -343,25 +343,12 @@ object ExplicitOuter { private final val HoistableFlags = Method | Lazy | Module /** The outer prefix implied by type `tpe` */ - private def outerPrefix(tpe: Type)(using Context): Type = tpe match { - case tpe: TypeRef => - tpe.symbol match { - case cls: ClassSymbol => - if (tpe.prefix eq NoPrefix) cls.owner.enclosingClass.thisType - else tpe.prefix - case _ => - // Need to be careful to dealias before erasure, otherwise we lose prefixes. - atPhaseNoLater(erasurePhase)(outerPrefix(tpe.underlying)) - // underlying is fine here and below since we are calling this after erasure. - // However, there is some weird stuff going on with parboiled2 where an - // AppliedType with a type alias as constructor is fed to outerPrefix. 
- // For some other unknown reason this works with underlying but not with superType. - // I was not able to minimize the problem and parboiled2 spits out way too much - // macro generated code to be able to pinpoint the root problem. - } + private def outerPrefix(tpe: Type)(using Context): Type = tpe match + case tpe: TypeRef if tpe.symbol.isClass => + if tpe.prefix eq NoPrefix then tpe.symbol.owner.enclosingClass.thisType + else tpe.prefix case tpe: TypeProxy => atPhaseNoLater(erasurePhase)(outerPrefix(tpe.superType)) - } /** It's possible (i1755.scala gives an example) that the type * given by outerPrefix contains a This-reference to a module outside From 16d9e3d3fada48983933d08da472debb7b7fa9e4 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 15 Apr 2024 17:00:57 +0200 Subject: [PATCH 125/465] Amend reclassify test --- tests/neg/i19929.check | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 tests/neg/i19929.check diff --git a/tests/neg/i19929.check b/tests/neg/i19929.check deleted file mode 100644 index 6782cd2133ef..000000000000 --- a/tests/neg/i19929.check +++ /dev/null @@ -1,5 +0,0 @@ --- Error: tests/neg/i19929.scala:5:6 ----------------------------------------------------------------------------------- -5 | val _: a.M = ??? // error was crash - | ^ - | cannot resolve reference to type (a : A).M - | the classfile defining the type might be missing from the classpath From 0caf5035ddd5b81969193bad10cef62eaa5db264 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 28 Nov 2023 12:05:34 +0100 Subject: [PATCH 126/465] write pipelined tasty in parallel. 
--- .../dotty/tools/backend/jvm/GenBCode.scala | 18 +++ compiler/src/dotty/tools/dotc/Driver.scala | 8 ++ .../src/dotty/tools/dotc/core/Contexts.scala | 11 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 60 ++------ .../dotty/tools/dotc/transform/Pickler.scala | 121 +++++++++++++++- compiler/src/dotty/tools/io/FileWriters.scala | 131 +++++++++++++++--- compiler/src/dotty/tools/io/JarArchive.scala | 2 +- 7 files changed, 272 insertions(+), 79 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 8d467529d60e..ade7c4705c52 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -10,6 +10,10 @@ import Symbols.* import dotty.tools.io.* import scala.collection.mutable import scala.compiletime.uninitialized +import java.util.concurrent.TimeoutException + +import scala.concurrent.duration.given +import scala.concurrent.Await class GenBCode extends Phase { self => @@ -90,6 +94,20 @@ class GenBCode extends Phase { self => try val result = super.runOn(units) generatedClassHandler.complete() + for holder <- ctx.asyncTastyPromise do + try + val asyncState = Await.result(holder.promise.future, 5.seconds) + for reporter <- asyncState.pending do + reporter.relayReports(frontendAccess.backendReporting) + catch + case _: TimeoutException => + report.error( + """Timeout (5s) in backend while waiting for async writing of TASTy files to -Yearly-tasty-output, + | this may be a bug in the compiler. 
+ | + |Alternatively consider turning off pipelining for this project.""".stripMargin + ) + end for result finally // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index dcc6cf8d71c0..1e3d002d4391 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -66,6 +66,13 @@ class Driver { protected def command: CompilerCommand = ScalacCommand + private def setupAsyncTasty(ictx: FreshContext): Unit = inContext(ictx): + ictx.settings.YearlyTastyOutput.value match + case earlyOut if earlyOut.isDirectory && earlyOut.exists => + ictx.setInitialAsyncTasty() + case _ => + () // do nothing + /** Setup context with initialized settings from CLI arguments, then check if there are any settings that * would change the default behaviour of the compiler. * @@ -82,6 +89,7 @@ class Driver { Positioned.init(using ictx) inContext(ictx) { + setupAsyncTasty(ictx) if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then ictx.setProperty(ContextDoc, new ContextDocstrings) val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 47006bdbe561..add8859948a6 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -31,9 +31,11 @@ import StdNames.nme import compiletime.uninitialized import scala.annotation.internal.sharable +import scala.concurrent.Promise import DenotTransformers.DenotTransformer import dotty.tools.dotc.profile.Profiler +import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder import dotty.tools.dotc.sbt.interfaces.{IncrementalCallback, ProgressCallback} import util.Property.Key import util.Store @@ -54,8 +56,9 @@ 
object Contexts { private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) private val (progressCallbackLoc, store11) = store10.newLocation[ProgressCallback | Null]() + private val (tastyPromiseLoc, store12) = store11.newLocation[Option[AsyncTastyHolder]](None) - private val initialStore = store11 + private val initialStore = store12 /** The current context */ inline def ctx(using ctx: Context): Context = ctx @@ -197,6 +200,8 @@ object Contexts { /** The current settings values */ def settingsState: SettingsState = store(settingsStateLoc) + def asyncTastyPromise: Option[AsyncTastyHolder] = store(tastyPromiseLoc) + /** The current compilation unit */ def compilationUnit: CompilationUnit = store(compilationUnitLoc) @@ -685,6 +690,10 @@ object Contexts { updateStore(compilationUnitLoc, compilationUnit) } + def setInitialAsyncTasty(): this.type = + assert(store(tastyPromiseLoc) == None, "trying to set async tasty promise twice!") + updateStore(tastyPromiseLoc, Some(AsyncTastyHolder(settings.YearlyTastyOutput.value, Promise()))) + def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback) def setProgressCallback(callback: ProgressCallback): this.type = updateStore(progressCallbackLoc, callback) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 138cda099040..962529e9786a 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -70,11 +70,6 @@ class ExtractAPI extends Phase { override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = val doZincCallback = ctx.runZincPhases - val sigWriter: Option[Pickler.EarlyFileWriter] = 
ctx.settings.YearlyTastyOutput.value match - case earlyOut if earlyOut.isDirectory && earlyOut.exists => - Some(Pickler.EarlyFileWriter(earlyOut)) - case _ => - None val nonLocalClassSymbols = new mutable.HashSet[Symbol] val units0 = if doZincCallback then @@ -82,7 +77,6 @@ class ExtractAPI extends Phase { super.runOn(units)(using ctx0) else units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output) - sigWriter.foreach(writeSigFiles(units0, _)) if doZincCallback then ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) if ctx.settings.YjavaTasty.value then @@ -91,57 +85,19 @@ class ExtractAPI extends Phase { units0 end runOn - // Why we only write to early output in the first run? - // =================================================== - // TL;DR the point of pipeline compilation is to start downstream projects early, - // so we don't want to wait for suspended units to be compiled. - // - // But why is it safe to ignore suspended units? - // If this project contains a transparent macro that is called in the same project, - // the compilation unit of that call will be suspended (if the macro implementation - // is also in this project), causing a second run. - // However before we do that run, we will have already requested sbt to begin - // early downstream compilation. This means that the suspended definitions will not - // be visible in *early* downstream compilation. - // - // However, sbt will by default prevent downstream compilation happening in this scenario, - // due to the existence of macro definitions. So we are protected from failure if user tries - // to use the suspended definitions. - // - // Additionally, it is recommended for the user to move macro implementations to another project - // if they want to force early output. In this scenario the suspensions will no longer occur, so now - // they will become visible in the early-output. 
- // - // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force` - // for examples of this in action. - // - // Therefore we only need to write to early output in the first run. We also provide the option - // to diagnose suspensions with the `-Yno-suspended-units` flag. - private def writeSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = { - try - for - unit <- units - (cls, pickled) <- unit.pickled - if cls.isDefinedInCurrentRun - do - val internalName = - if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn - else cls.binaryClassName - val _ = writer.writeTasty(internalName, pickled()) - finally - writer.close() - if ctx.settings.verbose.value then - report.echo("[sig files written]") - end try - } - private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit = for cls <- nonLocalClassSymbols do val sourceFile = cls.source if sourceFile.exists && cls.isDefinedInCurrentRun then recordNonLocalClass(cls, sourceFile, cb) - cb.apiPhaseCompleted() - cb.dependencyPhaseCompleted() + for holder <- ctx.asyncTastyPromise do + import scala.concurrent.ExecutionContext.Implicits.global + // do not expect to be completed with failure + holder.promise.future.foreach: state => + if !state.hasErrors then + // We also await the promise at GenBCode to emit warnings/errors + cb.apiPhaseCompleted() + cb.dependencyPhaseCompleted() private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit = def registerProductNames(fullClassName: String, binaryClassName: String) = diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 3a4212547d16..30e6b0c8332f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -9,7 +9,7 @@ import tasty.* import config.Printers.{noPrinter, pickling} import config.Feature import java.io.PrintStream -import io.FileWriters.TastyWriter +import io.FileWriters.{TastyWriter, ReadOnlyContext} import StdNames.{str, nme} import Periods.* import Phases.* @@ -22,6 +22,11 @@ import compiletime.uninitialized import dotty.tools.io.{JarArchive, AbstractFile} import dotty.tools.dotc.printing.OutlinePrinter import scala.annotation.constructorOnly +import scala.concurrent.Promise +import dotty.tools.dotc.transform.Pickler.writeSigFilesAsync + +import scala.util.chaining.given +import dotty.tools.io.FileWriters.BufferingDelayedReporting object Pickler { val name: String = "pickler" @@ -33,8 +38,62 @@ object Pickler { */ inline val ParallelPickling = true + class AsyncTastyHolder(val earlyOut: AbstractFile, val promise: Promise[AsyncTastyState]) + class AsyncTastyState(val hasErrors: Boolean, val pending: Option[BufferingDelayedReporting]) + + // Why we only write to early output in the first run? + // =================================================== + // TL;DR the point of pipeline compilation is to start downstream projects early, + // so we don't want to wait for suspended units to be compiled. + // + // But why is it safe to ignore suspended units? + // If this project contains a transparent macro that is called in the same project, + // the compilation unit of that call will be suspended (if the macro implementation + // is also in this project), causing a second run. + // However before we do that run, we will have already requested sbt to begin + // early downstream compilation. This means that the suspended definitions will not + // be visible in *early* downstream compilation. + // + // However, sbt will by default prevent downstream compilation happening in this scenario, + // due to the existence of macro definitions. 
So we are protected from failure if user tries + // to use the suspended definitions. + // + // Additionally, it is recommended for the user to move macro implementations to another project + // if they want to force early output. In this scenario the suspensions will no longer occur, so now + // they will become visible in the early-output. + // + // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force` + // for examples of this in action. + // + // Therefore we only need to write to early output in the first run. We also provide the option + // to diagnose suspensions with the `-Yno-suspended-units` flag. + def writeSigFilesAsync( + tasks: List[(String, Array[Byte])], + writer: EarlyFileWriter, + promise: Promise[AsyncTastyState])(using ctx: ReadOnlyContext): Unit = { + try + for (internalName, pickled) <- tasks do + val _ = writer.writeTasty(internalName, pickled) + finally + try + writer.close() + finally + promise.success( + AsyncTastyState( + hasErrors = ctx.reporter.hasErrors, + pending = ( + ctx.reporter match + case buffered: BufferingDelayedReporting => Some(buffered) + case _ => None + ) + ) + ) + end try + end try + } + class EarlyFileWriter private (writer: TastyWriter, origin: AbstractFile): - def this(dest: AbstractFile)(using @constructorOnly ctx: Context) = this(TastyWriter(dest), dest) + def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest), dest) export writer.writeTasty @@ -50,13 +109,15 @@ object Pickler { class Pickler extends Phase { import ast.tpd.* + def doAsyncTasty(using Context): Boolean = ctx.asyncTastyPromise.isDefined + override def phaseName: String = Pickler.name override def description: String = Pickler.description // No need to repickle trees coming from TASTY override def isRunnable(using Context): Boolean = - super.isRunnable && !ctx.settings.fromTasty.value + super.isRunnable && (!ctx.settings.fromTasty.value || doAsyncTasty) // when 
`-Yjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false @@ -86,11 +147,20 @@ class Pickler extends Phase { */ object serialized: val scratch = new ScratchData + private val buf = mutable.ListBuffer.empty[(String, Array[Byte])] def run(body: ScratchData => Array[Byte]): Array[Byte] = synchronized { scratch.reset() body(scratch) } + def commit(internalName: String, tasty: Array[Byte]): Unit = synchronized { + buf += ((internalName, tasty)) + } + def result(): List[(String, Array[Byte])] = synchronized { + val res = buf.toList + buf.clear() + res + } private val executor = Executor[Array[Byte]]() @@ -100,10 +170,29 @@ class Pickler extends Phase { if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_)) else ctx + /** only ran under -Ypickle-write and -from-tasty */ + private def runFromTasty(unit: CompilationUnit)(using Context): Unit = { + val pickled = unit.pickled + for (cls, bytes) <- pickled do + serialized.commit(computeInternalName(cls), bytes()) + } + + private def computeInternalName(cls: ClassSymbol)(using Context): String = + if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn + else cls.binaryClassName + override def run(using Context): Unit = { val unit = ctx.compilationUnit pickling.println(i"unpickling in run ${ctx.runId}") + if ctx.settings.fromTasty.value then + // skip the rest of the phase, as tasty is already "pickled", + // however we still need to set up tasks to write TASTy to + // early output when pipelining is enabled. 
+ if doAsyncTasty then + runFromTasty(unit) + return () + for cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree)) tree <- sliceTopLevel(unit.tpdTree, cls) @@ -137,6 +226,8 @@ class Pickler extends Phase { val positionWarnings = new mutable.ListBuffer[Message]() def reportPositionWarnings() = positionWarnings.foreach(report.warning(_)) + val internalName = if doAsyncTasty then computeInternalName(cls) else "" + def computePickled(): Array[Byte] = inContext(ctx.fresh) { serialized.run { scratch => treePkl.compactify(scratch) @@ -166,6 +257,10 @@ class Pickler extends Phase { println(i"**** pickled info of $cls") println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) println(i"**** end of pickled info of $cls") + + if doAsyncTasty then + serialized.commit(internalName, pickled) + pickled } } @@ -194,13 +289,27 @@ class Pickler extends Phase { } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { + val isConcurrent = useExecutor + + val writeTask: Option[() => Unit] = ctx.asyncTastyPromise.map: holder => + () => + given ReadOnlyContext = if isConcurrent then ReadOnlyContext.buffered else ReadOnlyContext.eager + val writer = Pickler.EarlyFileWriter(holder.earlyOut) + writeSigFilesAsync(serialized.result(), writer, holder.promise) + + def runPhase(writeCB: (doWrite: () => Unit) => Unit) = + super.runOn(units).tap(_ => writeTask.foreach(writeCB)) + val result = - if useExecutor then + if isConcurrent then executor.start() - try super.runOn(units) + try + runPhase: doWrite => + // unless we redesign executor to have "Unit" schedule overload, we need some sentinel value. 
+ executor.schedule(() => { doWrite(); Array.emptyByteArray }) finally executor.close() else - super.runOn(units) + runPhase(_()) if ctx.settings.YtestPickler.value then val ctx2 = ctx.fresh .setSetting(ctx.settings.YreadComments, true) diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index 4f03194fa4ce..d8c397a6166f 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -1,8 +1,7 @@ package dotty.tools.io -import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.core.Decorators.em -import dotty.tools.dotc.report +import scala.language.unsafeNulls + import dotty.tools.io.AbstractFile import dotty.tools.io.JarArchive import dotty.tools.io.PlainFile @@ -25,13 +24,107 @@ import java.util.zip.CRC32 import java.util.zip.Deflater import java.util.zip.ZipEntry import java.util.zip.ZipOutputStream -import scala.language.unsafeNulls +import scala.collection.mutable + +import dotty.tools.dotc.core.Contexts, Contexts.Context +import dotty.tools.dotc.core.Decorators.em + +import dotty.tools.dotc.util.{SourcePosition, NoSourcePosition} + +import dotty.tools.dotc.reporting.Message +import dotty.tools.dotc.report + +import dotty.tools.backend.jvm.PostProcessorFrontendAccess.BackendReporting +import scala.annotation.constructorOnly /** Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed */ object FileWriters { type InternalName = String type NullableFile = AbstractFile | Null + inline def ctx(using ReadOnlyContext): ReadOnlyContext = summon[ReadOnlyContext] + + sealed trait DelayedReporting { + def hasErrors: Boolean + def error(message: Context ?=> Message, position: SourcePosition): Unit + def warning(message: Context ?=> Message, position: SourcePosition): Unit + def log(message: String): Unit + + def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition) + def warning(message: Context 
?=> Message): Unit = warning(message, NoSourcePosition) + } + + final class EagerDelayedReporting(using captured: Context) extends DelayedReporting: + private var _hasErrors = false + + def hasErrors: Boolean = _hasErrors + + def error(message: Context ?=> Message, position: SourcePosition): Unit = + report.error(message, position) + _hasErrors = true + + def warning(message: Context ?=> Message, position: SourcePosition): Unit = + report.warning(message, position) + + def log(message: String): Unit = report.echo(message) + + final class BufferingDelayedReporting extends DelayedReporting { + // We optimise access to the buffered reports for the common case - that there are no warning/errors to report + // We could use a listBuffer etc - but that would be extra allocation in the common case + // Note - all access is externally synchronized, as this allows the reports to be generated in one thread and + // consumed in another + private var bufferedReports = List.empty[Report] + private var _hasErrors = false + enum Report(val relay: Context ?=> BackendReporting => Unit): + case Error(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.error(message(ctx), position)) + case Warning(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.warning(message(ctx), position)) + case Log(message: String) extends Report(_.log(message)) + + def hasErrors: Boolean = synchronized: + _hasErrors + + def error(message: Context ?=> Message, position: SourcePosition): Unit = synchronized: + bufferedReports ::= Report.Error({case given Context => message}, position) + _hasErrors = true + + def warning(message: Context ?=> Message, position: SourcePosition): Unit = synchronized: + bufferedReports ::= Report.Warning({case given Context => message}, position) + + def log(message: String): Unit = synchronized: + bufferedReports ::= Report.Log(message) + + /** Should only be called from main compiler thread. 
*/ + def relayReports(toReporting: BackendReporting)(using Context): Unit = synchronized: + if bufferedReports.nonEmpty then + bufferedReports.reverse.foreach(_.relay(toReporting)) + bufferedReports = Nil + } + + trait ReadSettings: + def jarCompressionLevel: Int + def debug: Boolean + + trait ReadOnlyContext: + + val settings: ReadSettings + val reporter: DelayedReporting + + trait BufferedReadOnlyContext extends ReadOnlyContext: + val reporter: BufferingDelayedReporting + + object ReadOnlyContext: + def readSettings(using ctx: Context): ReadSettings = new: + val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value + val debug = ctx.settings.Ydebug.value + + def buffered(using Context): BufferedReadOnlyContext = new: + val settings = readSettings + val reporter = BufferingDelayedReporting() + + def eager(using Context): ReadOnlyContext = new: + val settings = readSettings + val reporter = EagerDelayedReporting() + /** * The interface to writing classfiles. GeneratedClassHandler calls these methods to generate the * directory and files that are created, and eventually calls `close` when the writing is complete. @@ -47,7 +140,7 @@ object FileWriters { * * @param name the internal name of the class, e.g. "scala.Option" */ - def writeTasty(name: InternalName, bytes: Array[Byte])(using Context): NullableFile + def writeTasty(name: InternalName, bytes: Array[Byte])(using ReadOnlyContext): NullableFile /** * Close the writer. Behavior is undefined after a call to `close`. 
@@ -60,7 +153,7 @@ object FileWriters { object TastyWriter { - def apply(output: AbstractFile)(using Context): TastyWriter = { + def apply(output: AbstractFile)(using ReadOnlyContext): TastyWriter = { // In Scala 2 depenening on cardinality of distinct output dirs MultiClassWriter could have been used // In Dotty we always use single output directory @@ -73,7 +166,7 @@ object FileWriters { private final class SingleTastyWriter(underlying: FileWriter) extends TastyWriter { - override def writeTasty(className: InternalName, bytes: Array[Byte])(using Context): NullableFile = { + override def writeTasty(className: InternalName, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = { underlying.writeFile(classToRelativePath(className), bytes) } @@ -83,14 +176,14 @@ object FileWriters { } sealed trait FileWriter { - def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile + def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile def close(): Unit } object FileWriter { - def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using Context): FileWriter = + def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using ReadOnlyContext): FileWriter = if (file.isInstanceOf[JarArchive]) { - val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value + val jarCompressionLevel = ctx.settings.jarCompressionLevel // Writing to non-empty JAR might be an undefined behaviour, e.g. 
in case if other files where // created using `AbstractFile.bufferedOutputStream`instead of JarWritter val jarFile = file.underlyingSource.getOrElse{ @@ -127,7 +220,7 @@ object FileWriters { lazy val crc = new CRC32 - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = this.synchronized { + override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = this.synchronized { val entry = new ZipEntry(relativePath) if (storeOnly) { // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ @@ -155,14 +248,14 @@ object FileWriters { val noAttributes = Array.empty[FileAttribute[?]] private val isWindows = scala.util.Properties.isWin - private def checkName(component: Path)(using Context): Unit = if (isWindows) { + private def checkName(component: Path)(using ReadOnlyContext): Unit = if (isWindows) { val specials = raw"(?i)CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]".r val name = component.toString - def warnSpecial(): Unit = report.warning(em"path component is special Windows device: ${name}") + def warnSpecial(): Unit = ctx.reporter.warning(em"path component is special Windows device: ${name}") specials.findPrefixOf(name).foreach(prefix => if (prefix.length == name.length || name(prefix.length) == '.') warnSpecial()) } - def ensureDirForPath(baseDir: Path, filePath: Path)(using Context): Unit = { + def ensureDirForPath(baseDir: Path, filePath: Path)(using ReadOnlyContext): Unit = { import java.lang.Boolean.TRUE val parent = filePath.getParent if (!builtPaths.containsKey(parent)) { @@ -192,7 +285,7 @@ object FileWriters { private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = { + 
override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = { val path = base.resolve(relativePath) try { ensureDirForPath(base, path) @@ -213,10 +306,10 @@ object FileWriters { os.close() } catch { case e: FileConflictException => - report.error(em"error writing ${path.toString}: ${e.getMessage}") + ctx.reporter.error(em"error writing ${path.toString}: ${e.getMessage}") case e: java.nio.file.FileSystemException => - if (ctx.settings.Ydebug.value) e.printStackTrace() - report.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}") + if (ctx.settings.debug) e.printStackTrace() + ctx.reporter.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}") } AbstractFile.getFile(path) } @@ -241,7 +334,7 @@ object FileWriters { finally out.close() } - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context):NullableFile = { + override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext):NullableFile = { val outFile = getFile(base, relativePath) writeBytes(outFile, bytes) outFile diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index e95dbe97bb19..728f89966af0 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -11,7 +11,7 @@ import scala.jdk.CollectionConverters.* * that be can used as the compiler's output directory. 
*/ class JarArchive private (root: Directory) extends PlainDirectory(root) { - def close(): Unit = jpath.getFileSystem().close() + def close(): Unit = this.synchronized(jpath.getFileSystem().close()) override def exists: Boolean = jpath.getFileSystem().isOpen() && super.exists def allFileNames(): Iterator[String] = java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString) From a25d2523bd6dcaf0d130ad164d77ef34be0644a5 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 11 Apr 2024 18:04:51 +0200 Subject: [PATCH 127/465] Address feedback. Adjust semantics. Now in backend we block on the composition of the Zinc API callbacks and tasty being written. We also now conditionally signal that we are "done" writing tasty, based on if any units were suspended. This works in line with the Zinc default, which will ignore the early output anyway under the presence of macros (user can override this). Also move async tasty holder to the Run, as it is now context dependent on suspending. TODO: give the user an option to optimise performance by preventing definition of such problematic macros (which would also avoid suspensions). 
--- .../tools/backend/jvm/ClassfileWriters.scala | 6 +- .../dotty/tools/backend/jvm/GenBCode.scala | 25 +- .../dotty/tools/dotc/CompilationUnit.scala | 10 +- compiler/src/dotty/tools/dotc/Driver.scala | 14 +- compiler/src/dotty/tools/dotc/Run.scala | 36 ++- .../src/dotty/tools/dotc/core/Contexts.scala | 10 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 9 +- .../src/dotty/tools/dotc/sbt/package.scala | 19 ++ .../dotty/tools/dotc/transform/Pickler.scala | 219 ++++++++++++------ compiler/src/dotty/tools/io/FileWriters.scala | 105 ++++++--- .../a/src/main/scala/a/A.scala | 7 + .../a/src/test/scala/a/Hello.scala | 10 + .../b/src/main/scala/b/Hello.scala | 9 + .../pipelining/pipelining-cancel/build.sbt | 12 + .../project/DottyInjectedPlugin.scala | 12 + sbt-test/pipelining/pipelining-cancel/test | 6 + .../pipelining-scala-macro/build.sbt | 24 +- 17 files changed, 375 insertions(+), 158 deletions(-) create mode 100644 sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala create mode 100644 sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala create mode 100644 sbt-test/pipelining/pipelining-cancel/build.sbt create mode 100644 sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-cancel/test diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala index ec251b4aa3f0..44498082c697 100644 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -20,7 +20,11 @@ import dotty.tools.io.JarArchive import scala.language.unsafeNulls - +/** !!! This file is now copied in `dotty.tools.io.FileWriters` in a more general way that does not rely upon + * `PostProcessorFrontendAccess`, this should probably be changed to wrap that class instead. 
+ * + * Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well. + */ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { type NullableFile = AbstractFile | Null import frontendAccess.{compilerSettings, backendReporting} diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index ade7c4705c52..d9f413a5d5ab 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -12,7 +12,7 @@ import scala.collection.mutable import scala.compiletime.uninitialized import java.util.concurrent.TimeoutException -import scala.concurrent.duration.given +import scala.concurrent.duration.Duration import scala.concurrent.Await class GenBCode extends Phase { self => @@ -94,20 +94,15 @@ class GenBCode extends Phase { self => try val result = super.runOn(units) generatedClassHandler.complete() - for holder <- ctx.asyncTastyPromise do - try - val asyncState = Await.result(holder.promise.future, 5.seconds) - for reporter <- asyncState.pending do - reporter.relayReports(frontendAccess.backendReporting) - catch - case _: TimeoutException => - report.error( - """Timeout (5s) in backend while waiting for async writing of TASTy files to -Yearly-tasty-output, - | this may be a bug in the compiler. 
- | - |Alternatively consider turning off pipelining for this project.""".stripMargin - ) - end for + try + for + async <- ctx.run.nn.asyncTasty + bufferedReporter <- async.sync() + do + bufferedReporter.relayReports(frontendAccess.backendReporting) + catch + case ex: Exception => + report.error(s"exception from future: $ex, (${Option(ex.getCause())})") result finally // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index adced57d5801..193baaf7a55b 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -98,11 +98,15 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn depRecorder.clear() if !suspended then suspended = true - ctx.run.nn.suspendedUnits += this + val currRun = ctx.run.nn + currRun.suspendedUnits += this + val isInliningPhase = ctx.phase == Phases.inliningPhase if ctx.settings.XprintSuspension.value then - ctx.run.nn.suspendedHints += (this -> hint) - if ctx.phase == Phases.inliningPhase then + currRun.suspendedHints += (this -> (hint, isInliningPhase)) + if isInliningPhase then suspendedAtInliningPhase = true + else + currRun.suspendedAtTyperPhase = true throw CompilationUnit.SuspendException() private var myAssignmentSpans: Map[Int, List[Span]] | Null = null diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 1e3d002d4391..f2f104d1c387 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -54,10 +54,10 @@ class Driver { if (ctx.settings.XprintSuspension.value) val suspendedHints = run.suspendedHints.toList report.echo(i"compiling suspended $suspendedUnits%, %") - for (unit, hint) <- suspendedHints do - report.echo(s" $unit: $hint") + for (unit, (hint, atInlining)) <- 
suspendedHints do + report.echo(s" $unit at ${if atInlining then "inlining" else "typer"}: $hint") val run1 = compiler.newRun - run1.compileSuspendedUnits(suspendedUnits) + run1.compileSuspendedUnits(suspendedUnits, !run.suspendedAtTyperPhase) finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) protected def initCtx: Context = (new ContextBase).initialCtx @@ -66,13 +66,6 @@ class Driver { protected def command: CompilerCommand = ScalacCommand - private def setupAsyncTasty(ictx: FreshContext): Unit = inContext(ictx): - ictx.settings.YearlyTastyOutput.value match - case earlyOut if earlyOut.isDirectory && earlyOut.exists => - ictx.setInitialAsyncTasty() - case _ => - () // do nothing - /** Setup context with initialized settings from CLI arguments, then check if there are any settings that * would change the default behaviour of the compiler. * @@ -89,7 +82,6 @@ class Driver { Positioned.init(using ictx) inContext(ictx) { - setupAsyncTasty(ictx) if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then ictx.setProperty(ContextDoc, new ContextDocstrings) val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index ffc54e969b1f..02a0618bb6e9 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -37,6 +37,7 @@ import scala.io.Codec import Run.Progress import scala.compiletime.uninitialized import dotty.tools.dotc.transform.MegaPhase +import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder /** A compiler run. 
Exports various methods to compile source files */ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { @@ -130,7 +131,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint myUnits = us var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() - var suspendedHints: mutable.Map[CompilationUnit, String] = mutable.HashMap() + var suspendedHints: mutable.Map[CompilationUnit, (String, Boolean)] = mutable.HashMap() + + /** Were any units suspended in the typer phase? if so then pipeline tasty can not complete. */ + var suspendedAtTyperPhase: Boolean = false def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then @@ -231,6 +235,22 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if !progress.isCancelled() then progress.tickSubphase() + /** if true, then we are done writing pipelined TASTy files (i.e. finished in a previous run.) */ + private var myAsyncTastyWritten = false + + private var _asyncTasty: Option[AsyncTastyHolder] = None + + /** populated when this run needs to write pipeline TASTy files. */ + def asyncTasty: Option[AsyncTastyHolder] = _asyncTasty + + private def initializeAsyncTasty()(using Context): () => Unit = + // should we provide a custom ExecutionContext? + // currently it is just used to call the `apiPhaseCompleted` and `dependencyPhaseCompleted` callbacks in Zinc + import scala.concurrent.ExecutionContext.Implicits.global + val async = AsyncTastyHolder.init + _asyncTasty = Some(async) + () => async.cancel() + /** Will be set to true if any of the compiled compilation units contains * a pureFunctions language import. 
*/ @@ -348,7 +368,14 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint runCtx.setProperty(CyclicReference.Trace, new CyclicReference.Trace()) runCtx.withProgressCallback: cb => _progress = Progress(cb, this, fusedPhases.map(_.traversals).sum) + val cancelAsyncTasty: () => Unit = + if !myAsyncTastyWritten && Phases.picklerPhase.exists && !ctx.settings.YearlyTastyOutput.isDefault then + initializeAsyncTasty() + else () => {} + runPhases(allPhases = fusedPhases)(using runCtx) + cancelAsyncTasty() + ctx.reporter.finalizeReporting() if (!ctx.reporter.hasErrors) Rewrites.writeBack() @@ -365,9 +392,12 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** Is this run started via a compilingSuspended? */ def isCompilingSuspended: Boolean = myCompilingSuspended - /** Compile units `us` which were suspended in a previous run */ - def compileSuspendedUnits(us: List[CompilationUnit]): Unit = + /** Compile units `us` which were suspended in a previous run, + * also signal if all necessary async tasty files were written in a previous run. 
+ */ + def compileSuspendedUnits(us: List[CompilationUnit], asyncTastyWritten: Boolean): Unit = myCompilingSuspended = true + myAsyncTastyWritten = asyncTastyWritten for unit <- us do unit.suspended = false compileUnits(us) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index add8859948a6..ab6fda68a09e 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -31,11 +31,9 @@ import StdNames.nme import compiletime.uninitialized import scala.annotation.internal.sharable -import scala.concurrent.Promise import DenotTransformers.DenotTransformer import dotty.tools.dotc.profile.Profiler -import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder import dotty.tools.dotc.sbt.interfaces.{IncrementalCallback, ProgressCallback} import util.Property.Key import util.Store @@ -56,9 +54,8 @@ object Contexts { private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) private val (progressCallbackLoc, store11) = store10.newLocation[ProgressCallback | Null]() - private val (tastyPromiseLoc, store12) = store11.newLocation[Option[AsyncTastyHolder]](None) - private val initialStore = store12 + private val initialStore = store11 /** The current context */ inline def ctx(using ctx: Context): Context = ctx @@ -200,8 +197,6 @@ object Contexts { /** The current settings values */ def settingsState: SettingsState = store(settingsStateLoc) - def asyncTastyPromise: Option[AsyncTastyHolder] = store(tastyPromiseLoc) - /** The current compilation unit */ def compilationUnit: CompilationUnit = store(compilationUnitLoc) @@ -690,9 +685,6 @@ object Contexts { updateStore(compilationUnitLoc, compilationUnit) } - def setInitialAsyncTasty(): this.type = - assert(store(tastyPromiseLoc) == None, "trying to set async tasty promise twice!") - updateStore(tastyPromiseLoc, 
Some(AsyncTastyHolder(settings.YearlyTastyOutput.value, Promise()))) def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 962529e9786a..7da119fedf52 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -90,14 +90,7 @@ class ExtractAPI extends Phase { val sourceFile = cls.source if sourceFile.exists && cls.isDefinedInCurrentRun then recordNonLocalClass(cls, sourceFile, cb) - for holder <- ctx.asyncTastyPromise do - import scala.concurrent.ExecutionContext.Implicits.global - // do not expect to be completed with failure - holder.promise.future.foreach: state => - if !state.hasErrors then - // We also await the promise at GenBCode to emit warnings/errors - cb.apiPhaseCompleted() - cb.dependencyPhaseCompleted() + ctx.run.nn.asyncTasty.foreach(_.signalAPIComplete()) private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit = def registerProductNames(fullClassName: String, binaryClassName: String) = diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala index dc0df381f08f..12e7f6eceac7 100644 --- a/compiler/src/dotty/tools/dotc/sbt/package.scala +++ b/compiler/src/dotty/tools/dotc/sbt/package.scala @@ -6,10 +6,29 @@ import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix import dotty.tools.dotc.core.Names.Name import dotty.tools.dotc.core.Names.termName +import interfaces.IncrementalCallback +import dotty.tools.io.FileWriters.BufferingReporter +import dotty.tools.dotc.core.Decorators.em + +import scala.util.chaining.given +import scala.util.control.NonFatal + inline val TermNameHash = 1987 // 300th 
prime inline val TypeNameHash = 1993 // 301st prime inline val InlineParamHash = 1997 // 302nd prime +def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): Option[BufferingReporter] = + val zincReporter = pending match + case Some(buffered) => buffered + case None => BufferingReporter() + try + cb.apiPhaseCompleted() + cb.dependencyPhaseCompleted() + catch + case NonFatal(t) => + zincReporter.exception(em"signaling API and Dependencies phases completion", t) + if zincReporter.hasErrors then Some(zincReporter) else None + extension (sym: Symbol) /** Mangle a JVM symbol name in a format better suited for internal uses by sbt. diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 30e6b0c8332f..b7a7d874db3f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -26,7 +26,12 @@ import scala.concurrent.Promise import dotty.tools.dotc.transform.Pickler.writeSigFilesAsync import scala.util.chaining.given -import dotty.tools.io.FileWriters.BufferingDelayedReporting +import dotty.tools.io.FileWriters.{EagerReporter, BufferingReporter} +import dotty.tools.dotc.sbt.interfaces.IncrementalCallback +import dotty.tools.dotc.sbt.asyncZincPhasesCompleted +import scala.concurrent.ExecutionContext +import scala.util.control.NonFatal +import java.util.concurrent.atomic.AtomicBoolean object Pickler { val name: String = "pickler" @@ -38,84 +43,158 @@ object Pickler { */ inline val ParallelPickling = true - class AsyncTastyHolder(val earlyOut: AbstractFile, val promise: Promise[AsyncTastyState]) - class AsyncTastyState(val hasErrors: Boolean, val pending: Option[BufferingDelayedReporting]) - - // Why we only write to early output in the first run? 
- // =================================================== - // TL;DR the point of pipeline compilation is to start downstream projects early, - // so we don't want to wait for suspended units to be compiled. - // - // But why is it safe to ignore suspended units? - // If this project contains a transparent macro that is called in the same project, - // the compilation unit of that call will be suspended (if the macro implementation - // is also in this project), causing a second run. - // However before we do that run, we will have already requested sbt to begin - // early downstream compilation. This means that the suspended definitions will not - // be visible in *early* downstream compilation. - // - // However, sbt will by default prevent downstream compilation happening in this scenario, - // due to the existence of macro definitions. So we are protected from failure if user tries - // to use the suspended definitions. - // - // Additionally, it is recommended for the user to move macro implementations to another project - // if they want to force early output. In this scenario the suspensions will no longer occur, so now - // they will become visible in the early-output. - // - // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force` - // for examples of this in action. - // - // Therefore we only need to write to early output in the first run. We also provide the option - // to diagnose suspensions with the `-Yno-suspended-units` flag. - def writeSigFilesAsync( - tasks: List[(String, Array[Byte])], - writer: EarlyFileWriter, - promise: Promise[AsyncTastyState])(using ctx: ReadOnlyContext): Unit = { - try - for (internalName, pickled) <- tasks do - val _ = writer.writeTasty(internalName, pickled) - finally - try - writer.close() - finally - promise.success( - AsyncTastyState( + /**A holder for synchronization points and reports when writing TASTy asynchronously. + * The callbacks should only be called once. 
+ */ + class AsyncTastyHolder private ( + val earlyOut: AbstractFile, incCallback: IncrementalCallback | Null)(using @constructorOnly ex: ExecutionContext): + import scala.concurrent.Future as StdFuture + import scala.concurrent.Await + import scala.concurrent.duration.Duration + import AsyncTastyHolder.Signal + + private val _cancel = AtomicBoolean(false) + + /**Cancel any outstanding work. + * This should be done at the end of a run, e.g. if there were errors that prevented reaching the backend. */ + def cancel(): Unit = + while + val cancelled = _cancel.get() + !cancelled && !_cancel.compareAndSet(false, true) + do () + if incCallback != null then + asyncTastyWritten.trySuccess(None) // cancel the wait for TASTy writing + if incCallback != null then + asyncAPIComplete.trySuccess(Signal.Cancelled) // cancel the wait for API completion + + /** check if the work has been cancelled. */ + def cancelled: Boolean = _cancel.get() + + private val asyncTastyWritten = Promise[Option[AsyncTastyHolder.State]]() + private val asyncAPIComplete = + if incCallback == null then Promise.successful(Signal.Done) // no need to wait for API completion + else Promise[Signal]() + + private val backendFuture: StdFuture[Option[BufferingReporter]] = + val asyncState = asyncTastyWritten.future + .zipWith(asyncAPIComplete.future)((state, api) => state.filterNot(_ => api == Signal.Cancelled)) + asyncState.map: optState => + optState.flatMap: state => + if incCallback != null && state.done && !state.hasErrors then + asyncZincPhasesCompleted(incCallback, state.pending) + else state.pending + + /** awaits the state of async TASTy operations indefinitely, returns optionally any buffered reports. 
*/ + def sync(): Option[BufferingReporter] = + Await.result(backendFuture, Duration.Inf) + + def signalAPIComplete(): Unit = + if incCallback != null then + asyncAPIComplete.trySuccess(Signal.Done) + + /** should only be called once */ + def signalAsyncTastyWritten()(using ctx: ReadOnlyContext): Unit = + val done = !ctx.run.suspendedAtTyperPhase + if done then + try + // when we are done, i.e. no suspended units, + // we should close the file system so it can be read in the same JVM process. + // Note: we close even if we have been cancelled. + earlyOut match + case jar: JarArchive => jar.close() + case _ => + catch + case NonFatal(t) => + ctx.reporter.error(em"Error closing early output: ${t}") + + asyncTastyWritten.trySuccess: + Some( + AsyncTastyHolder.State( hasErrors = ctx.reporter.hasErrors, + done = done, pending = ( ctx.reporter match - case buffered: BufferingDelayedReporting => Some(buffered) - case _ => None + case buffered: BufferingReporter => Some(buffered) + case _: EagerReporter => None // already reported ) ) ) - end try - end try + end signalAsyncTastyWritten + end AsyncTastyHolder + + object AsyncTastyHolder: + /** The state after writing async tasty. Any errors should have been reported, or pending. + * if suspendedUnits is true, then we can't signal Zinc yet. + */ + private class State(val hasErrors: Boolean, val done: Boolean, val pending: Option[BufferingReporter]) + private enum Signal: + case Done, Cancelled + + /**Create a holder for Asynchronous state of early-TASTy operations. + * the `ExecutionContext` parameter is used to call into Zinc to signal + * that API and Dependency phases are complete. + */ + def init(using Context, ExecutionContext): AsyncTastyHolder = + AsyncTastyHolder(ctx.settings.YearlyTastyOutput.value, ctx.incCallback) + + + /** Asynchronously writes TASTy files to the destination -Yearly-tasty-output. + * If no units have been suspended, then we are "done", which enables Zinc to be signalled. 
+ * + * If there are suspended units, (due to calling a macro defined in the same run), then the API is incomplete, + * so it would be a mistake to signal Zinc. This is a sensible default, because Zinc by default will ignore the + * signal if there are macros in the API. + * - See `sbt-test/pipelining/pipelining-scala-macro` for an example. + * + * TODO: The user can override this default behaviour in Zinc to always listen to the signal, + * (e.g. if they define the macro implementation in an upstream, non-pipelined project). + * - See `sbt-test/pipelining/pipelining-scala-macro-force` where we force Zinc to listen to the signal. + * If the user wants force early output to be written, then they probably also want to benefit from pipelining, + * which then makes suspension problematic as it increases compilation times. + * Proposal: perhaps we should provide a flag `-Ystrict-pipelining` (as an alternative to `-Yno-suspended-units`), + * which fails in the condition of definition of a macro where its implementation is in the same project. + * (regardless of if it is used); this is also more strict than preventing suspension at typer. + * The user is then certain that they are always benefitting as much as possible from pipelining. 
+ */ + def writeSigFilesAsync( + tasks: List[(String, Array[Byte])], + writer: EarlyFileWriter, + async: AsyncTastyHolder)(using ctx: ReadOnlyContext): Unit = { + try + try + for (internalName, pickled) <- tasks do + if !async.cancelled then + val _ = writer.writeTasty(internalName, pickled) + catch + case NonFatal(t) => ctx.reporter.exception(em"writing TASTy to early output", t) + finally + writer.close() + catch + case NonFatal(t) => ctx.reporter.exception(em"closing early output writer", t) + finally + async.signalAsyncTastyWritten() } - class EarlyFileWriter private (writer: TastyWriter, origin: AbstractFile): - def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest), dest) - - export writer.writeTasty + class EarlyFileWriter private (writer: TastyWriter): + def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest)) - def close(): Unit = - writer.close() - origin match { - case jar: JarArchive => jar.close() // also close the file system - case _ => - } + export writer.{writeTasty, close} } /** This phase pickles trees */ class Pickler extends Phase { import ast.tpd.* - def doAsyncTasty(using Context): Boolean = ctx.asyncTastyPromise.isDefined + private def doAsyncTasty(using Context): Boolean = ctx.run.nn.asyncTasty.isDefined + + private var fastDoAsyncTasty: Boolean = false override def phaseName: String = Pickler.name override def description: String = Pickler.description - // No need to repickle trees coming from TASTY + // No need to repickle trees coming from TASTY, however in the case that we need to write TASTy to early-output, + // then we need to run this phase to send the tasty from compilation units to the early-output. 
override def isRunnable(using Context): Boolean = super.isRunnable && (!ctx.settings.fromTasty.value || doAsyncTasty) @@ -189,7 +268,7 @@ class Pickler extends Phase { // skip the rest of the phase, as tasty is already "pickled", // however we still need to set up tasks to write TASTy to // early output when pipelining is enabled. - if doAsyncTasty then + if fastDoAsyncTasty then runFromTasty(unit) return () @@ -226,7 +305,7 @@ class Pickler extends Phase { val positionWarnings = new mutable.ListBuffer[Message]() def reportPositionWarnings() = positionWarnings.foreach(report.warning(_)) - val internalName = if doAsyncTasty then computeInternalName(cls) else "" + val internalName = if fastDoAsyncTasty then computeInternalName(cls) else "" def computePickled(): Array[Byte] = inContext(ctx.fresh) { serialized.run { scratch => @@ -258,7 +337,7 @@ class Pickler extends Phase { println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) println(i"**** end of pickled info of $cls") - if doAsyncTasty then + if fastDoAsyncTasty then serialized.commit(internalName, pickled) pickled @@ -289,19 +368,21 @@ class Pickler extends Phase { } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - val isConcurrent = useExecutor + val useExecutor = this.useExecutor - val writeTask: Option[() => Unit] = ctx.asyncTastyPromise.map: holder => - () => - given ReadOnlyContext = if isConcurrent then ReadOnlyContext.buffered else ReadOnlyContext.eager - val writer = Pickler.EarlyFileWriter(holder.earlyOut) - writeSigFilesAsync(serialized.result(), writer, holder.promise) + val writeTask: Option[() => Unit] = + ctx.run.nn.asyncTasty.map: async => + fastDoAsyncTasty = true + () => + given ReadOnlyContext = if useExecutor then ReadOnlyContext.buffered else ReadOnlyContext.eager + val writer = Pickler.EarlyFileWriter(async.earlyOut) + writeSigFilesAsync(serialized.result(), writer, async) def runPhase(writeCB: (doWrite: () => Unit) => 
Unit) = super.runOn(units).tap(_ => writeTask.foreach(writeCB)) val result = - if isConcurrent then + if useExecutor then executor.start() try runPhase: doWrite => diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index d8c397a6166f..3d0ddccfd1f8 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -36,15 +36,21 @@ import dotty.tools.dotc.report import dotty.tools.backend.jvm.PostProcessorFrontendAccess.BackendReporting import scala.annotation.constructorOnly - -/** Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed */ +import java.util.concurrent.atomic.AtomicReference +import java.util.concurrent.atomic.AtomicBoolean + +/** !!!Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed. + * this should probably be changed to wrap that class instead. + * + * Until then, any changes to this file should be copied to `dotty.tools.backend.jvm.ClassfileWriters` as well. 
+ */ object FileWriters { type InternalName = String type NullableFile = AbstractFile | Null inline def ctx(using ReadOnlyContext): ReadOnlyContext = summon[ReadOnlyContext] - sealed trait DelayedReporting { + sealed trait DelayedReporter { def hasErrors: Boolean def error(message: Context ?=> Message, position: SourcePosition): Unit def warning(message: Context ?=> Message, position: SourcePosition): Unit @@ -52,9 +58,14 @@ object FileWriters { def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition) def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition) + final def exception(reason: Context ?=> Message, throwable: Throwable): Unit = + error({ + val trace = throwable.getStackTrace().nn.mkString("\n ") + em"An unhandled exception was thrown in the compiler while\n ${reason.message}.\n${throwable}\n $trace" + }, NoSourcePosition) } - final class EagerDelayedReporting(using captured: Context) extends DelayedReporting: + final class EagerReporter(using captured: Context) extends DelayedReporter: private var _hasErrors = false def hasErrors: Boolean = _hasErrors @@ -68,62 +79,96 @@ object FileWriters { def log(message: String): Unit = report.echo(message) - final class BufferingDelayedReporting extends DelayedReporting { + final class BufferingReporter extends DelayedReporter { // We optimise access to the buffered reports for the common case - that there are no warning/errors to report // We could use a listBuffer etc - but that would be extra allocation in the common case - // Note - all access is externally synchronized, as this allow the reports to be generated in on thread and - // consumed in another - private var bufferedReports = List.empty[Report] - private var _hasErrors = false + // buffered logs are updated atomically. 
+ + private val _bufferedReports = AtomicReference(List.empty[Report]) + private val _hasErrors = AtomicBoolean(false) + enum Report(val relay: Context ?=> BackendReporting => Unit): case Error(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.error(message(ctx), position)) case Warning(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.warning(message(ctx), position)) case Log(message: String) extends Report(_.log(message)) - def hasErrors: Boolean = synchronized: - _hasErrors + /** Atomically record that an error occurred */ + private def recordError(): Unit = + while + val old = _hasErrors.get + !old && !_hasErrors.compareAndSet(old, true) + do () + + /** Atomically add a report to the log */ + private def recordReport(report: Report): Unit = + while + val old = _bufferedReports.get + !_bufferedReports.compareAndSet(old, report :: old) + do () + + /** atomically extract and clear the buffered reports */ + private def resetReports(): List[Report] = + while + val old = _bufferedReports.get + if _bufferedReports.compareAndSet(old, Nil) then + return old + else + true + do () + throw new AssertionError("Unreachable") + + def hasErrors: Boolean = _hasErrors.get() + def hasReports: Boolean = _bufferedReports.get().nonEmpty - def error(message: Context ?=> Message, position: SourcePosition): Unit = synchronized: - bufferedReports ::= Report.Error({case given Context => message}, position) - _hasErrors = true + def error(message: Context ?=> Message, position: SourcePosition): Unit = + recordReport(Report.Error({case given Context => message}, position)) + recordError() - def warning(message: Context ?=> Message, position: SourcePosition): Unit = synchronized: - bufferedReports ::= Report.Warning({case given Context => message}, position) + def warning(message: Context ?=> Message, position: SourcePosition): Unit = + recordReport(Report.Warning({case given Context => message}, position)) - def log(message: 
String): Unit = synchronized: - bufferedReports ::= Report.Log(message) + def log(message: String): Unit = + recordReport(Report.Log(message)) /** Should only be called from main compiler thread. */ - def relayReports(toReporting: BackendReporting)(using Context): Unit = synchronized: - if bufferedReports.nonEmpty then - bufferedReports.reverse.foreach(_.relay(toReporting)) - bufferedReports = Nil + def relayReports(toReporting: BackendReporting)(using Context): Unit = + val reports = resetReports() + if reports.nonEmpty then + reports.reverse.foreach(_.relay(toReporting)) } - trait ReadSettings: + trait ReadOnlySettings: def jarCompressionLevel: Int def debug: Boolean - trait ReadOnlyContext: + trait ReadOnlyRun: + def suspendedAtTyperPhase: Boolean - val settings: ReadSettings - val reporter: DelayedReporting + trait ReadOnlyContext: + val run: ReadOnlyRun + val settings: ReadOnlySettings + val reporter: DelayedReporter trait BufferedReadOnlyContext extends ReadOnlyContext: - val reporter: BufferingDelayedReporting + val reporter: BufferingReporter object ReadOnlyContext: - def readSettings(using ctx: Context): ReadSettings = new: + def readSettings(using ctx: Context): ReadOnlySettings = new: val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value val debug = ctx.settings.Ydebug.value + def readRun(using ctx: Context): ReadOnlyRun = new: + val suspendedAtTyperPhase = ctx.run.suspendedAtTyperPhase + def buffered(using Context): BufferedReadOnlyContext = new: val settings = readSettings - val reporter = BufferingDelayedReporting() + val reporter = BufferingReporter() + val run = readRun def eager(using Context): ReadOnlyContext = new: val settings = readSettings - val reporter = EagerDelayedReporting() + val reporter = EagerReporter() + val run = readRun /** * The interface to writing classfiles. 
GeneratedClassHandler calls these methods to generate the diff --git a/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..35b27f3d4662 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala @@ -0,0 +1,7 @@ +package a + +import scala.util.Success + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala b/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala new file mode 100644 index 000000000000..629f1c0e6cfe --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala @@ -0,0 +1,10 @@ +package a + +import org.junit.Test + +class Hello { + + @Test def test(): Unit = { + assert(A.foo == (1,2,3)) + } +} diff --git a/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala b/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala new file mode 100644 index 000000000000..bbb4eb5ba7f7 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala @@ -0,0 +1,9 @@ +package b + +import a.A + +object Hello { + @main def test(): Unit = { + assert(A.foo == (1,2,3)) + } +} diff --git a/sbt-test/pipelining/pipelining-cancel/build.sbt b/sbt-test/pipelining/pipelining-cancel/build.sbt new file mode 100644 index 000000000000..f23e65895c78 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/build.sbt @@ -0,0 +1,12 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ystop-after:pickler", // before ExtractAPI is reached, will cancel the pipeline output + ) + +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala 
b/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-cancel/test b/sbt-test/pipelining/pipelining-cancel/test new file mode 100644 index 000000000000..d84f55ca3c31 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/test @@ -0,0 +1,6 @@ +# - Test depending on a project where upstream runs short of reaching backend, +# and cancels pipelined tasty writing. +# - Because `a` finishes compile run before the sending the signal to Zinc +# that pipeline jar is written, sbt will continue to the downstream project anyway. +# - Downstream project `b` will fail as it can't find a.A from upstream. +-> b/compile diff --git a/sbt-test/pipelining/pipelining-scala-macro/build.sbt b/sbt-test/pipelining/pipelining-scala-macro/build.sbt index f8576cdae796..5f703bb0d815 100644 --- a/sbt-test/pipelining/pipelining-scala-macro/build.sbt +++ b/sbt-test/pipelining/pipelining-scala-macro/build.sbt @@ -6,14 +6,14 @@ ThisBuild / usePipelining := true // force a failure by always forcing early output. 
lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Ycheck:all", + // scalacOptions += "-Ycheck:all", scalacOptions += "-Xprint-suspension", Compile / incOptions := { val old = (Compile / incOptions).value val hooks = old.externalHooks val newHooks = hooks.withExternalLookup( new sbt.internal.inc.NoopExternalLookup { - @volatile var knownSuspension = false + @volatile var earlyOutputChecks = 0 def didFindMacros(analysis: xsbti.compile.CompileAnalysis) = { val foundMacros = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal.values.exists(_.hasMacro) @@ -23,20 +23,26 @@ lazy val a = project.in(file("a")) // force early output, this is safe because the macro class from `macros` will be available. override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + earlyOutputChecks += 1 + assert(earlyOutputChecks <= 2, "should only be called twice (apiPhaseCompleted, dependencyPhaseCompleted).") val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal val a_A = internalClasses.get("a.A") val a_ASuspendTyper = internalClasses.get("a.ASuspendTyper") val a_ASuspendInlining = internalClasses.get("a.ASuspendInlining") + + // both `a.A` and `a.ASuspendInlining` should be found in the analysis. + // even though `a.ASuspendInlining` suspends, it happens at inlining, so we should still + // record API for it in the first run. assert(a_A.isDefined, s"`a.A` wasn't found.") + assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.") - if (!knownSuspension) { - // this callback is called multiple times, so we only want to assert the first time, - // in subsequent runs the suspended definition will be "resumed", so a.ASuspendTyper be found. - knownSuspension = true - assert(a_ASuspendTyper.isEmpty, s"`a.ASuspendTyper` should have been suspended initially.") - } + // in run 1, `a.ASuspendTyper` would have suspended at typer, and not be present in Analysis. 
+ // Therefore we wouldn't close the early output jar. + // Therefore, because it is present here, we waited to the second run to close the early output jar, + // at which point we recorded API for `a.ASuspendTyper`, and because we closed the early output jar, + // we send the signal to Zinc that the early output was written. + assert(a_ASuspendTyper.isDefined, s"`a.ASuspendTyper` wasn't found.") - assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.") // do what sbt does typically, // it will not force early output because macros are found From 7e1c4ca81b77cc38dd6a34b204ce2a1fba3734f8 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 15 Apr 2024 20:47:55 +0200 Subject: [PATCH 128/465] Fix isAliasType Symbols that had the TypeParam flag set were classified as alias types unless they also had the Deferred flag set. Maybe this did not break that much since Namer always added the Deferred for type parameters. But export forwarders use synthesized parameters which did not have Deferred set. --- compiler/src/dotty/tools/dotc/core/SymDenotations.scala | 3 ++- tests/neg/i20079/Lib_1.scala | 5 +++++ tests/neg/i20079/Test_2.scala | 6 ++++++ 3 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 tests/neg/i20079/Lib_1.scala create mode 100644 tests/neg/i20079/Test_2.scala diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index bfaaf78883ae..9c6aba1b30b8 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -689,7 +689,8 @@ object SymDenotations { final def isAbstractType(using Context): Boolean = this.is(DeferredType) /** Is this symbol an alias type? 
*/ - final def isAliasType(using Context): Boolean = isAbstractOrAliasType && !this.is(Deferred) + final def isAliasType(using Context): Boolean = + isAbstractOrAliasType && !isAbstractOrParamType /** Is this symbol an abstract or alias type? */ final def isAbstractOrAliasType: Boolean = isType & !isClass diff --git a/tests/neg/i20079/Lib_1.scala b/tests/neg/i20079/Lib_1.scala new file mode 100644 index 000000000000..6d72042464ce --- /dev/null +++ b/tests/neg/i20079/Lib_1.scala @@ -0,0 +1,5 @@ +object Foo: + def xyz[A, CC[X] <: Iterable[X]](coll: CC[A]): Unit = () + +object Bar: + export Foo.xyz diff --git a/tests/neg/i20079/Test_2.scala b/tests/neg/i20079/Test_2.scala new file mode 100644 index 000000000000..c19d98b55bd8 --- /dev/null +++ b/tests/neg/i20079/Test_2.scala @@ -0,0 +1,6 @@ +object Test: + val ints = List(1) + Foo.xyz[Int, List](ints) + Foo.xyz[Int, scala.collection.View](ints) // error + Bar.xyz[Int, List](ints) + Bar.xyz[Int, scala.collection.View](ints) // error \ No newline at end of file From 413c667023fea75dbb84cac625375e0e554c3cea Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 15 Apr 2024 22:09:01 +0200 Subject: [PATCH 129/465] Use isAbstractOrParamType more There were some other occurrences of isAbstractType where it was not clear why type parameters should be excluded. Use isAbstractOrParamType as the new default. 
--- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 2 +- compiler/src/dotty/tools/dotc/core/TypeErasure.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Deriving.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Synthesizer.scala | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 1c951a0c0846..8100f78e50e7 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -150,7 +150,7 @@ extension (tp: Type) case tp @ CapturingType(parent, refs) => val pcs = getBoxed(parent) if tp.isBoxed then refs ++ pcs else pcs - case tp: TypeRef if tp.symbol.isAbstractType => CaptureSet.empty + case tp: TypeRef if tp.symbol.isAbstractOrParamType => CaptureSet.empty case tp: TypeProxy => getBoxed(tp.superType) case tp: AndType => getBoxed(tp.tp1) ** getBoxed(tp.tp2) case tp: OrType => getBoxed(tp.tp1) ++ getBoxed(tp.tp2) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 0474aff4087a..b4d2934fb04b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -266,7 +266,7 @@ object TypeErasure { tp.paramNames, tp.paramNames map (Function.const(TypeBounds.upper(defn.ObjectType))), tp.resultType) if (defn.isPolymorphicAfterErasure(sym)) eraseParamBounds(sym.info.asInstanceOf[PolyType]) - else if (sym.isAbstractType) TypeAlias(WildcardType) + else if (sym.isAbstractOrParamType) TypeAlias(WildcardType) else if sym.is(ConstructorProxy) then NoType else if (sym.isConstructor) outer.addParam(sym.owner.asClass, erase(tp)(using preErasureCtx)) else if (sym.is(Label)) erase.eraseResult(sym.info)(using preErasureCtx) diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index f3be1dcff766..619dfcf4d7cb 
100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -31,7 +31,7 @@ trait Deriving { /** A version of Type#underlyingClassRef that works also for higher-kinded types */ private def underlyingClassRef(tp: Type): Type = tp match { case tp: TypeRef if tp.symbol.isClass => tp - case tp: TypeRef if tp.symbol.isAbstractType => NoType + case tp: TypeRef if tp.symbol.isAbstractOrParamType => NoType case tp: TermRef => NoType case tp: TypeProxy => underlyingClassRef(tp.superType) case _ => NoType diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index d244af12dd91..192fdd404aba 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -671,7 +671,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): def canManifest(tp: Manifestable, topLevel: Boolean) = val sym = tp.typeSymbol - !sym.isAbstractType + !sym.isAbstractOrParamType && hasStableErasure(tp) && !(topLevel && defn.isBottomClassAfterErasure(sym)) From a78214865a5613da682d1d517f6ec3b412b05861 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 16 Apr 2024 11:47:58 +0200 Subject: [PATCH 130/465] Avoid adding redundant Deferred flag to type parameters in Namer --- compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Namer.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 138cda099040..50825907be43 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -564,7 +564,7 @@ private class ExtractAPICollector(nonLocalClassSymbols: mutable.HashSet[Symbol]) if (sym.isAliasType) api.TypeAlias.of(name, access, modifiers, as.toArray, typeParams, 
apiType(tpe.bounds.hi)) else { - assert(sym.isAbstractType) + assert(sym.isAbstractOrParamType) api.TypeDeclaration.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.lo), apiType(tpe.bounds.hi)) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 1016fe467a0a..db015846391c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -274,7 +274,7 @@ class Namer { typer: Typer => analyzeRHS(body) case _ => if rhs.isEmpty || flags.is(Opaque) then flags |= Deferred - analyzeRHS(tree.rhs) + if flags.is(Param) then tree.rhs else analyzeRHS(tree.rhs) // to complete a constructor, move one context further out -- this // is the context enclosing the class. Note that the context in which a From aaacc439c6797e4d04004c9e34147b06d03e9afa Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 16 Apr 2024 13:21:09 +0200 Subject: [PATCH 131/465] Drop isAbstractType Always use isAbstractorParamType. quotes.reflect still uses isAbstractType for backwards compatibility, but it now also includes type parameters. This was the case anyway before for type parameters set up by Namer. --- compiler/src/dotty/tools/dotc/core/SymDenotations.scala | 3 --- compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala | 2 +- library/src/scala/quoted/Quotes.scala | 2 +- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 9c6aba1b30b8..73190991a2c1 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -685,9 +685,6 @@ object SymDenotations { def isWrappedToplevelDef(using Context): Boolean = !isConstructor && owner.isPackageObject - /** Is this symbol an abstract type? 
*/ - final def isAbstractType(using Context): Boolean = this.is(DeferredType) - /** Is this symbol an alias type? */ final def isAliasType(using Context): Boolean = isAbstractOrAliasType && !isAbstractOrParamType diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 47ba9833fc2f..ed66ecc2d498 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2700,7 +2700,7 @@ object Types extends TypeUtils { symd.maybeOwner.membersNeedAsSeenFrom(prefix) && !symd.is(NonMember) || prefix.match case prefix: Types.ThisType => - (symd.isAbstractType + (symd.isAbstractOrParamType || symd.isTerm && !symd.flagsUNSAFE.isOneOf(Module | Final | Param) && !symd.isConstructor diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 81fadb6baa89..f98147cf5052 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2687,7 +2687,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def isAliasType: Boolean = self.denot.isAliasType def isAnonymousClass: Boolean = self.denot.isAnonymousClass def isAnonymousFunction: Boolean = self.denot.isAnonymousFunction - def isAbstractType: Boolean = self.denot.isAbstractType + def isAbstractType: Boolean = self.denot.isAbstractOrParamType def isClassConstructor: Boolean = self.denot.isClassConstructor def isSuperAccessor = self.name.is(dotc.core.NameKinds.SuperAccessorName) def isType: Boolean = self.isType diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index b49763c38221..1bb8405662ac 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -4025,7 +4025,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is this symbol an anonymous 
function? */ def isAnonymousFunction: Boolean - /** Is this symbol an abstract type? */ + /** Is this symbol an abstract type or a type parameter? */ def isAbstractType: Boolean /** Is this the constructor of a class? */ From 9d88c800ba518b184bb5f63259a782532d1abf96 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 16 Apr 2024 13:21:41 +0200 Subject: [PATCH 132/465] Drop redundant `butNot = Param` clause in isAnchor --- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5ffc81744d85..f4545db81e9a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -621,7 +621,7 @@ trait ImplicitRunInfo: private def isAnchor(sym: Symbol) = sym.isClass && !isExcluded(sym) || sym.isOpaqueAlias - || sym.is(Deferred, butNot = Param) + || sym.is(Deferred) || sym.info.isMatchAlias private def computeIScope(rootTp: Type): OfTypeImplicits = From 4bfc43ffa5bbca345ffe41b87bb91f0caf5158ba Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 16 Apr 2024 18:10:29 +0200 Subject: [PATCH 133/465] adjust cancellation --- .../src/dotty/tools/dotc/sbt/package.scala | 4 +-- .../dotty/tools/dotc/transform/Pickler.scala | 27 +++++++---------- compiler/src/dotty/tools/io/FileWriters.scala | 30 ++++++++----------- 3 files changed, 26 insertions(+), 35 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala index 12e7f6eceac7..1c6b38b07a84 100644 --- a/compiler/src/dotty/tools/dotc/sbt/package.scala +++ b/compiler/src/dotty/tools/dotc/sbt/package.scala @@ -17,7 +17,7 @@ inline val TermNameHash = 1987 // 300th prime inline val TypeNameHash = 1993 // 301st prime inline val InlineParamHash = 1997 // 302nd prime -def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: 
Option[BufferingReporter]): Option[BufferingReporter] = +def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): BufferingReporter = val zincReporter = pending match case Some(buffered) => buffered case None => BufferingReporter() @@ -27,7 +27,7 @@ def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingR catch case NonFatal(t) => zincReporter.exception(em"signaling API and Dependencies phases completion", t) - if zincReporter.hasErrors then Some(zincReporter) else None + zincReporter extension (sym: Symbol) diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index b7a7d874db3f..6fe687072828 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -53,22 +53,21 @@ object Pickler { import scala.concurrent.duration.Duration import AsyncTastyHolder.Signal - private val _cancel = AtomicBoolean(false) + private val _cancelled = AtomicBoolean(false) /**Cancel any outstanding work. - * This should be done at the end of a run, e.g. if there were errors that prevented reaching the backend. */ + * This should be done at the end of a run, e.g. background work may be running even though + * errors in main thread will prevent reaching the backend. */ def cancel(): Unit = - while - val cancelled = _cancel.get() - !cancelled && !_cancel.compareAndSet(false, true) - do () - if incCallback != null then + if _cancelled.compareAndSet(false, true) then asyncTastyWritten.trySuccess(None) // cancel the wait for TASTy writing - if incCallback != null then - asyncAPIComplete.trySuccess(Signal.Cancelled) // cancel the wait for API completion + if incCallback != null then + asyncAPIComplete.trySuccess(Signal.Cancelled) // cancel the wait for API completion + else + () // nothing else to do /** check if the work has been cancelled. 
*/ - def cancelled: Boolean = _cancel.get() + def cancelled: Boolean = _cancelled.get() private val asyncTastyWritten = Promise[Option[AsyncTastyHolder.State]]() private val asyncAPIComplete = @@ -81,7 +80,7 @@ object Pickler { asyncState.map: optState => optState.flatMap: state => if incCallback != null && state.done && !state.hasErrors then - asyncZincPhasesCompleted(incCallback, state.pending) + asyncZincPhasesCompleted(incCallback, state.pending).toBuffered else state.pending /** awaits the state of async TASTy operations indefinitely, returns optionally any buffered reports. */ @@ -112,11 +111,7 @@ object Pickler { AsyncTastyHolder.State( hasErrors = ctx.reporter.hasErrors, done = done, - pending = ( - ctx.reporter match - case buffered: BufferingReporter => Some(buffered) - case _: EagerReporter => None // already reported - ) + pending = ctx.reporter.toBuffered ) ) end signalAsyncTastyWritten diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index 3d0ddccfd1f8..87825b025734 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -38,6 +38,7 @@ import dotty.tools.backend.jvm.PostProcessorFrontendAccess.BackendReporting import scala.annotation.constructorOnly import java.util.concurrent.atomic.AtomicReference import java.util.concurrent.atomic.AtomicBoolean +import java.util.ConcurrentModificationException /** !!!Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed. * this should probably be changed to wrap that class instead. 
@@ -56,6 +57,11 @@ object FileWriters { def warning(message: Context ?=> Message, position: SourcePosition): Unit def log(message: String): Unit + final def toBuffered: Option[BufferingReporter] = this match + case buffered: BufferingReporter => + if buffered.hasReports then Some(buffered) else None + case _: EagerReporter => None + def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition) def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition) final def exception(reason: Context ?=> Message, throwable: Throwable): Unit = @@ -94,28 +100,18 @@ object FileWriters { /** Atomically record that an error occurred */ private def recordError(): Unit = - while - val old = _hasErrors.get - !old && !_hasErrors.compareAndSet(old, true) - do () + _hasErrors.set(true) /** Atomically add a report to the log */ private def recordReport(report: Report): Unit = - while - val old = _bufferedReports.get - !_bufferedReports.compareAndSet(old, report :: old) - do () + _bufferedReports.getAndUpdate(report :: _) - /** atomically extract and clear the buffered reports */ + /** atomically extract and clear the buffered reports, must only be called at a synchonization point. 
*/ private def resetReports(): List[Report] = - while - val old = _bufferedReports.get - if _bufferedReports.compareAndSet(old, Nil) then - return old - else - true - do () - throw new AssertionError("Unreachable") + val curr = _bufferedReports.get() + if curr.nonEmpty && !_bufferedReports.compareAndSet(curr, Nil) then + throw ConcurrentModificationException("concurrent modification of buffered reports") + else curr def hasErrors: Boolean = _hasErrors.get() def hasReports: Boolean = _bufferedReports.get().nonEmpty From f0289a3be0533a7004146f39d6b70b79c1132ced Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 16 Apr 2024 18:25:29 +0200 Subject: [PATCH 134/465] Handle reach capabilities correctly when computing whether a function is dependent --- .../src/dotty/tools/dotc/core/Types.scala | 2 +- .../pos-custom-args/captures/dep-reach.scala | 21 +++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 tests/pos-custom-args/captures/dep-reach.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 47ba9833fc2f..7fb62583d9ef 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4054,7 +4054,7 @@ object Types extends TypeUtils { tp match case CapturingType(parent, refs) => (compute(status, parent, theAcc) /: refs.elems) { - (s, ref) => ref match + (s, ref) => ref.stripReach match case tp: TermParamRef if tp.binder eq thisLambdaType => combine(s, CaptureDeps) case _ => s } diff --git a/tests/pos-custom-args/captures/dep-reach.scala b/tests/pos-custom-args/captures/dep-reach.scala new file mode 100644 index 000000000000..56343fbf8e53 --- /dev/null +++ b/tests/pos-custom-args/captures/dep-reach.scala @@ -0,0 +1,21 @@ +object Test: + class C + type Proc = () => Unit + + def f(c: C^, d: C^): () ->{c, d} Unit = + def foo(xs: Proc*): () ->{xs*} Unit = + xs.head + val a: () ->{c} Unit = () => () + val b: () ->{d} Unit = () 
=> () + val xx = foo(a, b) + xx + + def g(c: C^, d: C^): () ->{c, d} Unit = + + def foo(xs: Seq[() => Unit]): () ->{xs*} Unit = + xs.head + + val a: () ->{c} Unit = () => () + val b: () ->{d} Unit = () => () + val xx = foo(Seq(a, b)) + xx From ef03fc071424ff908b0299977e548bbf7049a050 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 15 Apr 2024 13:08:23 +0200 Subject: [PATCH 135/465] Use `MirrorSource.reduce` result for `companionPath` Fixes #20187 --- compiler/src/dotty/tools/dotc/ast/tpd.scala | 20 ------------------- .../src/dotty/tools/dotc/core/TypeUtils.scala | 15 -------------- .../dotty/tools/dotc/typer/Synthesizer.scala | 15 +++++++------- tests/pos/i20187/A_1.scala | 19 ++++++++++++++++++ tests/pos/i20187/B_2.scala | 7 +++++++ 5 files changed, 34 insertions(+), 42 deletions(-) create mode 100644 tests/pos/i20187/A_1.scala create mode 100644 tests/pos/i20187/B_2.scala diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 71b85d97a187..13abfae0166c 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -471,26 +471,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case ConstantType(value) => Literal(value) } - /** A path that corresponds to the given type `tp`. Error if `tp` is not a refinement - * of an addressable singleton type. - */ - def pathFor(tp: Type)(using Context): Tree = { - def recur(tp: Type): Tree = tp match { - case tp: NamedType => - tp.info match { - case TypeAlias(alias) => recur(alias) - case _: TypeBounds => EmptyTree - case _ => singleton(tp) - } - case tp: TypeProxy => recur(tp.superType) - case _ => EmptyTree - } - recur(tp).orElse { - report.error(em"$tp is not an addressable singleton type") - TypeTree(tp) - } - } - /** A tree representing a `newXYZArray` operation of the right * kind for the given element type in `elemTpe`. No type arguments or * `length` arguments are given. 
diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index c76b5117dc89..4f0162a0fa96 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -116,21 +116,6 @@ class TypeUtils { def refinedWith(name: Name, info: Type)(using Context) = RefinedType(self, name, info) - /** The TermRef referring to the companion of the underlying class reference - * of this type, while keeping the same prefix. - */ - def mirrorCompanionRef(using Context): TermRef = self match { - case AndType(tp1, tp2) => - val c1 = tp1.classSymbol - val c2 = tp2.classSymbol - if c1.isSubClass(c2) then tp1.mirrorCompanionRef - else tp2.mirrorCompanionRef // precondition: the parts of the AndType have already been checked to be non-overlapping - case self @ TypeRef(prefix, _) if self.symbol.isClass => - prefix.select(self.symbol.companionModule).asInstanceOf[TermRef] - case self: TypeProxy => - self.superType.mirrorCompanionRef - } - /** Is this type a methodic type that takes at least one parameter? 
*/ def takesParams(using Context): Boolean = self.stripPoly match case mt: MethodType => mt.paramNames.nonEmpty || mt.resType.takesParams diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index d244af12dd91..854bd52b4c72 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -15,6 +15,7 @@ import transform.SyntheticMembers.* import util.Property import ast.Trees.genericEmptyTree import annotation.{tailrec, constructorOnly} +import ast.tpd import ast.tpd.* import Synthesizer.* import sbt.ExtractDependencies.* @@ -265,10 +266,10 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): .refinedWith(tpnme.MirroredType, TypeAlias(mirroredType)) .refinedWith(tpnme.MirroredLabel, TypeAlias(ConstantType(Constant(label.toString)))) - /** A path referencing the companion of class type `clsType` */ - private def companionPath(clsType: Type, span: Span)(using Context) = - val ref = pathFor(clsType.mirrorCompanionRef) - assert(ref.symbol.is(Module) && (clsType.classSymbol.is(ModuleClass) || (ref.symbol.companionClass == clsType.classSymbol))) + /** A path referencing the companion of `cls` with prefix `pre` */ + private def companionPath(pre: Type, cls: Symbol, span: Span)(using Context) = + val ref = tpd.ref(TermRef(pre, cls.companionModule)) + assert(ref.symbol.is(Module) && (cls.is(ModuleClass) || ref.symbol.companionClass == cls)) ref.withSpan(span) private def checkFormal(formal: Type)(using Context): Boolean = @@ -428,7 +429,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): .refinedWith(tpnme.MirroredElemLabels, TypeAlias(elemsLabels)) } val mirrorRef = - if cls.useCompanionAsProductMirror then companionPath(mirroredType, span) + if cls.useCompanionAsProductMirror then companionPath(pre, cls, span) else if defn.isTupleClass(cls) then newTupleMirror(typeElems.size) // TODO: cls == 
defn.PairClass when > 22 else anonymousMirror(monoType, MirrorImpl.OfProduct(pre), span) withNoErrors(mirrorRef.cast(mirrorType).withSpan(span)) @@ -438,7 +439,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case Right(msrc) => msrc match case MirrorSource.Singleton(_, tref) => val singleton = tref.termSymbol // prefer alias name over the orignal name - val singletonPath = pathFor(tref).withSpan(span) + val singletonPath = tpd.singleton(tref).withSpan(span) if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object. val mirrorType = formal.constrained_& { mirrorCore(defn.Mirror_SingletonProxyClass, mirroredType, mirroredType, singleton.name) @@ -571,7 +572,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): .refinedWith(tpnme.MirroredElemLabels, TypeAlias(labels)) } val mirrorRef = - if cls.useCompanionAsSumMirror then companionPath(mirroredType, span) + if cls.useCompanionAsSumMirror then companionPath(pre, cls, span) else anonymousMirror(monoType, MirrorImpl.OfSum(childPres), span) withNoErrors(mirrorRef.cast(mirrorType)) else if acceptableMsg.nonEmpty then diff --git a/tests/pos/i20187/A_1.scala b/tests/pos/i20187/A_1.scala new file mode 100644 index 000000000000..32dbae995219 --- /dev/null +++ b/tests/pos/i20187/A_1.scala @@ -0,0 +1,19 @@ +import scala.deriving.Mirror + +enum E: + case Foo1() + case Foo2() + +class Outer: + case class Inner() +val o = new Outer + +type F = E.Foo1 +type G = Tuple.Head[E.Foo1 *: E.Foo2 *: EmptyTuple] +type H = Tuple.Head[o.Inner *: EmptyTuple] +type I = Tuple.Last[E *: EmptyTuple] + +def local = + case class Bar() + type B = Tuple.Head[Bar *: EmptyTuple] + summon[Mirror.Of[B]] diff --git a/tests/pos/i20187/B_2.scala b/tests/pos/i20187/B_2.scala new file mode 100644 index 000000000000..99cfc7ba9b91 --- /dev/null +++ b/tests/pos/i20187/B_2.scala @@ -0,0 +1,7 @@ +import scala.deriving.Mirror + +def Test = + summon[Mirror.Of[F]] // ok + summon[Mirror.Of[G]] // 
was crash + summon[Mirror.Of[H]] // was crash + summon[Mirror.Of[I]] // was crash From 2ba368df4c6bd3fe237ab912fe1e57c802711eba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Wed, 17 Apr 2024 13:11:08 +0200 Subject: [PATCH 136/465] Interactive: handle context bounds in extension construct workaround (#20201) Fixes https://github.com/scala/scala3/issues/19971 --- .../tools/dotc/interactive/Completion.scala | 7 +++- .../pc/tests/completion/CompletionSuite.scala | 39 +++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 2ff8ad1c6535..f2b63cbec8d5 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -186,7 +186,12 @@ object Completion: )(using Context): List[tpd.Tree] = untpdPath.collectFirst: case untpd.ExtMethods(paramss, _) => - val enclosingParam = paramss.flatten.find(_.span.contains(pos.span)) + val enclosingParam = paramss.flatten + .find(_.span.contains(pos.span)) + .flatMap: + case untpd.TypeDef(_, bounds: untpd.ContextBounds) => bounds.cxBounds.find(_.span.contains(pos.span)) + case other => Some(other) + enclosingParam.map: param => ctx.typer.index(paramss.flatten) val typedEnclosingParam = ctx.typer.typed(param) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 10a57f705ceb..f4f659db1541 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -1838,3 +1838,42 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin, topLines = Some(3) ) + + @Test def 
`context-bound-in-extension-construct` = + check( + """ + |object x { + | extension [T: Orde@@] + |} + |""".stripMargin, + """Ordered[T] scala.math + |Ordering[T] scala.math + |""".stripMargin, + topLines = Some(2) + ) + + @Test def `context-bounds-in-extension-construct` = + check( + """ + |object x { + | extension [T: Ordering: Orde@@] + |} + |""".stripMargin, + """Ordered[T] scala.math + |Ordering[T] scala.math + |""".stripMargin, + topLines = Some(2) + ) + + @Test def `type-bound-in-extension-construct` = + check( + """ + |object x { + | extension [T <: Orde@@] + |} + |""".stripMargin, + """Ordered[T] scala.math + |Ordering[T] scala.math + |""".stripMargin, + topLines = Some(2) + ) From be27e79188eeeaa071d7e611e915f7fc09391293 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Wed, 17 Apr 2024 13:19:09 +0200 Subject: [PATCH 137/465] Add match-type-printf test --- tests/pos/match-type-printf.scala | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 tests/pos/match-type-printf.scala diff --git a/tests/pos/match-type-printf.scala b/tests/pos/match-type-printf.scala new file mode 100644 index 000000000000..cf1ff043e310 --- /dev/null +++ b/tests/pos/match-type-printf.scala @@ -0,0 +1,19 @@ +import scala.compiletime.ops.int.+ +import scala.compiletime.ops.string.{CharAt, Length, Substring} +import scala.Tuple.* + +type ArgTypes[S <: String] <: Tuple = S match + case "" => EmptyTuple + case _ => + CharAt[S, 0] match + case '%' => + CharAt[S, 1] match + case 'd' => Int *: ArgTypes[Substring[S, 2, Length[S]]] + case 's' => String *: ArgTypes[Substring[S, 2, Length[S]]] + case _ => ArgTypes[Substring[S, 1, Length[S]]] + +def printf(s: String)(t: ArgTypes[s.type]): Unit = () + +def test() = + printf("%s is %d")(("Ada", 36)) // works in Scala 3.2.0, 3.3.0 and 3.4.0 + printf("%s is lorem %d")(("Ada", 36)) // works in Scala 3.4.0 but fails in Scala 3.2.0 and 3.3.0 From e673dd28cab7591d1fce8067b291cfe94fce0bc2 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL 
Date: Wed, 17 Apr 2024 16:41:05 +0200 Subject: [PATCH 138/465] Allow erased and non-erased contextual parameters in methods --- .../dotc/transform/ContextFunctionResults.scala | 5 ++--- tests/pos/i20206.scala | 15 +++++++++++++++ 2 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 tests/pos/i20206.scala diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala index 80115ca651bb..c31b2673e04a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala +++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala @@ -4,6 +4,7 @@ package transform import core.* import Contexts.*, Symbols.*, Types.*, Annotations.*, Constants.*, Phases.* +import Decorators.* import StdNames.nme import ast.untpd import ast.tpd.* @@ -115,11 +116,9 @@ object ContextFunctionResults: else tree match case Select(qual, name) => if name == nme.apply then - qual.tpe.nn.dealias match + qual.tpe.nn.widenDealias match case defn.FunctionTypeOfMethod(mt) if mt.isContextualMethod => integrateSelect(qual, n + 1) - case _ if defn.isContextFunctionClass(tree.symbol.maybeOwner) => // for TermRefs - integrateSelect(qual, n + 1) case _ => n > 0 && contextResultCount(tree.symbol) >= n else diff --git a/tests/pos/i20206.scala b/tests/pos/i20206.scala new file mode 100644 index 000000000000..690142140d23 --- /dev/null +++ b/tests/pos/i20206.scala @@ -0,0 +1,15 @@ +//> using options -experimental -Yno-experimental + +import language.experimental.erasedDefinitions + +erased trait A +trait B + +def foo1: A ?=> B ?=> Nothing = ??? +def foo2: (A, B) ?=> Nothing = ??? +def foo3: (B, A) ?=> Nothing = ??? 
+ +def bar: (A, B) ?=> Nothing = + foo1 + foo2 + foo3 From 482bec85dd550a0e99a32ebf8021abc3d9e399d8 Mon Sep 17 00:00:00 2001 From: Yoonjae Jeon Date: Thu, 18 Apr 2024 02:46:51 +0900 Subject: [PATCH 139/465] Use val for classCtx in superOrThisCallContext --- compiler/src/dotty/tools/dotc/core/Contexts.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index ab6fda68a09e..6824fdb314dc 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -437,7 +437,7 @@ object Contexts { /** The super- or this-call context with given owner and locals. */ private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = { - var classCtx = outersIterator.dropWhile(!_.isClassDefContext).next() + val classCtx = outersIterator.dropWhile(!_.isClassDefContext).next() classCtx.outer.fresh.setOwner(owner) .setScope(locals) .setMode(classCtx.mode) From d1931aae1b87fbe87e429e6a08b0cb2a07ccb8f6 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 18 Apr 2024 02:03:54 +0200 Subject: [PATCH 140/465] Cache `myReduced = NoType` if match type reduction overflowed --- compiler/src/dotty/tools/dotc/core/Types.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index e5cdd3b0613d..5a4618edb3a3 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5173,17 +5173,17 @@ object Types extends TypeUtils { trace(i"reduce match type $this $hashCode", matchTypes, show = true): withMode(Mode.Type): setReductionContext() - def matchCases(cmp: MatchReducer): Type = + TypeComparer.reduceMatchWith: cmp => val saved = ctx.typerState.snapshot() try - cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze(_))) + 
cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze)) catch case ex: Throwable => + myReduced = NoType handleRecursive("reduce type ", i"$scrutinee match ...", ex) finally ctx.typerState.resetTo(saved) // this drops caseLambdas in constraint and undoes any typevar // instantiations during matchtype reduction - TypeComparer.reduceMatchWith(matchCases) //else println(i"no change for $this $hashCode / $myReduced") myReduced.nn From d7b69a36793d86eb831e36b5dfd87ff1eff4e543 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 18 Apr 2024 02:06:26 +0200 Subject: [PATCH 141/465] Only set match type reduction context if not up to date --- compiler/src/dotty/tools/dotc/core/Types.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 5a4618edb3a3..416dcef733ee 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5124,6 +5124,7 @@ object Types extends TypeUtils { } def isUpToDate: Boolean = + (reductionContext ne null) && reductionContext.keysIterator.forall: tp => reductionContext(tp) `eq` contextInfo(tp) @@ -5169,10 +5170,10 @@ object Types extends TypeUtils { then record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") + if !isUpToDate then setReductionContext() myReduced = trace(i"reduce match type $this $hashCode", matchTypes, show = true): withMode(Mode.Type): - setReductionContext() TypeComparer.reduceMatchWith: cmp => val saved = ctx.typerState.snapshot() try From 4a4ad17804865831855c988c86e4705f9daaee52 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 18 Apr 2024 02:15:13 +0200 Subject: [PATCH 142/465] Restructure `MatchType#reduced` try catch --- .../src/dotty/tools/dotc/core/Types.scala | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git 
a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 416dcef733ee..2646b8eb76c3 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5171,20 +5171,19 @@ object Types extends TypeUtils { record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") if !isUpToDate then setReductionContext() - myReduced = - trace(i"reduce match type $this $hashCode", matchTypes, show = true): + val saved = ctx.typerState.snapshot() + try + myReduced = trace(i"reduce match type $this $hashCode", matchTypes, show = true): withMode(Mode.Type): TypeComparer.reduceMatchWith: cmp => - val saved = ctx.typerState.snapshot() - try - cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze)) - catch case ex: Throwable => - myReduced = NoType - handleRecursive("reduce type ", i"$scrutinee match ...", ex) - finally - ctx.typerState.resetTo(saved) - // this drops caseLambdas in constraint and undoes any typevar - // instantiations during matchtype reduction + cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze)) + catch case ex: Throwable => + myReduced = NoType + handleRecursive("reduce type ", i"$scrutinee match ...", ex) + finally + ctx.typerState.resetTo(saved) + // this drops caseLambdas in constraint and undoes any typevar + // instantiations during matchtype reduction //else println(i"no change for $this $hashCode / $myReduced") myReduced.nn From 95a8a9c03c95b189210578c43dd8485e361358c0 Mon Sep 17 00:00:00 2001 From: Jan Chyb <48855024+jchyb@users.noreply.github.com> Date: Thu, 18 Apr 2024 11:18:46 +0200 Subject: [PATCH 143/465] Introduce Best Effort compilation options (#17582) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR introduces 2 new experimental options: `-Ybest-effort` and `-Ywith-best-effort-tasty`. 
It also introduces the Best Effort TASTy format (BETASTy), a TASTy aligned file format able to hold some errored trees. Behaviour of the options and the format is documented as part of this PR in the `best-effort-compilation.md` docs file. `-Ybest-effort` allows to create .betasty files in a failing compilation, while `-Ywith-best-effort-tasty` allows to read them in subsequent compilations, so that we can get information needed for IDEs from broken modules, and modules depending on them. It is worth noting that these compilation depending on Betty will never reach transform phases, and will never produce any artifacts other then betasty and semanticdb. My overall aim was to be able to handle as many cases, with little as little maintainance necessary as possible. This is for example why pretty much the only required phases are Parser and Typer - they are enough for, as far as I know, all necessary metals completions and I did not find any issues with setting their products (best effort tasty files) as dependencies. Requiring, for example, PostTyper, would require for the errored trees to be able to somehow pass through that phase, meaning a large investment from me into working the existing known error cases through there (muddling the codebase in the process) and possibly from the maintainers working on both Typer (to be able to produce „correct” error trees) and PostTyper (to be able to consume „correct” errored trees), which would obviously make the entire initiative dreadful. This is also why any tests are able to be put into a blacklist file, in case something changes and a neg test will not pass, or a new test will be added as part of issue fix that does not play well with the best-effort features. 
--- .../dotty/tools/backend/jvm/GenBCode.scala | 2 + .../dotty/tools/backend/sjs/GenSJSIR.scala | 2 +- compiler/src/dotty/tools/dotc/Driver.scala | 7 +- compiler/src/dotty/tools/dotc/Run.scala | 10 +- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 4 +- compiler/src/dotty/tools/dotc/ast/tpd.scala | 4 +- .../dotc/classpath/DirectoryClassPath.scala | 2 +- .../tools/dotc/classpath/FileUtils.scala | 8 + .../tools/dotc/config/ScalaSettings.scala | 3 + .../src/dotty/tools/dotc/core/Contexts.scala | 18 ++ .../tools/dotc/core/DenotTransformers.scala | 2 + .../dotty/tools/dotc/core/Denotations.scala | 3 +- .../tools/dotc/core/SymDenotations.scala | 18 +- .../dotty/tools/dotc/core/SymbolLoaders.scala | 42 ++- .../dotty/tools/dotc/core/TypeErasure.scala | 7 +- .../src/dotty/tools/dotc/core/Types.scala | 8 +- .../core/tasty/BestEffortTastyWriter.scala | 43 +++ .../dotc/core/tasty/DottyUnpickler.scala | 31 ++- .../dotc/core/tasty/TastyAnsiiPrinter.scala | 4 +- .../dotc/core/tasty/TastyClassName.scala | 4 +- .../dotc/core/tasty/TastyHTMLPrinter.scala | 2 +- .../tools/dotc/core/tasty/TastyPickler.scala | 9 +- .../tools/dotc/core/tasty/TastyPrinter.scala | 29 +- .../dotc/core/tasty/TastyUnpickler.scala | 27 +- .../tools/dotc/core/tasty/TreePickler.scala | 190 ++++++++----- .../tools/dotc/core/tasty/TreeUnpickler.scala | 19 +- .../dotty/tools/dotc/fromtasty/TASTYRun.scala | 2 + .../tools/dotc/fromtasty/TastyFileUtil.scala | 14 +- .../dotty/tools/dotc/inlines/Inliner.scala | 2 +- .../tools/dotc/quoted/PickledQuotes.scala | 8 +- compiler/src/dotty/tools/dotc/report.scala | 16 ++ .../dotc/semanticdb/ExtractSemanticDB.scala | 2 +- .../tools/dotc/transform/MacroTransform.scala | 2 + .../tools/dotc/transform/MegaPhase.scala | 4 + .../dotty/tools/dotc/transform/Pickler.scala | 74 ++++-- .../src/dotty/tools/dotc/typer/Typer.scala | 1 + .../src/dotty/tools/io/FileExtension.scala | 5 + .../besteffort/BestEffortTastyFormat.scala | 45 ++++ .../BestEffortTastyHeaderUnpickler.scala | 175 
++++++++++++ .../dotc/neg-best-effort-pickling.blacklist | 19 ++ .../dotc/neg-best-effort-unpickling.blacklist | 17 ++ compiler/test/dotty/tools/TestSources.scala | 8 + .../dotc/BestEffortCompilationTests.scala | 59 +++++ .../dotc/core/tasty/CommentPicklingTest.scala | 2 +- .../dotc/core/tasty/PathPicklingTest.scala | 2 +- .../dotty/tools/vulpix/ParallelTesting.scala | 250 +++++++++++++++++- .../tools/vulpix/TestConfiguration.scala | 1 + .../internals/best-effort-compilation.md | 88 ++++++ docs/sidebar.yml | 1 + .../src/main/dotty/tools/pc/TastyUtils.scala | 2 +- project/Build.scala | 2 + .../tools/tasty/TastyHeaderUnpickler.scala | 92 ++++--- .../err/ExecutedMacro.scala | 2 + .../err/FailingTransparentInline.scala | 11 + .../main/Main.scala | 2 + .../err/BrokenMacros.scala | 13 + .../main/Main.scala | 3 + .../err/MirrorTypes.scala | 2 + .../main/MirrorExec.scala | 7 + .../err/SimpleTypeError.scala | 2 + .../simple-type-error/main/Main.scala | 2 + 61 files changed, 1202 insertions(+), 233 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala create mode 100644 compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyFormat.scala create mode 100644 compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyHeaderUnpickler.scala create mode 100644 compiler/test/dotc/neg-best-effort-pickling.blacklist create mode 100644 compiler/test/dotc/neg-best-effort-unpickling.blacklist create mode 100644 compiler/test/dotty/tools/dotc/BestEffortCompilationTests.scala create mode 100644 docs/_docs/internals/best-effort-compilation.md create mode 100644 tests/best-effort/broken-macro-executed-in-dependency/err/ExecutedMacro.scala create mode 100644 tests/best-effort/broken-macro-executed-in-dependency/err/FailingTransparentInline.scala create mode 100644 tests/best-effort/broken-macro-executed-in-dependency/main/Main.scala create mode 100644 tests/best-effort/broken-macro-executed-in-dependent/err/BrokenMacros.scala create mode 100644 
tests/best-effort/broken-macro-executed-in-dependent/main/Main.scala create mode 100644 tests/best-effort/mirrors-in-dependency/err/MirrorTypes.scala create mode 100644 tests/best-effort/mirrors-in-dependency/main/MirrorExec.scala create mode 100644 tests/best-effort/simple-type-error/err/SimpleTypeError.scala create mode 100644 tests/best-effort/simple-type-error/main/Main.scala diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index d9f413a5d5ab..a616241d9a3e 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -21,6 +21,8 @@ class GenBCode extends Phase { self => override def description: String = GenBCode.description + override def isRunnable(using Context) = super.isRunnable && !ctx.usedBestEffortTasty + private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { val old = superCallsMap.getOrElse(sym, Set.empty) diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala index 2c5a6639dc8b..fbb9042affe7 100644 --- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala +++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala @@ -12,7 +12,7 @@ class GenSJSIR extends Phase { override def description: String = GenSJSIR.description override def isRunnable(using Context): Boolean = - super.isRunnable && ctx.settings.scalajs.value + super.isRunnable && ctx.settings.scalajs.value && !ctx.usedBestEffortTasty def run(using Context): Unit = new JSCodeGen().run() diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index f2f104d1c387..580c0eae1810 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -39,6 +39,9 @@ class Driver { catch case ex: FatalError => report.error(ex.getMessage.nn) // signals that 
we should fail compilation. + case ex: Throwable if ctx.usedBestEffortTasty => + report.bestEffortError(ex, "Some best-effort tasty files were not able to be read.") + throw ex case ex: TypeError if !runOrNull.enrichedErrorMessage => println(runOrNull.enrichErrorMessage(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}")) throw ex @@ -102,8 +105,8 @@ class Driver { None else file.ext match case FileExtension.Jar => Some(file.path) - case FileExtension.Tasty => - TastyFileUtil.getClassPath(file) match + case FileExtension.Tasty | FileExtension.Betasty => + TastyFileUtil.getClassPath(file, ctx.withBestEffortTasty) match case Some(classpath) => Some(classpath) case _ => report.error(em"Could not load classname from: ${file.path}") diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 02a0618bb6e9..64e216a39b2a 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -329,9 +329,13 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint val profiler = ctx.profiler var phasesWereAdjusted = false + var forceReachPhaseMaybe = + if (ctx.isBestEffort && phases.exists(_.phaseName == "typer")) Some("typer") + else None + for phase <- allPhases do doEnterPhase(phase) - val phaseWillRun = phase.isRunnable + val phaseWillRun = phase.isRunnable || forceReachPhaseMaybe.nonEmpty if phaseWillRun then Stats.trackTime(s"phase time ms/$phase") { val start = System.currentTimeMillis @@ -344,6 +348,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint def printCtx(unit: CompilationUnit) = phase.printingContext( ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) lastPrintedTree = printTree(lastPrintedTree)(using printCtx(unit)) + + if forceReachPhaseMaybe.contains(phase.phaseName) then + forceReachPhaseMaybe = None + report.informTime(s"$phase ", start) Stats.record(s"total trees at end of $phase", 
ast.Trees.ntrees) for (unit <- units) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index a1bba544cc06..34c87eedb081 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -919,12 +919,12 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => else cpy.PackageDef(tree)(pid, slicedStats) :: Nil case tdef: TypeDef => val sym = tdef.symbol - assert(sym.isClass) + assert(sym.isClass || ctx.tolerateErrorsForBestEffort) if (cls == sym || cls == sym.linkedClass) tdef :: Nil else Nil case vdef: ValDef => val sym = vdef.symbol - assert(sym.is(Module)) + assert(sym.is(Module) || ctx.tolerateErrorsForBestEffort) if (cls == sym.companionClass || cls == sym.moduleClass) vdef :: Nil else Nil case tree => diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 13abfae0166c..1716385410e6 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -47,7 +47,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _: RefTree | _: GenericApply | _: Inlined | _: Hole => ta.assignType(untpd.Apply(fn, args), fn, args) case _ => - assert(ctx.reporter.errorsReported) + assert(ctx.reporter.errorsReported || ctx.tolerateErrorsForBestEffort) ta.assignType(untpd.Apply(fn, args), fn, args) def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match @@ -56,7 +56,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _: RefTree | _: GenericApply => ta.assignType(untpd.TypeApply(fn, args), fn, args) case _ => - assert(ctx.reporter.errorsReported, s"unexpected tree for type application: $fn") + assert(ctx.reporter.errorsReported || ctx.tolerateErrorsForBestEffort, s"unexpected tree for type application: $fn") ta.assignType(untpd.TypeApply(fn, args), fn, args) def Literal(const: Constant)(using 
Context): Literal = diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 252f046ab548..aed5be45cb0d 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -285,7 +285,7 @@ case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[BinaryFil protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) protected def isMatchingFile(f: JFile): Boolean = - f.isTasty || (f.isClass && !f.hasSiblingTasty) + f.isTasty || f.isBestEffortTasty || (f.isClass && !f.hasSiblingTasty) private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) } diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index 030b0b61044a..4fe57a722780 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -23,8 +23,12 @@ object FileUtils { def hasTastyExtension: Boolean = file.ext.isTasty + def hasBetastyExtension: Boolean = file.ext.isBetasty + def isTasty: Boolean = !file.isDirectory && hasTastyExtension + def isBestEffortTasty: Boolean = !file.isDirectory && hasBetastyExtension + def isScalaBinary: Boolean = file.isClass || file.isTasty def isScalaOrJavaSource: Boolean = !file.isDirectory && file.ext.isScalaOrJava @@ -55,6 +59,9 @@ object FileUtils { def isTasty: Boolean = file.isFile && file.getName.endsWith(SUFFIX_TASTY) + def isBestEffortTasty: Boolean = file.isFile && file.getName.endsWith(SUFFIX_BETASTY) + + /** * Returns if there is an existing sibling `.tasty` file. 
*/ @@ -69,6 +76,7 @@ object FileUtils { private val SUFFIX_CLASS = ".class" private val SUFFIX_SCALA = ".scala" private val SUFFIX_TASTY = ".tasty" + private val SUFFIX_BETASTY = ".betasty" private val SUFFIX_JAVA = ".java" private val SUFFIX_SIG = ".sig" diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index fc7e61c8ec71..db867f394297 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -414,6 +414,9 @@ private sealed trait YSettings: val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YbestEffort: Setting[Boolean] = BooleanSetting(ForkSetting, "Ybest-effort", "Enable best-effort compilation attempting to produce betasty to the META-INF/best-effort directory, regardless of errors, as part of the pickler phase.") + val YwithBestEffortTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Ywith-best-effort-tasty", "Allow to compile using best-effort tasty files. If such file is used, the compiler will stop after the pickler phase.") + // Experimental language features val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-kind-polymorphism", "Disable kind polymorphism.") val YexplicitNulls: Setting[Boolean] = BooleanSetting(ForkSetting, "Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 6824fdb314dc..29cb83000fde 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -474,6 +474,21 @@ object Contexts { /** Is the flexible types option set? */ def flexibleTypes: Boolean = base.settings.YexplicitNulls.value && !base.settings.YnoFlexibleTypes.value + + /** Is the best-effort option set? */ + def isBestEffort: Boolean = base.settings.YbestEffort.value + + /** Is the with-best-effort-tasty option set? */ + def withBestEffortTasty: Boolean = base.settings.YwithBestEffortTasty.value + + /** Were any best effort tasty dependencies used during compilation? */ + def usedBestEffortTasty: Boolean = base.usedBestEffortTasty + + /** Confirm that a best effort tasty dependency was used during compilation. */ + def setUsedBestEffortTasty(): Unit = base.usedBestEffortTasty = true + + /** Is either the best-effort option set or .betasty files were used during compilation? */ + def tolerateErrorsForBestEffort = isBestEffort || usedBestEffortTasty /** A fresh clone of this context embedded in this context. */ def fresh: FreshContext = freshOver(this) @@ -960,6 +975,9 @@ object Contexts { val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() val files: util.HashMap[TermName, AbstractFile] = util.HashMap() + /** Was best effort file used during compilation? 
*/ + private[core] var usedBestEffortTasty = false + // Types state /** A table for hash consing unique types */ private[core] val uniques: Uniques = Uniques() diff --git a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala index 59982fb99b5f..451561c1b84d 100644 --- a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala +++ b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala @@ -28,6 +28,8 @@ object DenotTransformers { /** The transformation method */ def transform(ref: SingleDenotation)(using Context): SingleDenotation + + override def isRunnable(using Context) = super.isRunnable && !ctx.usedBestEffortTasty } /** A transformer that only transforms the info field of denotations */ diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 8610d2095655..2418aba1978b 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -719,7 +719,8 @@ object Denotations { ctx.runId >= validFor.runId || ctx.settings.YtestPickler.value // mixing test pickler with debug printing can travel back in time || ctx.mode.is(Mode.Printing) // no use to be picky when printing error messages - || symbol.isOneOf(ValidForeverFlags), + || symbol.isOneOf(ValidForeverFlags) + || ctx.tolerateErrorsForBestEffort, s"denotation $this invalid in run ${ctx.runId}. ValidFor: $validFor") var d: SingleDenotation = this while ({ diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index bfaaf78883ae..f01d2faf86c4 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -720,12 +720,16 @@ object SymDenotations { * TODO: Find a more robust way to characterize self symbols, maybe by * spending a Flag on them? 
*/ - final def isSelfSym(using Context): Boolean = owner.infoOrCompleter match { - case ClassInfo(_, _, _, _, selfInfo) => - selfInfo == symbol || - selfInfo.isInstanceOf[Type] && name == nme.WILDCARD - case _ => false - } + final def isSelfSym(using Context): Boolean = + if !ctx.isBestEffort || exists then + owner.infoOrCompleter match { + case ClassInfo(_, _, _, _, selfInfo) => + selfInfo == symbol || + selfInfo.isInstanceOf[Type] && name == nme.WILDCARD + case _ => false + } + else false + /** Is this definition contained in `boundary`? * Same as `ownersIterator contains boundary` but more efficient. @@ -2003,7 +2007,7 @@ object SymDenotations { case p :: parents1 => p.classSymbol match { case pcls: ClassSymbol => builder.addAll(pcls.baseClasses) - case _ => assert(isRefinementClass || p.isError || ctx.mode.is(Mode.Interactive), s"$this has non-class parent: $p") + case _ => assert(isRefinementClass || p.isError || ctx.mode.is(Mode.Interactive) || ctx.tolerateErrorsForBestEffort, s"$this has non-class parent: $p") } traverse(parents1) case nil => diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index d85708024ec6..fdc1ba9697d0 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -7,7 +7,7 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal -import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension +import dotty.tools.dotc.classpath.FileUtils.{hasTastyExtension, hasBetastyExtension} import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions @@ -26,6 +26,7 @@ import parsing.JavaParsers.OutlineJavaParser import parsing.Parsers.OutlineParser import dotty.tools.tasty.{TastyHeaderUnpickler, UnpickleException, UnpicklerConfig, TastyVersion} import dotty.tools.dotc.core.tasty.TastyUnpickler +import 
dotty.tools.tasty.besteffort.BestEffortTastyHeaderUnpickler object SymbolLoaders { import ast.untpd.* @@ -198,7 +199,7 @@ object SymbolLoaders { enterToplevelsFromSource(owner, nameOf(classRep), src) case (Some(bin), _) => val completer = - if bin.hasTastyExtension then ctx.platform.newTastyLoader(bin) + if bin.hasTastyExtension || bin.hasBetastyExtension then ctx.platform.newTastyLoader(bin) else ctx.platform.newClassLoader(bin) enterClassAndModule(owner, nameOf(classRep), completer) } @@ -261,7 +262,8 @@ object SymbolLoaders { (idx + str.TOPLEVEL_SUFFIX.length + 1 != name.length || !name.endsWith(str.TOPLEVEL_SUFFIX)) } - def maybeModuleClass(classRep: ClassRepresentation): Boolean = classRep.name.last == '$' + def maybeModuleClass(classRep: ClassRepresentation): Boolean = + classRep.name.nonEmpty && classRep.name.last == '$' private def enterClasses(root: SymDenotation, packageName: String, flat: Boolean)(using Context) = { def isAbsent(classRep: ClassRepresentation) = @@ -416,34 +418,45 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader { } class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { - + val isBestEffortTasty = tastyFile.hasBetastyExtension private val unpickler: tasty.DottyUnpickler = handleUnpicklingExceptions: val tastyBytes = tastyFile.toByteArray - new tasty.DottyUnpickler(tastyFile, tastyBytes) // reads header and name table + new tasty.DottyUnpickler(tastyFile, tastyBytes, isBestEffortTasty) // reads header and name table val compilationUnitInfo: CompilationUnitInfo | Null = unpickler.compilationUnitInfo - def description(using Context): String = "TASTy file " + tastyFile.toString + def description(using Context): String = + if isBestEffortTasty then "Best Effort TASTy file " + tastyFile.toString + else "TASTy file " + tastyFile.toString override def doComplete(root: SymDenotation)(using Context): Unit = handleUnpicklingExceptions: - checkTastyUUID() val (classRoot, moduleRoot) = rootDenots(root.asClass) 
- unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) - if mayLoadTreesFromTasty then - classRoot.classSymbol.rootTreeOrProvider = unpickler - moduleRoot.classSymbol.rootTreeOrProvider = unpickler + if (!isBestEffortTasty || ctx.withBestEffortTasty) then + val tastyBytes = tastyFile.toByteArray + unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) + if mayLoadTreesFromTasty || isBestEffortTasty then + classRoot.classSymbol.rootTreeOrProvider = unpickler + moduleRoot.classSymbol.rootTreeOrProvider = unpickler + if isBestEffortTasty then + checkBeTastyUUID(tastyFile, tastyBytes) + ctx.setUsedBestEffortTasty() + else + checkTastyUUID() + else + report.error(em"Cannot read Best Effort TASTy $tastyFile without the ${ctx.settings.YwithBestEffortTasty.name} option") private def handleUnpicklingExceptions[T](thunk: =>T): T = try thunk catch case e: RuntimeException => + val tastyType = if (isBestEffortTasty) "Best Effort TASTy" else "TASTy" val message = e match case e: UnpickleException => - s"""TASTy file ${tastyFile.canonicalPath} could not be read, failing with: + s"""$tastyType file ${tastyFile.canonicalPath} could not be read, failing with: | ${Option(e.getMessage).getOrElse("")}""".stripMargin case _ => - s"""TASTy file ${tastyFile.canonicalPath} is broken, reading aborted with ${e.getClass} + s"""$tastyType file ${tastyFile.canonicalPath} is broken, reading aborted with ${e.getClass} | ${Option(e.getMessage).getOrElse("")}""".stripMargin throw IOException(message, e) @@ -460,6 +473,9 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { // tasty file compiled by `-Yearly-tasty-output-write` comes from an early output jar. 
report.inform(s"No classfiles found for $tastyFile when checking TASTy UUID") + private def checkBeTastyUUID(tastyFile: AbstractFile, tastyBytes: Array[Byte])(using Context): Unit = + new BestEffortTastyHeaderUnpickler(tastyBytes).readHeader() + private def mayLoadTreesFromTasty(using Context): Boolean = ctx.settings.YretainTrees.value || ctx.settings.fromTasty.value } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 48fb1bab2da1..96912c4f7637 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -747,7 +747,9 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst assert(!etp.isInstanceOf[WildcardType] || inSigName, i"Unexpected WildcardType erasure for $tp") etp - /** Like translucentSuperType, but issue a fatal error if it does not exist. */ + /** Like translucentSuperType, but issue a fatal error if it does not exist. + * If using the best-effort option, the fatal error will not be issued. + */ private def checkedSuperType(tp: TypeProxy)(using Context): Type = val tp1 = tp.translucentSuperType if !tp1.exists then @@ -756,7 +758,8 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst MissingType(tycon.prefix, tycon.name) case _ => TypeError(em"Cannot resolve reference to $tp") - throw typeErr + if ctx.isBestEffort then report.error(typeErr.toMessage) + else throw typeErr tp1 /** Widen term ref, skipping any `()` parameter of an eventual getter. Used to erase a TermRef. 
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index e5cdd3b0613d..cd5fd83a0198 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3149,7 +3149,8 @@ object Types extends TypeUtils { if (ctx.erasedTypes) tref else cls.info match { case cinfo: ClassInfo => cinfo.selfType - case _: ErrorType | NoType if ctx.mode.is(Mode.Interactive) => cls.info + case _: ErrorType | NoType + if ctx.mode.is(Mode.Interactive) || ctx.tolerateErrorsForBestEffort => cls.info // can happen in IDE if `cls` is stale } @@ -3719,8 +3720,9 @@ object Types extends TypeUtils { def apply(tp1: Type, tp2: Type, soft: Boolean)(using Context): OrType = { def where = i"in union $tp1 | $tp2" - expectValueTypeOrWildcard(tp1, where) - expectValueTypeOrWildcard(tp2, where) + if !ctx.usedBestEffortTasty then + expectValueTypeOrWildcard(tp1, where) + expectValueTypeOrWildcard(tp2, where) assertUnerased() unique(new CachedOrType(tp1, tp2, soft)) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala b/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala new file mode 100644 index 000000000000..9cdfb042b8fb --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala @@ -0,0 +1,43 @@ +package dotty.tools.dotc +package core +package tasty + +import scala.language.unsafeNulls +import java.nio.file.{Path as JPath, Files as JFiles} +import java.nio.channels.ClosedByInterruptException +import java.io.DataOutputStream +import dotty.tools.io.{File, PlainFile} +import dotty.tools.dotc.core.Contexts.Context + +object BestEffortTastyWriter: + + def write(dir: JPath, units: List[CompilationUnit])(using Context): Unit = + if !JFiles.exists(dir) then JFiles.createDirectories(dir) + + units.foreach { unit => + unit.pickled.foreach { (clz, binary) => + val parts = clz.fullName.mangledString.split('.') + val outPath = 
outputPath(parts.toList, dir) + val outTastyFile = new PlainFile(new File(outPath)) + val outstream = new DataOutputStream(outTastyFile.bufferedOutput) + try outstream.write(binary()) + catch case ex: ClosedByInterruptException => + try + outTastyFile.delete() // don't leave an empty or half-written tastyfile around after an interrupt + catch + case _: Throwable => + throw ex + finally outstream.close() + } + } + + def outputPath(parts: List[String], acc: JPath): JPath = + parts match + case Nil => throw new Exception("Invalid class name") + case last :: Nil => + val name = last.stripSuffix("$") + acc.resolve(s"$name.betasty") + case pkg :: tail => + val next = acc.resolve(pkg) + if !JFiles.exists(next) then JFiles.createDirectory(next) + outputPath(tail, next) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index 4f083b09b015..3605a6cc9515 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -23,10 +23,14 @@ object DottyUnpickler { /** Exception thrown if classfile is corrupted */ class BadSignature(msg: String) extends RuntimeException(msg) - class TreeSectionUnpickler(compilationUnitInfo: CompilationUnitInfo, posUnpickler: Option[PositionUnpickler], commentUnpickler: Option[CommentUnpickler]) - extends SectionUnpickler[TreeUnpickler](ASTsSection) { + class TreeSectionUnpickler( + compilationUnitInfo: CompilationUnitInfo, + posUnpickler: Option[PositionUnpickler], + commentUnpickler: Option[CommentUnpickler], + isBestEffortTasty: Boolean + ) extends SectionUnpickler[TreeUnpickler](ASTsSection) { def unpickle(reader: TastyReader, nameAtRef: NameTable): TreeUnpickler = - new TreeUnpickler(reader, nameAtRef, compilationUnitInfo, posUnpickler, commentUnpickler) + new TreeUnpickler(reader, nameAtRef, compilationUnitInfo, posUnpickler, commentUnpickler, isBestEffortTasty) } class 
PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler](PositionsSection) { @@ -46,15 +50,21 @@ object DottyUnpickler { } /** A class for unpickling Tasty trees and symbols. - * @param tastyFile tasty file from which we unpickle (used for CompilationUnitInfo) - * @param bytes the bytearray containing the Tasty file from which we unpickle - * @param mode the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree) + * @param tastyFile tasty file from which we unpickle (used for CompilationUnitInfo) + * @param bytes the bytearray containing the Tasty file from which we unpickle + * @param isBestEffortTasty specifies whether file should be unpickled as a Best Effort TASTy + * @param mode the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree) */ -class DottyUnpickler(tastyFile: AbstractFile, bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider { +class DottyUnpickler( + tastyFile: AbstractFile, + bytes: Array[Byte], + isBestEffortTasty: Boolean, + mode: UnpickleMode = UnpickleMode.TopLevel +) extends ClassfileParser.Embedded with tpd.TreeProvider { import tpd.* import DottyUnpickler.* - val unpickler: TastyUnpickler = new TastyUnpickler(bytes) + val unpickler: TastyUnpickler = new TastyUnpickler(bytes, isBestEffortTasty) val tastyAttributes: Attributes = unpickler.unpickle(new AttributesSectionUnpickler) @@ -67,7 +77,7 @@ class DottyUnpickler(tastyFile: AbstractFile, bytes: Array[Byte], mode: Unpickle private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler) private val commentUnpicklerOpt = unpickler.unpickle(new CommentsSectionUnpickler) - private val treeUnpickler = unpickler.unpickle(treeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt)).get + private val treeUnpickler = unpickler.unpickle(treeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt, isBestEffortTasty)).get /** Enter all toplevel classes and 
objects into their scopes * @param roots a set of SymDenotations that should be overwritten by unpickling @@ -78,8 +88,9 @@ class DottyUnpickler(tastyFile: AbstractFile, bytes: Array[Byte], mode: Unpickle protected def treeSectionUnpickler( posUnpicklerOpt: Option[PositionUnpickler], commentUnpicklerOpt: Option[CommentUnpickler], + withBestEffortTasty: Boolean ): TreeSectionUnpickler = - new TreeSectionUnpickler(compilationUnitInfo, posUnpicklerOpt, commentUnpicklerOpt) + new TreeSectionUnpickler(compilationUnitInfo, posUnpicklerOpt, commentUnpicklerOpt, withBestEffortTasty) protected def computeRootTrees(using Context): List[Tree] = treeUnpickler.unpickle(mode) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala index a3d8cedacb4a..3755b6e8b4b6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala @@ -2,9 +2,9 @@ package dotty.tools.dotc package core package tasty -class TastyAnsiiPrinter(bytes: Array[Byte], testPickler: Boolean) extends TastyPrinter(bytes, testPickler) { +class TastyAnsiiPrinter(bytes: Array[Byte], isBestEffortTasty: Boolean, testPickler: Boolean) extends TastyPrinter(bytes, isBestEffortTasty, testPickler) { - def this(bytes: Array[Byte]) = this(bytes, testPickler = false) + def this(bytes: Array[Byte]) = this(bytes, isBestEffortTasty = false, testPickler = false) override protected def nameStr(str: String): String = Console.MAGENTA + str + Console.RESET override protected def treeStr(str: String): String = Console.YELLOW + str + Console.RESET diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala index 0a7068b65445..f9d8e10cf16a 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala @@ -12,9 
+12,9 @@ import TastyUnpickler.* import dotty.tools.tasty.TastyFormat.ASTsSection /** Reads the package and class name of the class contained in this TASTy */ -class TastyClassName(bytes: Array[Byte]) { +class TastyClassName(bytes: Array[Byte], isBestEffortTasty: Boolean = false) { - val unpickler: TastyUnpickler = new TastyUnpickler(bytes) + val unpickler: TastyUnpickler = new TastyUnpickler(bytes, isBestEffortTasty) import unpickler.{nameAtRef, unpickle} /** Returns a tuple with the package and class names */ diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala index b234705413ae..b9cba2e09937 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc package core package tasty -class TastyHTMLPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes) { +class TastyHTMLPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes, isBestEffortTasty = false, testPickler = false) { override protected def nameStr(str: String): String = s"$str" override protected def treeStr(str: String): String = s"$str" override protected def lengthStr(str: String): String = s"$str" diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala index 214f7a5f6702..e35ed5bb2466 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala @@ -6,6 +6,7 @@ package tasty import scala.language.unsafeNulls import dotty.tools.tasty.{TastyBuffer, TastyFormat, TastyHash} +import dotty.tools.tasty.besteffort.BestEffortTastyFormat import TastyFormat.* import TastyBuffer.* @@ -16,7 +17,7 @@ import Decorators.* object TastyPickler: private val versionString = s"Scala ${config.Properties.simpleVersionString}" -class TastyPickler(val rootCls: 
ClassSymbol) { +class TastyPickler(val rootCls: ClassSymbol, isBestEffortTasty: Boolean) { private val sections = new mutable.ArrayBuffer[(NameRef, TastyBuffer)] @@ -42,10 +43,12 @@ class TastyPickler(val rootCls: ClassSymbol) { val uuidHi: Long = otherSectionHashes.fold(0L)(_ ^ _) val headerBuffer = { - val buf = new TastyBuffer(header.length + TastyPickler.versionString.length + 32) - for (ch <- header) buf.writeByte(ch.toByte) + val fileHeader = if isBestEffortTasty then BestEffortTastyFormat.bestEffortHeader else header + val buf = new TastyBuffer(fileHeader.length + TastyPickler.versionString.length + 32) + for (ch <- fileHeader) buf.writeByte(ch.toByte) buf.writeNat(MajorVersion) buf.writeNat(MinorVersion) + if isBestEffortTasty then buf.writeNat(BestEffortTastyFormat.PatchVersion) buf.writeNat(ExperimentalVersion) buf.writeUtf8(TastyPickler.versionString) buf.writeUncompressedLong(uuidLow) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index 6850d87d1f4d..72f6895f122c 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -23,34 +23,37 @@ import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension object TastyPrinter: def showContents(bytes: Array[Byte], noColor: Boolean): String = - showContents(bytes, noColor, testPickler = false) + showContents(bytes, noColor, isBestEffortTasty = false, testPickler = false) - def showContents(bytes: Array[Byte], noColor: Boolean, testPickler: Boolean = false): String = + def showContents(bytes: Array[Byte], noColor: Boolean, isBestEffortTasty: Boolean, testPickler: Boolean = false): String = val printer = - if noColor then new TastyPrinter(bytes, testPickler) - else new TastyAnsiiPrinter(bytes, testPickler) + if noColor then new TastyPrinter(bytes, isBestEffortTasty, testPickler) + else new TastyAnsiiPrinter(bytes, isBestEffortTasty, testPickler) 
printer.showContents() def main(args: Array[String]): Unit = { // TODO: Decouple CliCommand from Context and use CliCommand.distill? + val betastyOpt = "-Ywith-best-effort-tasty" val lineWidth = 80 val line = "-" * lineWidth val noColor = args.contains("-color:never") + val allowBetasty = args.contains(betastyOpt) var printLastLine = false - def printTasty(fileName: String, bytes: Array[Byte]): Unit = + def printTasty(fileName: String, bytes: Array[Byte], isBestEffortTasty: Boolean): Unit = println(line) println(fileName) println(line) - println(showContents(bytes, noColor)) + println(showContents(bytes, noColor, isBestEffortTasty, testPickler = false)) println() printLastLine = true for arg <- args do if arg == "-color:never" then () // skip + else if arg == betastyOpt then () // skip else if arg.startsWith("-") then println(s"bad option '$arg' was ignored") - else if arg.endsWith(".tasty") then + else if arg.endsWith(".tasty") || (allowBetasty && arg.endsWith(".betasty")) then val path = Paths.get(arg) if Files.exists(path) then - printTasty(arg, Files.readAllBytes(path).nn) + printTasty(arg, Files.readAllBytes(path).nn, arg.endsWith(".betasty")) else println("File not found: " + arg) System.exit(1) @@ -58,7 +61,7 @@ object TastyPrinter: val jar = JarArchive.open(Path(arg), create = false) try for file <- jar.iterator() if file.hasTastyExtension do - printTasty(s"$arg ${file.path}", file.toByteArray) + printTasty(s"$arg ${file.path}", file.toByteArray, isBestEffortTasty = false) finally jar.close() else println(s"Not a '.tasty' or '.jar' file: $arg") @@ -68,11 +71,11 @@ object TastyPrinter: println(line) } -class TastyPrinter(bytes: Array[Byte], val testPickler: Boolean) { +class TastyPrinter(bytes: Array[Byte], isBestEffortTasty: Boolean, val testPickler: Boolean) { - def this(bytes: Array[Byte]) = this(bytes, testPickler = false) + def this(bytes: Array[Byte]) = this(bytes, isBestEffortTasty = false, testPickler = false) - class TastyPrinterUnpickler extends 
TastyUnpickler(bytes) { + class TastyPrinterUnpickler extends TastyUnpickler(bytes, isBestEffortTasty) { var namesStart: Addr = uninitialized var namesEnd: Addr = uninitialized override def readNames() = { @@ -130,7 +133,7 @@ class TastyPrinter(bytes: Array[Byte], val testPickler: Boolean) { }) class TreeSectionUnpickler(sb: StringBuilder) extends PrinterSectionUnpickler[Unit](ASTsSection) { - import dotty.tools.tasty.TastyFormat.* + import dotty.tools.tasty.besteffort.BestEffortTastyFormat.* // superset on TastyFormat def unpickle0(reader: TastyReader)(using refs: NameRefs): Unit = { import reader.* var indent = 0 diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index 6fe648ee98d3..f034f03298b1 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -2,9 +2,12 @@ package dotty.tools.dotc package core package tasty +import java.util.UUID import scala.language.unsafeNulls import dotty.tools.tasty.{TastyFormat, TastyVersion, TastyBuffer, TastyReader, TastyHeaderUnpickler, UnpicklerConfig} +import dotty.tools.tasty.besteffort.{BestEffortTastyHeader, BestEffortTastyHeaderUnpickler} + import TastyFormat.NameTags.*, TastyFormat.nameTagToString import TastyBuffer.NameRef @@ -14,6 +17,18 @@ import NameKinds.* import dotty.tools.tasty.TastyHeader import dotty.tools.tasty.TastyBuffer.Addr +case class CommonTastyHeader( + uuid: UUID, + majorVersion: Int, + minorVersion: Int, + experimentalVersion: Int, + toolingVersion: String +): + def this(h: TastyHeader) = + this(h.uuid, h.majorVersion, h.minorVersion, h.experimentalVersion, h.toolingVersion) + def this(h: BestEffortTastyHeader) = + this(h.uuid, h.majorVersion, h.minorVersion, h.experimentalVersion, h.toolingVersion) + object TastyUnpickler { abstract class SectionUnpickler[R](val name: String) { @@ -63,10 +78,11 @@ object TastyUnpickler { import 
TastyUnpickler.* -class TastyUnpickler(protected val reader: TastyReader) { +class TastyUnpickler(protected val reader: TastyReader, isBestEffortTasty: Boolean) { import reader.* - def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) + def this(bytes: Array[Byte]) = this(new TastyReader(bytes), false) + def this(bytes: Array[Byte], isBestEffortTasty: Boolean) = this(new TastyReader(bytes), isBestEffortTasty) private val sectionReader = new mutable.HashMap[String, TastyReader] val nameAtRef: NameTable = new NameTable @@ -123,8 +139,11 @@ class TastyUnpickler(protected val reader: TastyReader) { result } - val header: TastyHeader = - new TastyHeaderUnpickler(scala3CompilerConfig, reader).readFullHeader() + val header: CommonTastyHeader = + if isBestEffortTasty then + new CommonTastyHeader(new BestEffortTastyHeaderUnpickler(scala3CompilerConfig, reader).readFullHeader()) + else + new CommonTastyHeader(new TastyHeaderUnpickler(reader).readFullHeader()) def readNames(): Unit = until(readEnd()) { nameAtRef.add(readNameContents()) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 0a8669292a74..be616b9054ae 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -6,6 +6,7 @@ package tasty import scala.language.unsafeNulls import dotty.tools.tasty.TastyFormat.* +import dotty.tools.tasty.besteffort.BestEffortTastyFormat.ERRORtype import dotty.tools.tasty.TastyBuffer.* import ast.Trees.* @@ -65,6 +66,17 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { fillRef(lengthAddr, currentAddr, relative = true) } + /** There are certain expectations with code which is naturally able to reach + * pickling phase as opposed to one that uses best-effort compilation features. 
+ * When pickling betasty files, we do some custom checks, in case those + * expectations cannot be fulfilled, and if so, then we can try to do something + * else (usually pickle an ERRORtype). + * For regular non best-effort compilation (without -Ybest-effort with thrown errors + * and without using .betasty on classpath), this will always return true. + */ + private inline def passesConditionForErroringBestEffortCode(condition: => Boolean)(using Context): Boolean = + !((ctx.isBestEffort && ctx.reporter.errorsReported) || ctx.usedBestEffortTasty) || condition + def addrOfSym(sym: Symbol): Option[Addr] = symRefs.get(sym) @@ -295,9 +307,13 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { else if tpe.isImplicitMethod then mods |= Implicit pickleMethodic(METHODtype, tpe, mods) case tpe: ParamRef => - assert(pickleParamRef(tpe), s"orphan parameter reference: $tpe") + val pickled = pickleParamRef(tpe) + if !ctx.isBestEffort then assert(pickled, s"orphan parameter reference: $tpe") + else if !pickled then pickleErrorType() case tpe: LazyRef => pickleType(tpe.ref) + case _ if ctx.isBestEffort => + pickleErrorType() } def pickleMethodic(tag: Int, tpe: LambdaType, mods: FlagSet)(using Context): Unit = { @@ -321,8 +337,13 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickled } + def pickleErrorType(): Unit = { + writeByte(ERRORtype) + } + def pickleTpt(tpt: Tree)(using Context): Unit = - pickleTree(tpt) + if passesConditionForErroringBestEffortCode(tpt.isType) then pickleTree(tpt) + else pickleErrorType() def pickleTreeUnlessEmpty(tree: Tree)(using Context): Unit = { if (!tree.isEmpty) pickleTree(tree) @@ -336,39 +357,45 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { def pickleDef(tag: Int, mdef: MemberDef, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(using Context): Unit = { val sym = mdef.symbol - assert(symRefs(sym) == NoAddr, sym) - registerDef(sym) - writeByte(tag) - val addr = 
currentAddr - try - withLength { - pickleName(sym.name) - pickleParams - tpt match { - case _: Template | _: Hole => pickleTree(tpt) - case _ if tpt.isType => pickleTpt(tpt) + def isDefSymPreRegisteredAndTreeHasCorrectStructure() = + symRefs.get(sym) == Some(NoAddr) && // check if symbol id preregistered (with the preRegister method) + !(tag == TYPEDEF && tpt.isInstanceOf[Template] && !tpt.symbol.exists) // in case this is a TEMPLATE, check if we are able to pickle it + + if passesConditionForErroringBestEffortCode(isDefSymPreRegisteredAndTreeHasCorrectStructure()) then + assert(symRefs(sym) == NoAddr, sym) + registerDef(sym) + writeByte(tag) + val addr = currentAddr + try + withLength { + pickleName(sym.name) + pickleParams + tpt match { + case _: Template | _: Hole => pickleTree(tpt) + case _ if tpt.isType => pickleTpt(tpt) + case _ if ctx.isBestEffort => pickleErrorType() + } + if isOutlinePickle && sym.isTerm && isJavaPickle then + // TODO: if we introduce outline typing for Scala definitions + // then we will need to update the check here + pickleElidedUnlessEmpty(rhs, tpt.tpe) + else + pickleTreeUnlessEmpty(rhs) + pickleModifiers(sym, mdef) } - if isOutlinePickle && sym.isTerm && isJavaPickle then - // TODO: if we introduce outline typing for Scala definitions - // then we will need to update the check here - pickleElidedUnlessEmpty(rhs, tpt.tpe) - else - pickleTreeUnlessEmpty(rhs) - pickleModifiers(sym, mdef) - } - catch - case ex: Throwable => - if !ctx.settings.YnoDecodeStacktraces.value - && handleRecursive.underlyingStackOverflowOrNull(ex) != null then - throw StackSizeExceeded(mdef) - else - throw ex - if sym.is(Method) && sym.owner.isClass then - profile.recordMethodSize(sym, (currentAddr.index - addr.index) max 1, mdef.span) - for docCtx <- ctx.docCtx do - val comment = docCtx.docstrings.lookup(sym) - if comment != null then - docStrings(mdef) = comment + catch + case ex: Throwable => + if !ctx.settings.YnoDecodeStacktraces.value + && 
handleRecursive.underlyingStackOverflowOrNull(ex) != null then + throw StackSizeExceeded(mdef) + else + throw ex + if sym.is(Method) && sym.owner.isClass then + profile.recordMethodSize(sym, (currentAddr.index - addr.index) max 1, mdef.span) + for docCtx <- ctx.docCtx do + val comment = docCtx.docstrings.lookup(sym) + if comment != null then + docStrings(mdef) = comment } def pickleParam(tree: Tree)(using Context): Unit = { @@ -398,15 +425,17 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { else try tree match { case Ident(name) => - tree.tpe match { - case tp: TermRef if name != nme.WILDCARD => - // wildcards are pattern bound, need to be preserved as ids. - pickleType(tp) - case tp => - writeByte(if (tree.isType) IDENTtpt else IDENT) - pickleName(name) - pickleType(tp) - } + if passesConditionForErroringBestEffortCode(tree.hasType) then + tree.tpe match { + case tp: TermRef if name != nme.WILDCARD => + // wildcards are pattern bound, need to be preserved as ids. + pickleType(tp) + case tp => + writeByte(if (tree.isType) IDENTtpt else IDENT) + pickleName(name) + pickleType(tp) + } + else pickleErrorType() case This(qual) => // This may be needed when pickling a `This` inside a capture set. See #19662 and #19859. // In this case, we pickle the tree as null.asInstanceOf[tree.tpe]. 
@@ -422,6 +451,8 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { case ThisType(tref) => writeByte(QUALTHIS) pickleTree(qual.withType(tref)) + case _: ErrorType if ctx.isBestEffort => + pickleTree(qual) case _ => pickleCapturedThis case Select(qual, name) => name match { @@ -434,25 +465,31 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleType(tp) } case _ => - val sig = tree.tpe.signature - var ename = tree.symbol.targetName - val selectFromQualifier = - name.isTypeName - || qual.isInstanceOf[Hole] // holes have no symbol - || sig == Signature.NotAMethod // no overload resolution necessary - || !tree.denot.symbol.exists // polymorphic function type - || tree.denot.asSingleDenotation.isRefinedMethod // refined methods have no defining class symbol - if selectFromQualifier then + if passesConditionForErroringBestEffortCode(tree.hasType) then + val sig = tree.tpe.signature + var ename = tree.symbol.targetName + val selectFromQualifier = + name.isTypeName + || qual.isInstanceOf[Hole] // holes have no symbol + || sig == Signature.NotAMethod // no overload resolution necessary + || !tree.denot.symbol.exists // polymorphic function type + || tree.denot.asSingleDenotation.isRefinedMethod // refined methods have no defining class symbol + if selectFromQualifier then + writeByte(if name.isTypeName then SELECTtpt else SELECT) + pickleNameAndSig(name, sig, ename) + pickleTree(qual) + else // select from owner + writeByte(SELECTin) + withLength { + pickleNameAndSig(name, tree.symbol.signature, ename) + pickleTree(qual) + pickleType(tree.symbol.owner.typeRef) + } + else writeByte(if name.isTypeName then SELECTtpt else SELECT) - pickleNameAndSig(name, sig, ename) + val ename = tree.symbol.targetName + pickleNameAndSig(name, Signature.NotAMethod, ename) pickleTree(qual) - else // select from owner - writeByte(SELECTin) - withLength { - pickleNameAndSig(name, tree.symbol.signature, ename) - pickleTree(qual) - 
pickleType(tree.symbol.owner.typeRef) - } } case Apply(fun, args) => if (fun.symbol eq defn.throwMethod) { @@ -480,12 +517,14 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { args.foreach(pickleTpt) } case Literal(const1) => - pickleConstant { - tree.tpe match { - case ConstantType(const2) => const2 - case _ => const1 + if passesConditionForErroringBestEffortCode(tree.hasType) then + pickleConstant { + tree.tpe match { + case ConstantType(const2) => const2 + case _ => const1 + } } - } + else pickleConstant(const1) case Super(qual, mix) => writeByte(SUPER) withLength { @@ -657,19 +696,22 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { writeByte(PACKAGE) withLength { pickleType(pid.tpe); pickleStats(stats) } case tree: TypeTree => - pickleType(tree.tpe) + if passesConditionForErroringBestEffortCode(tree.hasType) then pickleType(tree.tpe) + else pickleErrorType() case SingletonTypeTree(ref) => writeByte(SINGLETONtpt) pickleTree(ref) case RefinedTypeTree(parent, refinements) => if (refinements.isEmpty) pickleTree(parent) else { - val refineCls = refinements.head.symbol.owner.asClass - registerDef(refineCls) - pickledTypes(refineCls.typeRef) = currentAddr - writeByte(REFINEDtpt) - refinements.foreach(preRegister) - withLength { pickleTree(parent); refinements.foreach(pickleTree) } + if passesConditionForErroringBestEffortCode(refinements.head.symbol.exists) then + val refineCls = refinements.head.symbol.owner.asClass + registerDef(refineCls) + pickledTypes(refineCls.typeRef) = currentAddr + writeByte(REFINEDtpt) + refinements.foreach(preRegister) + withLength { pickleTree(parent); refinements.foreach(pickleTree) } + else pickleErrorType() } case AppliedTypeTree(tycon, args) => writeByte(APPLIEDtpt) @@ -735,10 +777,13 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleTree(arg) } } + case other if ctx.isBestEffort => + pickleErrorType() } catch { case ex: TypeError => report.error(ex.toMessage, 
tree.srcPos.focus) + pickleErrorType() case ex: AssertionError => println(i"error when pickling tree $tree") throw ex @@ -840,6 +885,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { // a different toplevel class, it is impossible to pickle a reference to it. // Such annotations will be reconstituted when unpickling the child class. // See tests/pickling/i3149.scala + case _ if ctx.isBestEffort && !ann.symbol.denot.isError => true case _ => ann.symbol == defn.BodyAnnot // inline bodies are reconstituted automatically when unpickling } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 073edb536151..45bd58e3c7c1 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -42,6 +42,7 @@ import scala.collection.mutable import config.Printers.pickling import dotty.tools.tasty.TastyFormat.* +import dotty.tools.tasty.besteffort.BestEffortTastyFormat.ERRORtype import scala.annotation.constructorOnly import scala.annotation.internal.sharable @@ -53,12 +54,14 @@ import scala.compiletime.uninitialized * @param posUnpicklerOpt the unpickler for positions, if it exists * @param commentUnpicklerOpt the unpickler for comments, if it exists * @param attributeUnpicklerOpt the unpickler for attributes, if it exists + * @param isBestEffortTasty decides whether to unpickle as a Best Effort TASTy */ class TreeUnpickler(reader: TastyReader, nameAtRef: NameTable, compilationUnitInfo: CompilationUnitInfo, posUnpicklerOpt: Option[PositionUnpickler], - commentUnpicklerOpt: Option[CommentUnpickler]) { + commentUnpicklerOpt: Option[CommentUnpickler], + isBestEffortTasty: Boolean = false) { import TreeUnpickler.* import tpd.* @@ -444,6 +447,9 @@ class TreeUnpickler(reader: TastyReader, } case FLEXIBLEtype => FlexibleType(readType()) + case _ if isBestEffortTasty => + goto(end) + new 
PreviousErrorType } assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") result @@ -491,6 +497,9 @@ class TreeUnpickler(reader: TastyReader, typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) case BYNAMEtype => ExprType(readType()) + case ERRORtype => + if isBestEffortTasty then new PreviousErrorType + else throw new Error(s"Illegal ERRORtype in non Best Effort TASTy file") case _ => ConstantType(readConstant(tag)) } @@ -946,6 +955,7 @@ class TreeUnpickler(reader: TastyReader, val rhs = readTpt()(using localCtx) sym.info = new NoCompleter: + override def complete(denot: SymDenotation)(using Context): Unit = if !isBestEffortTasty then unsupported("complete") override def completerTypeParams(sym: Symbol)(using Context) = rhs.tpe.typeParams @@ -1021,8 +1031,14 @@ class TreeUnpickler(reader: TastyReader, case nu: New => try nu.tpe finally goto(end) + case other if isBestEffortTasty => + try other.tpe + finally goto(end) case SHAREDterm => forkAt(readAddr()).readParentType() + case SELECT if isBestEffortTasty => + goto(readEnd()) + new PreviousErrorType /** Read template parents * @param withArgs if false, only read enough of parent trees to determine their type @@ -1246,6 +1262,7 @@ class TreeUnpickler(reader: TastyReader, case path: TermRef => ref(path) case path: ThisType => untpd.This(untpd.EmptyTypeIdent).withType(path) case path: ConstantType => Literal(path.value) + case path: ErrorType if isBestEffortTasty => TypeTree(path) } } diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index 8ad9afb7d512..d01f60571601 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -27,6 +27,8 @@ class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { .map(e => e.stripSuffix(".tasty").replace("/", ".")) .toList case FileExtension.Tasty => TastyFileUtil.getClassName(file) + case 
FileExtension.Betasty if ctx.withBestEffortTasty => + TastyFileUtil.getClassName(file, withBestEffortTasty = true) case _ => report.error(em"File extension is not `tasty` or `jar`: ${file.path}") Nil diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala index d3a9550c4491..b1277accc621 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala @@ -7,6 +7,7 @@ import dotty.tools.dotc.core.tasty.TastyClassName import dotty.tools.dotc.core.StdNames.nme.EMPTY_PACKAGE import dotty.tools.io.AbstractFile import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension +import dotty.tools.dotc.classpath.FileUtils.hasBetastyExtension object TastyFileUtil { /** Get the class path of a tasty file @@ -18,9 +19,10 @@ object TastyFileUtil { * ``` * then `getClassName("./out/foo/Foo.tasty") returns `Some("./out")` */ - def getClassPath(file: AbstractFile): Option[String] = - getClassName(file).map { className => - val classInPath = className.replace(".", java.io.File.separator) + ".tasty" + def getClassPath(file: AbstractFile, fromBestEffortTasty: Boolean = false): Option[String] = + getClassName(file, fromBestEffortTasty).map { className => + val extension = if (fromBestEffortTasty) then ".betasty" else ".tasty" + val classInPath = className.replace(".", java.io.File.separator) + extension file.path.replace(classInPath, "") } @@ -33,11 +35,11 @@ object TastyFileUtil { * ``` * then `getClassName("./out/foo/Foo.tasty") returns `Some("foo.Foo")` */ - def getClassName(file: AbstractFile): Option[String] = { + def getClassName(file: AbstractFile, withBestEffortTasty: Boolean = false): Option[String] = { assert(file.exists) - assert(file.hasTastyExtension) + assert(file.hasTastyExtension || (withBestEffortTasty && file.hasBetastyExtension)) val bytes = file.toByteArray - val names = new TastyClassName(bytes).readName() + val 
names = new TastyClassName(bytes, file.hasBetastyExtension).readName() names.map { case (packageName, className) => val fullName = packageName match { case EMPTY_PACKAGE => s"${className.lastPart}" diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 1a884f2bd10b..629bc2ed3b16 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -834,7 +834,7 @@ class Inliner(val call: tpd.Tree)(using Context): override def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = super.typedSplice(tree, pt) match - case tree1 @ Splice(expr) if level == 0 && !hasInliningErrors => + case tree1 @ Splice(expr) if level == 0 && !hasInliningErrors && !ctx.usedBestEffortTasty => val expanded = expandMacro(expr, tree1.srcPos) transform.TreeChecker.checkMacroGeneratedTree(tree1, expanded) typedExpr(expanded) // Inline calls and constant fold code generated by the macro diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 8ebd1f6973f2..6d6e2ff01ad4 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -217,7 +217,7 @@ object PickledQuotes { /** Pickle tree into it's TASTY bytes s*/ private def pickle(tree: Tree)(using Context): Array[Byte] = { quotePickling.println(i"**** pickling quote of\n$tree") - val pickler = new TastyPickler(defn.RootClass) + val pickler = new TastyPickler(defn.RootClass, isBestEffortTasty = false) val treePkl = new TreePickler(pickler, Attributes.empty) treePkl.pickle(tree :: Nil) treePkl.compactify() @@ -229,7 +229,7 @@ object PickledQuotes { positionWarnings.foreach(report.warning(_)) val pickled = pickler.assembleParts() - quotePickling.println(s"**** pickled quote\n${TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")}") + 
quotePickling.println(s"**** pickled quote\n${TastyPrinter.showContents(pickled, ctx.settings.color.value == "never", isBestEffortTasty = false)}") pickled } @@ -266,10 +266,10 @@ object PickledQuotes { inContext(unpicklingContext) { - quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}") + quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never", isBestEffortTasty = false)}") val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term - val unpickler = new DottyUnpickler(NoAbstractFile, bytes, mode) + val unpickler = new DottyUnpickler(NoAbstractFile, bytes, isBestEffortTasty = false, mode) unpickler.enter(Set.empty) val tree = unpickler.tree diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index a63b6569fefe..10b0023992fe 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -81,6 +81,22 @@ object report: if ctx.settings.YdebugError.value then Thread.dumpStack() if ctx.settings.YdebugTypeError.value then ex.printStackTrace() + def bestEffortError(ex: Throwable, msg: String)(using Context): Unit = + val stackTrace = + Option(ex.getStackTrace()).map { st => + if st.nn.isEmpty then "" + else s"Stack trace: \n ${st.nn.mkString("\n ")}".stripMargin + }.getOrElse("") + // Build tools and dotty's test framework may check precisely for + // "Unsuccessful best-effort compilation." error text. + val fullMsg = + em"""Unsuccessful best-effort compilation. 
+ |${msg} + |Cause: + | ${ex.toString.replace("\n", "\n ")} + |${stackTrace}""" + ctx.reporter.report(new Error(fullMsg, NoSourcePosition)) + def errorOrMigrationWarning(msg: Message, pos: SrcPos, migrationVersion: MigrationVersion)(using Context): Unit = if sourceVersion.isAtLeast(migrationVersion.errorFrom) then if !sourceVersion.isMigrating then error(msg, pos) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 77eef4564bbf..357202229e50 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -56,7 +56,7 @@ class ExtractSemanticDB private (phaseMode: ExtractSemanticDB.PhaseMode) extends override def isRunnable(using Context) = import ExtractSemanticDB.{semanticdbTarget, outputDirectory} def writesToOutputJar = semanticdbTarget.isEmpty && outputDirectory.isInstanceOf[JarArchive] - super.isRunnable && ctx.settings.Xsemanticdb.value && !writesToOutputJar + (super.isRunnable || ctx.isBestEffort) && ctx.settings.Xsemanticdb.value && !writesToOutputJar // Check not needed since it does not transform trees override def isCheckable: Boolean = false diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 887a962f7a65..137fbf4f837c 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -13,6 +13,8 @@ abstract class MacroTransform extends Phase { import ast.tpd.* + override def isRunnable(using Context) = super.isRunnable && !ctx.usedBestEffortTasty + override def run(using Context): Unit = { val unit = ctx.compilationUnit unit.tpdTree = atPhase(transformPhase)(newTransformer.transform(unit.tpdTree)) diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala 
b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index 252babe7058f..86acd009fd09 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -136,6 +136,8 @@ object MegaPhase { override def run(using Context): Unit = singletonGroup.run + + override def isRunnable(using Context): Boolean = super.isRunnable && !ctx.usedBestEffortTasty } } import MegaPhase.* @@ -164,6 +166,8 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { relaxedTypingCache } + override def isRunnable(using Context): Boolean = super.isRunnable && !ctx.usedBestEffortTasty + private val cpy: TypedTreeCopier = cpyBetweenPhases /** Transform node using all phases in this group that have idxInGroup >= start */ diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 6fe687072828..8bb396ca4081 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -32,6 +32,7 @@ import dotty.tools.dotc.sbt.asyncZincPhasesCompleted import scala.concurrent.ExecutionContext import scala.util.control.NonFatal import java.util.concurrent.atomic.AtomicBoolean +import java.nio.file.Files object Pickler { val name: String = "pickler" @@ -191,7 +192,11 @@ class Pickler extends Phase { // No need to repickle trees coming from TASTY, however in the case that we need to write TASTy to early-output, // then we need to run this phase to send the tasty from compilation units to the early-output. 
override def isRunnable(using Context): Boolean = - super.isRunnable && (!ctx.settings.fromTasty.value || doAsyncTasty) + (super.isRunnable || ctx.isBestEffort) + && (!ctx.settings.fromTasty.value || doAsyncTasty) + && (!ctx.usedBestEffortTasty || ctx.isBestEffort) + // we do not want to pickle `.betasty` if do not plan to actually create the + // betasty file (as signified by the -Ybest-effort option) // when `-Yjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false @@ -238,7 +243,8 @@ class Pickler extends Phase { private val executor = Executor[Array[Byte]]() - private def useExecutor(using Context) = Pickler.ParallelPickling && !ctx.settings.YtestPickler.value + private def useExecutor(using Context) = + Pickler.ParallelPickling && !ctx.isBestEffort && !ctx.settings.YtestPickler.value private def printerContext(isOutline: Boolean)(using Context): Context = if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_)) @@ -257,6 +263,7 @@ class Pickler extends Phase { override def run(using Context): Unit = { val unit = ctx.compilationUnit + val isBestEffort = ctx.reporter.errorsReported || ctx.usedBestEffortTasty pickling.println(i"unpickling in run ${ctx.runId}") if ctx.settings.fromTasty.value then @@ -292,9 +299,16 @@ class Pickler extends Phase { isOutline = isOutline ) - val pickler = new TastyPickler(cls) + val pickler = new TastyPickler(cls, isBestEffortTasty = isBestEffort) val treePkl = new TreePickler(pickler, attributes) - treePkl.pickle(tree :: Nil) + val successful = + try + treePkl.pickle(tree :: Nil) + true + catch + case NonFatal(ex) if ctx.isBestEffort => + report.bestEffortError(ex, "Some best-effort tasty files will not be generated.") + false Profile.current.recordTasty(treePkl.buf.length) val positionWarnings = new mutable.ListBuffer[Message]() @@ -329,7 +343,7 @@ class Pickler extends Phase { // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG if 
ctx.settings.YprintTasty.value || pickling != noPrinter then println(i"**** pickled info of $cls") - println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) + println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never", isBestEffortTasty = false)) println(i"**** end of pickled info of $cls") if fastDoAsyncTasty then @@ -339,26 +353,27 @@ class Pickler extends Phase { } } - /** A function that returns the pickled bytes. Depending on `Pickler.ParallelPickling` - * either computes the pickled data in a future or eagerly before constructing the - * function value. - */ - val demandPickled: () => Array[Byte] = - if useExecutor then - val futurePickled = executor.schedule(computePickled) - () => - try futurePickled.force.get - finally reportPositionWarnings() - else - val pickled = computePickled() - reportPositionWarnings() - if ctx.settings.YtestPickler.value then - pickledBytes(cls) = (unit, pickled) - if ctx.settings.YtestPicklerCheck.value then - printedTasty(cls) = TastyPrinter.showContents(pickled, noColor = true, testPickler = true) - () => pickled - - unit.pickled += (cls -> demandPickled) + if successful then + /** A function that returns the pickled bytes. Depending on `Pickler.ParallelPickling` + * either computes the pickled data in a future or eagerly before constructing the + * function value. 
+ */ + val demandPickled: () => Array[Byte] = + if useExecutor then + val futurePickled = executor.schedule(computePickled) + () => + try futurePickled.force.get + finally reportPositionWarnings() + else + val pickled = computePickled() + reportPositionWarnings() + if ctx.settings.YtestPickler.value then + pickledBytes(cls) = (unit, pickled) + if ctx.settings.YtestPicklerCheck.value then + printedTasty(cls) = TastyPrinter.showContents(pickled, noColor = true, isBestEffortTasty = false, testPickler = true) + () => pickled + + unit.pickled += (cls -> demandPickled) end for } @@ -396,6 +411,13 @@ class Pickler extends Phase { .setReporter(new ThrowingReporter(ctx.reporter)) .addMode(Mode.ReadPositions) ) + if ctx.isBestEffort then + val outpath = + ctx.settings.outputDir.value.jpath.toAbsolutePath.nn.normalize.nn + .resolve("META-INF").nn + .resolve("best-effort") + Files.createDirectories(outpath) + BestEffortTastyWriter.write(outpath.nn, result) result } @@ -405,7 +427,7 @@ class Pickler extends Phase { val resolveCheck = ctx.settings.YtestPicklerCheck.value val unpicklers = for ((cls, (unit, bytes)) <- pickledBytes) yield { - val unpickler = new DottyUnpickler(unit.source.file, bytes) + val unpickler = new DottyUnpickler(unit.source.file, bytes, isBestEffortTasty = false) unpickler.enter(roots = Set.empty) val optCheck = if resolveCheck then diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 612bd22ef19d..9d0150f49a1f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4322,6 +4322,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * tree that went unreported. A scenario where this happens is i1802.scala. 
*/ def ensureReported(tp: Type) = tp match { + case err: PreviousErrorType if ctx.usedBestEffortTasty => // do nothing if error was already reported in previous compilation case err: ErrorType if !ctx.reporter.errorsReported => report.error(err.msg, tree.srcPos) case _ => } diff --git a/compiler/src/dotty/tools/io/FileExtension.scala b/compiler/src/dotty/tools/io/FileExtension.scala index 9d239477aed3..3aeef5b902ce 100644 --- a/compiler/src/dotty/tools/io/FileExtension.scala +++ b/compiler/src/dotty/tools/io/FileExtension.scala @@ -5,6 +5,7 @@ import dotty.tools.dotc.util.EnumFlags.FlagSet enum FileExtension(val toLowerCase: String): case Tasty extends FileExtension("tasty") + case Betasty extends FileExtension("betasty") case Class extends FileExtension("class") case Jar extends FileExtension("jar") case Scala extends FileExtension("scala") @@ -24,6 +25,8 @@ enum FileExtension(val toLowerCase: String): /** represents `".tasty"` */ def isTasty = this == Tasty + /** represents `".betasty"` */ + def isBetasty = this == Betasty /** represents `".class"` */ def isClass = this == Class /** represents `".scala"` */ @@ -60,6 +63,7 @@ object FileExtension: case "java" => Java case "zip" => Zip case "inc" => Inc + case "betasty" => Betasty case _ => slowLookup(s) // slower than initialLookup, keep in sync with initialLookup @@ -72,6 +76,7 @@ object FileExtension: else if s.equalsIgnoreCase("java") then Java else if s.equalsIgnoreCase("zip") then Zip else if s.equalsIgnoreCase("inc") then Inc + else if s.equalsIgnoreCase("betasty") then Betasty else External(s) def from(s: String): FileExtension = diff --git a/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyFormat.scala b/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyFormat.scala new file mode 100644 index 000000000000..99a24ce5f346 --- /dev/null +++ b/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyFormat.scala @@ -0,0 +1,45 @@ +package dotty.tools.tasty.besteffort + +import 
dotty.tools.tasty.TastyFormat + +/************************************************************************************* +Best Effort TASTy (.betasty) format extends the TASTy grammar with additional +terminal symbols and productions. Grammar notation is kept from the regular TASTy. +However, the lowercase prefixes describing the semantics (but otherwise not affecting +the grammar) may not always hold. + +The following are the added terminal Symbols to the grammar: + * `ERRORtype` - representing an error from a previous compilation + +The following are the added productions to the grammar: + + Standard-Section: "ASTs" +```none + Type = ERRORtype + Path = ERRORtype +``` +**************************************************************************************/ +object BestEffortTastyFormat { + export TastyFormat.{astTagToString => _, *} + + /** First four bytes of a best effort TASTy file, used instead of the regular header. + * Signifies that the TASTy can only be consumed by the compiler in the best effort mode. + * Other than that, versioning works as usual, disallowing Best Effort Tasty from older minor versions. + */ + final val bestEffortHeader: Array[Int] = Array(0x5C, 0xA1, 0xAB, 0x20) + + /** Natural number. Along with MajorVersion, MinorVersion and ExperimentalVersion + * numbers specifies the Best Effort TASTy format. For now, Best Effort TASTy holds + * no compatibility guarantees, making this a reserved space for when this would have + * to be changed. 
 + */
+  final val PatchVersion: Int = 0
+
+  // added AST tag - Best Effort TASTy only
+  final val ERRORtype = 50
+
+  def astTagToString(tag: Int) = tag match {
+    case ERRORtype => "ERRORtype"
+    case _ => TastyFormat.astTagToString(tag)
+  }
+}
diff --git a/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyHeaderUnpickler.scala b/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyHeaderUnpickler.scala
new file mode 100644
index 000000000000..4325f55be4a7
--- /dev/null
+++ b/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyHeaderUnpickler.scala
@@ -0,0 +1,175 @@
+package dotty.tools.tasty.besteffort
+
+import java.util.UUID
+
+import BestEffortTastyFormat.{MajorVersion, MinorVersion, ExperimentalVersion, bestEffortHeader, header}
+import dotty.tools.tasty.{UnpicklerConfig, TastyHeaderUnpickler, TastyReader, UnpickleException, TastyFormat, TastyVersion}
+
+/**
+ * The Best Effort Tasty Header consists of six fields:
+ * - uuid
+ *   - contains a hash of the sections of the Best Effort TASTy file
+ * - majorVersion
+ *   - matching the TASTy format version that last broke backwards compatibility
+ * - minorVersion
+ *   - matching the TASTy format version that last broke forward compatibility
+ * - patchVersion
+ *   - specifying the best effort TASTy version. Currently unused, kept as a reserved space.
+ *     Empty if it was serialized as a regular TASTy file with regular tasty header.
+ * - experimentalVersion
+ *   - 0 for final compiler version
+ *   - positive for between minor versions and forward compatibility
+ *     is broken since the previous stable version.
+ * - toolingVersion + * - arbitrary string representing the tooling that produced the Best Effort TASTy + */ +sealed abstract case class BestEffortTastyHeader( + uuid: UUID, + majorVersion: Int, + minorVersion: Int, + patchVersion: Option[Int], + experimentalVersion: Int, + toolingVersion: String +) + +class BestEffortTastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { + import BestEffortTastyHeaderUnpickler._ + import reader._ + + def this(reader: TastyReader) = this(UnpicklerConfig.generic, reader) + def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) + + def readHeader(): UUID = + readFullHeader().uuid + + def readFullHeader(): BestEffortTastyHeader = { + val hasBestEffortHeader = { + val readHeader = (for (i <- 0 until header.length) yield readByte()).toArray + + if (readHeader.sameElements(header)) false + else if (readHeader.sameElements(bestEffortHeader)) true + else throw new UnpickleException("not a TASTy or Best Effort TASTy file") + } + + val fileMajor = readNat() + val fileMinor = readNat() + val filePatch = + if hasBestEffortHeader then Some(readNat()) + else None + val fileExperimental = readNat() + val toolingVersion = { + val length = readNat() + val start = currentAddr + val end = start + length + goto(end) + new String(bytes, start.index, length) + } + + checkValidVersion(fileMajor, fileMinor, fileExperimental, toolingVersion, config) + + val uuid = new UUID(readUncompressedLong(), readUncompressedLong()) + new BestEffortTastyHeader(uuid, fileMajor, fileMinor, filePatch, fileExperimental, toolingVersion) {} + } + + private def check(cond: Boolean, msg: => String): Unit = { + if (!cond) throw new UnpickleException(msg) + } +} + +// Copy pasted from dotty.tools.tasty.TastyHeaderUnpickler +// Since that library has strong compatibility guarantees, we do not want +// to add any more methods just to support an experimental feature +// (like best-effort compilation options). 
+object BestEffortTastyHeaderUnpickler { + + private def check(cond: Boolean, msg: => String): Unit = { + if (!cond) throw new UnpickleException(msg) + } + + private def checkValidVersion(fileMajor: Int, fileMinor: Int, fileExperimental: Int, toolingVersion: String, config: UnpicklerConfig) = { + val toolMajor: Int = config.majorVersion + val toolMinor: Int = config.minorVersion + val toolExperimental: Int = config.experimentalVersion + val validVersion = TastyFormat.isVersionCompatible( + fileMajor = fileMajor, + fileMinor = fileMinor, + fileExperimental = fileExperimental, + compilerMajor = toolMajor, + compilerMinor = toolMinor, + compilerExperimental = toolExperimental + ) + check(validVersion, { + // failure means that the TASTy file cannot be read, therefore it is either: + // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor + // version supported by this compiler + // - any experimental in an older minor, in which case the library should be recompiled by the stable + // compiler in the same minor. + // - older experimental in the same minor, in which case the compiler is also experimental, and the library + // should be recompiled by the current compiler + // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. 
+ val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) + val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) + + val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) + + val what = if (compat < 0) "Backward" else "Forward" + val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) + val fix = ( + if (compat < 0) { + val newCompiler = + if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable + else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable + else toolVersion // recompile the experimental library with the current experimental compiler + recompileFix(newCompiler, config) + } + else upgradeFix(fileVersion, config) + ) + signature + fix + tastyAddendum + }) + } + + private def signatureString( + fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = { + val optProducedBy = tool.fold("")(t => s", produced by $t") + s"""$what incompatible TASTy file has version ${fileVersion.show}$optProducedBy, + | expected ${toolVersion.validRange}. 
+ |""".stripMargin + } + + private def recompileFix(producerVersion: TastyVersion, config: UnpicklerConfig) = { + val addendum = config.recompileAdditionalInfo + val newTool = config.upgradedProducerTool(producerVersion) + s""" The source of this file should be recompiled by $newTool.$addendum""".stripMargin + } + + private def upgradeFix(fileVersion: TastyVersion, config: UnpicklerConfig) = { + val addendum = config.upgradeAdditionalInfo(fileVersion) + val newTool = config.upgradedReaderTool(fileVersion) + s""" To read this ${fileVersion.kind} file, use $newTool.$addendum""".stripMargin + } + + private def tastyAddendum: String = """ + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + + private object Compatibility { + final val BackwardIncompatibleMajor = -3 + final val BackwardIncompatibleExperimental = -2 + final val ExperimentalRecompile = -1 + final val ExperimentalUpgrade = 1 + final val ForwardIncompatible = 2 + + /** Given that file can't be read, extract the reason */ + def failReason(file: TastyVersion, read: TastyVersion): Int = + if (file.major == read.major && file.minor == read.minor && file.isExperimental && read.isExperimental) { + if (file.experimental < read.experimental) ExperimentalRecompile // recompile library as compiler is too new + else ExperimentalUpgrade // they should upgrade compiler as library is too new + } + else if (file.major < read.major) + BackwardIncompatibleMajor // pre 3.0.0 + else if (file.isExperimental && file.major == read.major && file.minor <= read.minor) + // e.g. 
3.4.0 reading 3.4.0-RC1-NIGHTLY, or 3.3.0 reading 3.0.2-RC1-NIGHTLY + BackwardIncompatibleExperimental + else ForwardIncompatible + } +} diff --git a/compiler/test/dotc/neg-best-effort-pickling.blacklist b/compiler/test/dotc/neg-best-effort-pickling.blacklist new file mode 100644 index 000000000000..ff02be107a8a --- /dev/null +++ b/compiler/test/dotc/neg-best-effort-pickling.blacklist @@ -0,0 +1,19 @@ +export-in-extension.scala +i12456.scala +i8623.scala +i1642.scala +i16696.scala +constructor-proxy-values.scala +i9328.scala +i15414.scala +i6796.scala +i14013.scala +toplevel-cyclic +curried-dependent-ift.scala +i17121.scala +illegal-match-types.scala +i13780-1.scala + +# semantic db generation fails in the first compilation +i1642.scala +i15158.scala diff --git a/compiler/test/dotc/neg-best-effort-unpickling.blacklist b/compiler/test/dotc/neg-best-effort-unpickling.blacklist new file mode 100644 index 000000000000..1e22d919f25a --- /dev/null +++ b/compiler/test/dotc/neg-best-effort-unpickling.blacklist @@ -0,0 +1,17 @@ +# cyclic reference crashes +i4368.scala +i827.scala +cycles.scala +i5332.scala +i4369c.scala +i1806.scala +i0091-infpaths.scala +exports.scala +i14834.scala + +# other type related crashes +i4653.scala +overrideClass.scala + +# repeating on a top level type definition +i18750.scala diff --git a/compiler/test/dotty/tools/TestSources.scala b/compiler/test/dotty/tools/TestSources.scala index a288e49c5eb9..b2133b2fb182 100644 --- a/compiler/test/dotty/tools/TestSources.scala +++ b/compiler/test/dotty/tools/TestSources.scala @@ -64,6 +64,14 @@ object TestSources { if Properties.usingScalaLibraryTasty then loadList(patmatExhaustivityScala2LibraryTastyBlacklistFile) else Nil + // neg best effort tests lists + + def negBestEffortPicklingBlacklistFile: String = "compiler/test/dotc/neg-best-effort-pickling.blacklist" + def negBestEffortUnpicklingBlacklistFile: String = "compiler/test/dotc/neg-best-effort-unpickling.blacklist" + + def 
negBestEffortPicklingBlacklisted: List[String] = loadList(negBestEffortPicklingBlacklistFile) + def negBestEffortUnpicklingBlacklisted: List[String] = loadList(negBestEffortUnpicklingBlacklistFile) + // load lists private def loadList(path: String): List[String] = { diff --git a/compiler/test/dotty/tools/dotc/BestEffortCompilationTests.scala b/compiler/test/dotty/tools/dotc/BestEffortCompilationTests.scala new file mode 100644 index 000000000000..681c92f266d2 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/BestEffortCompilationTests.scala @@ -0,0 +1,59 @@ +package dotty +package tools +package dotc + +import scala.concurrent.duration._ +import dotty.tools.vulpix._ +import org.junit.{ Test, AfterClass } +import reporting.TestReporter +import java.io.{File => JFile} + +import scala.language.unsafeNulls + +class BestEffortCompilationTests { + import ParallelTesting._ + import vulpix.TestConfiguration._ + import BestEffortCompilationTests._ + import CompilationTest.aggregateTests + + // Since TASTy and beTASTy files are read in a lazy manner (only when referenced by the source .scala file) + // we test by using the "-from-tasty" option. This guarantees that the tasty files will be read + // (and that the Best Effort TASTy reader will be tested), but we unfortunately skip the useful + // interactions a tree derived from beTASTy could have with other frontend phases. 
+ @Test def negTestFromBestEffortTasty: Unit = { + // Can be reproduced with + // > sbt + // > scalac --Ybest-effort -Xsemanticdb + // > scalac --from-tasty -Ywith-best-effort-tasty META_INF/best-effort/ + + implicit val testGroup: TestGroup = TestGroup("negTestFromBestEffortTasty") + compileBestEffortTastyInDir(s"tests${JFile.separator}neg", bestEffortBaselineOptions, + picklingFilter = FileFilter.exclude(TestSources.negBestEffortPicklingBlacklisted), + unpicklingFilter = FileFilter.exclude(TestSources.negBestEffortUnpicklingBlacklisted) + ).checkNoCrash() + } + + // Tests an actual use case of this compilation mode, where symbol definitions of the downstream + // projects depend on the best effort tasty files generated with the Best Effort dir option + @Test def bestEffortIntergrationTest: Unit = { + implicit val testGroup: TestGroup = TestGroup("bestEffortIntegrationTests") + compileBestEffortIntegration(s"tests${JFile.separator}best-effort", bestEffortBaselineOptions) + .noCrashWithCompilingDependencies() + } +} + +object BestEffortCompilationTests extends ParallelTesting { + def maxDuration = 45.seconds + def numberOfSlaves = Runtime.getRuntime.availableProcessors() + def safeMode = Properties.testsSafeMode + def isInteractive = SummaryReport.isInteractive + def testFilter = Properties.testsFilter + def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests + + implicit val summaryReport: SummaryReporting = new SummaryReport + @AfterClass def tearDown(): Unit = { + super.cleanup() + summaryReport.echoSummary() + } +} diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala index db58ff36ac42..11406070ce7a 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala @@ -117,7 +117,7 @@ class CommentPicklingTest { 
implicit val ctx: Context = setup(args, initCtx).map(_._2).getOrElse(initCtx) ctx.initialize() val trees = files.flatMap { f => - val unpickler = new DottyUnpickler(AbstractFile.getFile(f.jpath), f.toByteArray()) + val unpickler = new DottyUnpickler(AbstractFile.getFile(f.jpath), f.toByteArray(), isBestEffortTasty = false) unpickler.enter(roots = Set.empty) unpickler.rootTrees(using ctx) } diff --git a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala index 66463e3ff66c..326a2dc87b2a 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala @@ -48,7 +48,7 @@ class PathPicklingTest { val jar = JarArchive.open(Path(s"$out/out.jar"), create = false) try for file <- jar.iterator() if file.name.endsWith(".tasty") do - sb.append(TastyPrinter.showContents(file.toByteArray, noColor = true)) + sb.append(TastyPrinter.showContents(file.toByteArray, noColor = true, isBestEffortTasty = false)) finally jar.close() sb.toString() diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index e9975ed25b6d..880a3bd1cc53 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -158,6 +158,12 @@ trait ParallelTesting extends RunnerOrchestration { self => } } + private sealed trait FromTastyCompilationMode + private case object NotFromTasty extends FromTastyCompilationMode + private case object FromTasty extends FromTastyCompilationMode + private case object FromBestEffortTasty extends FromTastyCompilationMode + private case class WithBestEffortTasty(bestEffortDir: JFile) extends FromTastyCompilationMode + /** A group of files that may all be compiled together, with the same flags * and output directory */ @@ -166,7 +172,7 @@ trait ParallelTesting extends RunnerOrchestration 
{ self => files: Array[JFile], flags: TestFlags, outDir: JFile, - fromTasty: Boolean = false, + fromTasty: FromTastyCompilationMode = NotFromTasty, decompilation: Boolean = false ) extends TestSource { def sourceFiles: Array[JFile] = files.filter(isSourceFile) @@ -225,9 +231,11 @@ trait ParallelTesting extends RunnerOrchestration { self => private final def compileTestSource(testSource: TestSource): Try[List[TestReporter]] = Try(testSource match { case testSource @ JointCompilationSource(name, files, flags, outDir, fromTasty, decompilation) => - val reporter = - if (fromTasty) compileFromTasty(flags, outDir) - else compile(testSource.sourceFiles, flags, outDir) + val reporter = fromTasty match + case NotFromTasty => compile(testSource.sourceFiles, flags, outDir) + case FromTasty => compileFromTasty(flags, outDir) + case FromBestEffortTasty => compileFromBestEffortTasty(flags, outDir) + case WithBestEffortTasty(bestEffortDir) => compileWithBestEffortTasty(testSource.sourceFiles, bestEffortDir, flags, outDir) List(reporter) case testSource @ SeparateCompilationSource(_, dir, flags, outDir) => @@ -665,6 +673,31 @@ trait ParallelTesting extends RunnerOrchestration { self => reporter + protected def compileFromBestEffortTasty(flags0: TestFlags, targetDir: JFile): TestReporter = { + val classes = flattenFiles(targetDir).filter(isBestEffortTastyFile).map(_.toString) + val flags = flags0 and "-from-tasty" and "-Ywith-best-effort-tasty" + val reporter = mkReporter + val driver = new Driver + + driver.process(flags.all ++ classes, reporter = reporter) + + reporter + } + + protected def compileWithBestEffortTasty(files0: Array[JFile], bestEffortDir: JFile, flags0: TestFlags, targetDir: JFile): TestReporter = { + val flags = flags0 + .and("-Ywith-best-effort-tasty") + .and("-d", targetDir.getPath) + val reporter = mkReporter + val driver = new Driver + + val args = Array("-classpath", flags.defaultClassPath + JFile.pathSeparator + bestEffortDir.toString) ++ flags.options + + 
driver.process(args ++ files0.map(_.toString), reporter = reporter) + + reporter + } + protected def compileFromTasty(flags0: TestFlags, targetDir: JFile): TestReporter = { val tastyOutput = new JFile(targetDir.getPath + "_from-tasty") tastyOutput.mkdir() @@ -988,6 +1021,22 @@ trait ParallelTesting extends RunnerOrchestration { self => override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = None } + private final class NoBestEffortErrorsTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting) + extends Test(testSources, times, threadLimit, suppressAllOutput) { + override def suppressErrors = true + override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = + val unsucceffulBestEffortErrorMsg = "Unsuccessful best-effort compilation." + val failedBestEffortCompilation: Seq[TestReporter] = + reporters.collect{ + case testReporter if testReporter.errors.exists(_.msg.message.startsWith(unsucceffulBestEffortErrorMsg)) => + testReporter + } + if !failedBestEffortCompilation.isEmpty then + Some(failedBestEffortCompilation.flatMap(_.consoleOutput.split("\n")).mkString("\n")) + else + None + } + /** The `CompilationTest` is the main interface to `ParallelTesting`, it * can be instantiated via one of the following methods: @@ -1127,12 +1176,28 @@ trait ParallelTesting extends RunnerOrchestration { self => def checkWarnings()(implicit summaryReport: SummaryReporting): this.type = checkPass(new WarnTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput), "Warn") + /** Creates a "neg" test run, which makes sure that each test manages successful + * best-effort compilation, without any errors related to pickling/unpickling + * of betasty files. 
+ */ + def checkNoBestEffortError()(implicit summaryReport: SummaryReporting): this.type = { + val test = new NoBestEffortErrorsTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() + + cleanup() + + if (test.didFail) { + fail("Best-effort test should not have shown a \"Unsuccessful best-effort compilation\" error, but did") + } + + this + } + /** Creates a "neg" test run, which makes sure that each test generates the * correct number of errors at the correct positions. It also makes sure * that none of these tests crashes the compiler. */ def checkExpectedErrors()(implicit summaryReport: SummaryReporting): this.type = - val test = new NegTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() + val test = new NegTest(targets, times, threadLimit, shouldSuppressOutput).executeTestSuite() cleanup() @@ -1504,7 +1569,7 @@ trait ParallelTesting extends RunnerOrchestration { self => flags: TestFlags, outDir: JFile, fromTasty: Boolean = false, - ) extends JointCompilationSource(name, Array(file), flags, outDir, fromTasty) { + ) extends JointCompilationSource(name, Array(file), flags, outDir, if (fromTasty) FromTasty else NotFromTasty) { override def buildInstructions(errors: Int, warnings: Int): String = { val runOrPos = if (file.getPath.startsWith(s"tests${JFile.separator}run${JFile.separator}")) "run" else "pos" @@ -1538,6 +1603,147 @@ trait ParallelTesting extends RunnerOrchestration { self => ) } + /** A two step compilation test for best effort compilation pickling and unpickling. + * + * First, erroring neg test files are compiled with the `-Ybest-effort` option. + * If successful, then the produced Best Effort TASTy is re-compiled with + * '-Ywith-best-effort-tasty' to test the TastyReader for Best Effort TASTy. 
+ */ + def compileBestEffortTastyInDir(f: String, flags: TestFlags, picklingFilter: FileFilter, unpicklingFilter: FileFilter)( + implicit testGroup: TestGroup): BestEffortCompilationTest = { + val bestEffortFlag = "-Ybest-effort" + val semanticDbFlag = "-Xsemanticdb" + assert(!flags.options.contains(bestEffortFlag), "Best effort compilation flag should not be added manually") + + val outDir = defaultOutputDir + testGroup + JFile.separator + val sourceDir = new JFile(f) + checkRequirements(f, sourceDir, outDir) + + val (dirsStep1, filteredPicklingFiles) = compilationTargets(sourceDir, picklingFilter) + val (dirsStep2, filteredUnpicklingFiles) = compilationTargets(sourceDir, unpicklingFilter) + + class BestEffortCompilation( + name: String, + file: JFile, + flags: TestFlags, + outputDir: JFile + ) extends JointCompilationSource(name, Array(file), flags.and(bestEffortFlag).and(semanticDbFlag), outputDir) { + override def buildInstructions(errors: Int, warnings: Int): String = { + s"""| + |Test '$title' compiled with a compiler crash, + |the test can be reproduced by running: + | + | sbt "scalac $bestEffortFlag $semanticDbFlag $file" + | + |These tests can be disabled by adding `${file.getName}` to `compiler${JFile.separator}test${JFile.separator}dotc${JFile.separator}neg-best-effort-pickling.blacklist` + |""".stripMargin + } + } + + class CompilationFromBestEffortTasty( + name: String, + file: JFile, + flags: TestFlags, + bestEffortDir: JFile, + ) extends JointCompilationSource(name, Array(file), flags, bestEffortDir, fromTasty = FromBestEffortTasty) { + + override def buildInstructions(errors: Int, warnings: Int): String = { + def beTastyFiles(file: JFile): Array[JFile] = + file.listFiles.flatMap { innerFile => + if (innerFile.isDirectory) beTastyFiles(innerFile) + else if (isBestEffortTastyFile(innerFile)) Array(innerFile) + else Array.empty[JFile] + } + val beTastyFilesString = beTastyFiles(bestEffortDir).mkString(" ") + s"""| + |Test '$title' compiled with a 
compiler crash, + |the test can be reproduced by running: + | + | sbt "scalac -Ybest-effort $file" + | sbt "scalac --from-tasty -Ywith-best-effort-tasty $beTastyFilesString" + | + |These tests can be disabled by adding `${file.getName}` to `compiler${JFile.separator}test${JFile.separator}dotc${JFile.separator}neg-best-effort-unpickling.blacklist` + | + |""".stripMargin + } + } + + val (bestEffortTargets, targetAndBestEffortDirs) = + filteredPicklingFiles.map { f => + val outputDir = createOutputDirsForFile(f, sourceDir, outDir) + val bestEffortDir = new JFile(outputDir, s"META-INF${JFile.separator}best-effort") + ( + BestEffortCompilation(testGroup.name, f, flags, outputDir), + (f, bestEffortDir) + ) + }.unzip + val (_, bestEffortDirs) = targetAndBestEffortDirs.unzip + val fileToBestEffortDirMap = targetAndBestEffortDirs.toMap + + val picklingSet = filteredPicklingFiles.toSet + val fromTastyTargets = + filteredUnpicklingFiles.filter(picklingSet.contains(_)).map { f => + val bestEffortDir = fileToBestEffortDirMap(f) + new CompilationFromBestEffortTasty(testGroup.name, f, flags, bestEffortDir) + } + + new BestEffortCompilationTest( + new CompilationTest(bestEffortTargets).keepOutput, + new CompilationTest(fromTastyTargets).keepOutput, + bestEffortDirs, + shouldDelete = true + ) + } + + /** A two step integration test for best effort compilation. + * + * Directories found in the directory `f` represent separate tests and must contain + * the 'err' and 'main' directories. First the (erroring) contents of the 'err' + * directory are compiled with the `Ybest-effort` option. + * Then, are the contents of 'main' are compiled with the previous best effort directory + * on the classpath using the option `-Ywith-best-effort-tasty`. 
+ */ + def compileBestEffortIntegration(f: String, flags: TestFlags)(implicit testGroup: TestGroup) = { + val bestEffortFlag = "-Ybest-effort" + val semanticDbFlag = "-Xsemanticdb" + val withBetastyFlag = "-Ywith-best-effort-tasty" + val sourceDir = new JFile(f) + val dirs = sourceDir.listFiles.toList + assert(dirs.forall(_.isDirectory), s"All files in $f have to be directories.") + + val (step1Targets, step2Targets, bestEffortDirs) = dirs.map { dir => + val step1SourceDir = new JFile(dir, "err") + val step2SourceDir = new JFile(dir, "main") + + val step1SourceFiles = step1SourceDir.listFiles + val step2SourceFiles = step2SourceDir.listFiles + + val outDir = defaultOutputDir + testGroup + JFile.separator + dir.getName().toString + JFile.separator + + val step1OutDir = createOutputDirsForDir(step1SourceDir, step1SourceDir, outDir) + val step2OutDir = createOutputDirsForDir(step2SourceDir, step2SourceDir, outDir) + + val step1Compilation = JointCompilationSource( + testGroup.name, step1SourceFiles, flags.and(bestEffortFlag).and(semanticDbFlag), step1OutDir, fromTasty = NotFromTasty + ) + + val bestEffortDir = new JFile(step1OutDir, s"META-INF${JFile.separator}best-effort") + + val step2Compilation = JointCompilationSource( + testGroup.name, step2SourceFiles, flags.and(withBetastyFlag).and(semanticDbFlag), step2OutDir, fromTasty = WithBestEffortTasty(bestEffortDir) + ) + (step1Compilation, step2Compilation, bestEffortDir) + }.unzip3 + + BestEffortCompilationTest( + new CompilationTest(step1Targets).keepOutput, + new CompilationTest(step2Targets).keepOutput, + bestEffortDirs, + true + ) + } + + class TastyCompilationTest(step1: CompilationTest, step2: CompilationTest, shouldDelete: Boolean)(implicit testGroup: TestGroup) { def keepOutput: TastyCompilationTest = @@ -1564,6 +1770,35 @@ trait ParallelTesting extends RunnerOrchestration { self => } } + class BestEffortCompilationTest(step1: CompilationTest, step2: CompilationTest, bestEffortDirs: List[JFile], shouldDelete: 
Boolean)(implicit testGroup: TestGroup) { + + def checkNoCrash()(implicit summaryReport: SummaryReporting): this.type = { + step1.checkNoBestEffortError() // Compile all files to generate the class files with best effort tasty + step2.checkNoBestEffortError() // Compile with best effort tasty + + if (shouldDelete) { + CompilationTest.aggregateTests(step1, step2).delete() + def delete(file: JFile): Unit = { + if (file.isDirectory) file.listFiles.foreach(delete) + try Files.delete(file.toPath) + catch { + case _: NoSuchFileException => // already deleted, everything's fine + } + } + bestEffortDirs.foreach(t => delete(t)) + } + + this + } + + def noCrashWithCompilingDependencies()(implicit summaryReport: SummaryReporting): this.type = { + step1.checkNoBestEffortError() // Compile all files to generate the class files with best effort tasty + step2.checkCompile() // Compile with best effort tasty + + this + } + } + /** This function behaves similar to `compileFilesInDir` but it ignores * sub-directories and as such, does **not** perform separate compilation * tests. 
@@ -1601,4 +1836,7 @@ object ParallelTesting { def isTastyFile(f: JFile): Boolean = f.getName.endsWith(".tasty") + def isBestEffortTastyFile(f: JFile): Boolean = + f.getName.endsWith(".betasty") + } diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index f5540304da89..086d590fbfc7 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -69,6 +69,7 @@ object TestConfiguration { val noYcheckCommonOptions = Array("-indent") ++ checkOptions ++ noCheckOptions val defaultOptions = TestFlags(basicClasspath, commonOptions) val noYcheckOptions = TestFlags(basicClasspath, noYcheckCommonOptions) + val bestEffortBaselineOptions = TestFlags(basicClasspath, noCheckOptions) val unindentOptions = TestFlags(basicClasspath, Array("-no-indent") ++ checkOptions ++ noCheckOptions ++ yCheckOptions) val withCompilerOptions = defaultOptions.withClasspath(withCompilerClasspath).withRunClasspath(withCompilerClasspath) diff --git a/docs/_docs/internals/best-effort-compilation.md b/docs/_docs/internals/best-effort-compilation.md new file mode 100644 index 000000000000..2fed951c3fd8 --- /dev/null +++ b/docs/_docs/internals/best-effort-compilation.md @@ -0,0 +1,88 @@ +--- +layout: doc-page +title: Best Effort Compilation +--- + +Best-effort compilation is a compilation mode introduced with the aim of improving IDE integration. It allows to generate +tasty-like artifacts and semanticdb files in erroring programs. 
+ +It is composed of two experimental compiler options: +* `-Ybest-effort` produces Best Effort TASTy (`.betasty`) files to the `META-INF/best-effort` directory +* `-Ywith-best-effort-tasty` allows to read Best Effort TASTy files, and if such file is read from the classpath then +limits compilation to the frontend phases + +This feature aims to force through to the typer phase regardless of errors, and then serialize tasty-like files +obtained from the error trees into the best effort directory (`META-INF/best-effort`) and also serialize semanticdb as normal. + +The exact execution pattern is as follows: + +```none +Parser + │ + │ regardless of errors + ˅ +TyperPhase ────────────────────────────────────┐ + │ │ + │ │ + │ with errors │ no errors + │ │ + │ ˅ + │ Every following frontend pass until semanticdb.ExtractSemanticDB (interrupted in the case of errors) + │ │ + │ │ regardless of errors + ˅ ˅ +semanticdb.ExtractSemanticDB ──────────────────┐ + │ │ + │ with errors │ no errors + │ │ + │ ˅ + │ Every following frontend pass until Pickler (interrupted in the case of errors) + │ │ + │ │ regardless of errors + ˅ ˅ +Pickler (with added printing of best effort tasty to the best effort target directory) + │ │ + │ with errors │ no errors + ˅ ˅ +End compilation Execute latter passes +``` + +This is because the IDE is able to retrieve useful info even when skipping phases like PostTyper. + +This execution structure where we skip phases depending on the errors found is motivated by the desire +to avoid additionally handling errored trees in as many phases as possible, therefore also decreasing +maintenance load. This way phases like PostTyper do not have to be continually adjusted to handle trees +with errors from typer and usually the IDE is able to retrieve enough information with just the typer phase. 
+
+An unfortunate consequence of this structure is the fact that we lose access to phases allowing for incremental
+compilation, which is something that could be addressed in the future.
+
+`-Ywith-best-effort-tasty` option allows reading Best Effort TASTy files from classpath. If such file is read, then
+the compiler is disallowed from proceeding to any non-frontend phase. This is to be used either in combination with
+`-Ybest-effort` option to produce Best Effort TASTy using failing dependencies, or in the Presentation Compiler
+to access symbols derived from failing projects.
+
+## Best Effort TASTy format
+
+The Best Effort TASTy (`.betasty`) format is a file format produced by the compiler when the `-Ybest-effort` option
+is used. It is characterised by a different header and an addition of the `ERRORtype` type, which represents errored types in
+the compiler. The Best Effort TASTy format also extends the regular TASTy grammar to allow the handling of as
+large an amount of incorrect trees produced by the compiler as possible. The format is defined as part of the
+`dotty.tools.besteffort.BestEffortTastyFormat` object.
+
+Since currently the format holds an experimental status, no compatibility rules are defined for now, and the specification
+may change between the patch compiler versions, if need be.
+
+For performance reasons, if no errors are detected in the frontend phases, a betasty file may be serialized in the format of
+regular TASTy file, characterized by the use of Tasty header instead of Best Effort TASTy header in the `.betasty` file.
+
+## Testing
+
+The testing procedure reuses the `tests/neg` negative tests that are usually meant to produce errors. First they are compiled
+with the `-Ybest-effort` option (testing the TreePickler for errored trees), then later, the tree is reconstructed using
+the previously created Best Effort TASTy, with `-Yread-tasty` and `-Ywith-best-effort-tasty` options. 
This is to test the
+TreeUnpickler for those Best Effort TASTy files.
+
+One of the goals of this feature is to keep the maintenance cost low, and to not let this feature hinder the pace of the
+overall development of the compiler. Because of that, the tests can be freely disabled in `compiler/neg-best-effort.blacklist`
+(testing TreePickler) and `compiler/neg-best-effort-from-tasty.blacklist` (testing TreeUnpickler).
diff --git a/docs/sidebar.yml b/docs/sidebar.yml
index 65d7ac2f9ee4..a0011b026cef 100644
--- a/docs/sidebar.yml
+++ b/docs/sidebar.yml
@@ -216,5 +216,6 @@ subsection:
     - page: internals/debug-macros.md
     - page: internals/gadts.md
     - page: internals/coverage.md
+    - page: internals/best-effort-compilation.md
 - page: release-notes-0.1.2.md
   hidden: true
diff --git a/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala b/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala
index 747f104cfede..62a947aeb50b 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala
@@ -21,7 +21,7 @@ object TastyUtils:
   private def normalTasty(tastyURI: URI): String =
     val tastyBytes = Files.readAllBytes(Paths.get(tastyURI))
-    new TastyPrinter(tastyBytes.nn).showContents()
+    new TastyPrinter(tastyBytes.nn, isBestEffortTasty = false, testPickler = false).showContents()
 
   private def htmlTasty(
     tastyURI: URI,
diff --git a/project/Build.scala b/project/Build.scala
index 69441d0aaa01..cfffda810f75 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -394,6 +394,7 @@ object Build {
       "-skip-by-id:scala.runtime.MatchCase",
       "-skip-by-id:dotty.tools.tasty",
       "-skip-by-id:dotty.tools.tasty.util",
+      "-skip-by-id:dotty.tools.tasty.besteffort",
       "-project-footer", s"Copyright (c) 2002-$currentYear, LAMP/EPFL",
       "-author", "-groups",
@@ -2287,6 +2288,7 @@ object ScaladocConfigs {
       "scala.runtime.MatchCase",
       "dotty.tools.tasty",
       "dotty.tools.tasty.util",
+      "dotty.tools.tasty.besteffort"
)) def projectFooter = ProjectFooter(s"Copyright (c) 2002-$currentYear, LAMP/EPFL") def defaultTemplate = DefaultTemplate("static-site-main") diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index a51541192321..78c5c0ba72b9 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -103,7 +103,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { val fileVersion = TastyVersion(fileMajor, fileMinor, 0) val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) val signature = signatureString(fileVersion, toolVersion, what = "Backward", tool = None) - val fix = recompileFix(toolVersion.minStable) + val fix = recompileFix(toolVersion.minStable, config) throw new UnpickleException(signature + fix + tastyAddendum) } else { @@ -117,43 +117,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { new String(bytes, start.index, length) } - val validVersion = TastyFormat.isVersionCompatible( - fileMajor = fileMajor, - fileMinor = fileMinor, - fileExperimental = fileExperimental, - compilerMajor = toolMajor, - compilerMinor = toolMinor, - compilerExperimental = toolExperimental - ) - - check(validVersion, { - // failure means that the TASTy file cannot be read, therefore it is either: - // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor - // version supported by this compiler - // - any experimental in an older minor, in which case the library should be recompiled by the stable - // compiler in the same minor. - // - older experimental in the same minor, in which case the compiler is also experimental, and the library - // should be recompiled by the current compiler - // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. 
- val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) - val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) - - val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) - - val what = if (compat < 0) "Backward" else "Forward" - val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) - val fix = ( - if (compat < 0) { - val newCompiler = - if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable - else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable - else toolVersion // recompile the experimental library with the current experimental compiler - recompileFix(newCompiler) - } - else upgradeFix(fileVersion) - ) - signature + fix + tastyAddendum - }) + checkValidVersion(fileMajor, fileMinor, fileExperimental, toolingVersion, config) val uuid = new UUID(readUncompressedLong(), readUncompressedLong()) new TastyHeader(uuid, fileMajor, fileMinor, fileExperimental, toolingVersion) {} @@ -161,11 +125,56 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { } def isAtEnd: Boolean = reader.isAtEnd +} + +object TastyHeaderUnpickler { private def check(cond: Boolean, msg: => String): Unit = { if (!cond) throw new UnpickleException(msg) } + private def checkValidVersion(fileMajor: Int, fileMinor: Int, fileExperimental: Int, toolingVersion: String, config: UnpicklerConfig) = { + val toolMajor: Int = config.majorVersion + val toolMinor: Int = config.minorVersion + val toolExperimental: Int = config.experimentalVersion + val validVersion = TastyFormat.isVersionCompatible( + fileMajor = fileMajor, + fileMinor = fileMinor, + fileExperimental = fileExperimental, + compilerMajor = toolMajor, + compilerMinor = toolMinor, + compilerExperimental = toolExperimental + ) + check(validVersion, { + // failure means that the TASTy file cannot be read, therefore it is either: + // - backwards incompatible major, in which 
case the library should be recompiled by the minimum stable minor + // version supported by this compiler + // - any experimental in an older minor, in which case the library should be recompiled by the stable + // compiler in the same minor. + // - older experimental in the same minor, in which case the compiler is also experimental, and the library + // should be recompiled by the current compiler + // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. + val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) + val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) + + val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) + + val what = if (compat < 0) "Backward" else "Forward" + val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) + val fix = ( + if (compat < 0) { + val newCompiler = + if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable + else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable + else toolVersion // recompile the experimental library with the current experimental compiler + recompileFix(newCompiler, config) + } + else upgradeFix(fileVersion, config) + ) + signature + fix + tastyAddendum + }) + } + private def signatureString( fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = { val optProducedBy = tool.fold("")(t => s", produced by $t") @@ -174,13 +183,13 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { |""".stripMargin } - private def recompileFix(producerVersion: TastyVersion) = { + private def recompileFix(producerVersion: TastyVersion, config: UnpicklerConfig) = { val addendum = config.recompileAdditionalInfo val newTool = config.upgradedProducerTool(producerVersion) s""" The source of this file should be recompiled by $newTool.$addendum""".stripMargin } - private def 
upgradeFix(fileVersion: TastyVersion) = { + private def upgradeFix(fileVersion: TastyVersion, config: UnpicklerConfig) = { val addendum = config.upgradeAdditionalInfo(fileVersion) val newTool = config.upgradedReaderTool(fileVersion) s""" To read this ${fileVersion.kind} file, use $newTool.$addendum""".stripMargin @@ -189,9 +198,6 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { private def tastyAddendum: String = """ | Please refer to the documentation for information on TASTy versioning: | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin -} - -object TastyHeaderUnpickler { private object Compatibility { final val BackwardIncompatibleMajor = -3 diff --git a/tests/best-effort/broken-macro-executed-in-dependency/err/ExecutedMacro.scala b/tests/best-effort/broken-macro-executed-in-dependency/err/ExecutedMacro.scala new file mode 100644 index 000000000000..a6a071c9b85e --- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependency/err/ExecutedMacro.scala @@ -0,0 +1,2 @@ +object ExecutedMacro: + val failingMacro = FailingTransparent.execute() diff --git a/tests/best-effort/broken-macro-executed-in-dependency/err/FailingTransparentInline.scala b/tests/best-effort/broken-macro-executed-in-dependency/err/FailingTransparentInline.scala new file mode 100644 index 000000000000..9f9fdc22ee4b --- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependency/err/FailingTransparentInline.scala @@ -0,0 +1,11 @@ +object FailingTransparentInline: + sealed trait Foo + case class FooA() extends Foo + case class FooB() extends Foo + + transparent inline def execute(): Foo = ${ executeImpl() } + def executeImpl(using Quotes)() = { + val a = 0 + a.asInstanceOf[String] + FooB() + } diff --git a/tests/best-effort/broken-macro-executed-in-dependency/main/Main.scala b/tests/best-effort/broken-macro-executed-in-dependency/main/Main.scala new file mode 100644 index 000000000000..6603d4ee0cc1 
--- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependency/main/Main.scala @@ -0,0 +1,2 @@ +object Main: + ExecutedMacro.failingMacro diff --git a/tests/best-effort/broken-macro-executed-in-dependent/err/BrokenMacros.scala b/tests/best-effort/broken-macro-executed-in-dependent/err/BrokenMacros.scala new file mode 100644 index 000000000000..73d121022b23 --- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependent/err/BrokenMacros.scala @@ -0,0 +1,13 @@ +import scala.quoted._ +object BrokenMacros: + transparent inline def macro1() = ${macroImpl()} + def macroImpl(using Quotes)(): Expr[String] = + val a: Int = "str" // source of the error + '{a} + + sealed trait Foo + case class FooA() extends Foo + case class FooB() + transparent inline def macro2(): Foo = ${macro2Impl()} + def macro2Impl(using Quotes)(): Expr[Foo] = + '{FooB()} diff --git a/tests/best-effort/broken-macro-executed-in-dependent/main/Main.scala b/tests/best-effort/broken-macro-executed-in-dependent/main/Main.scala new file mode 100644 index 000000000000..d382bd4aabd7 --- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependent/main/Main.scala @@ -0,0 +1,3 @@ +object Main + val a = BrokenMacros.macro1() + val b = BrokenMacros.macro2() diff --git a/tests/best-effort/mirrors-in-dependency/err/MirrorTypes.scala b/tests/best-effort/mirrors-in-dependency/err/MirrorTypes.scala new file mode 100644 index 000000000000..280805ba8ab9 --- /dev/null +++ b/tests/best-effort/mirrors-in-dependency/err/MirrorTypes.scala @@ -0,0 +1,2 @@ +object MirrorTypes: + case class BrokenType(a: NonExistent, b: Int) diff --git a/tests/best-effort/mirrors-in-dependency/main/MirrorExec.scala b/tests/best-effort/mirrors-in-dependency/main/MirrorExec.scala new file mode 100644 index 000000000000..12052a27b57d --- /dev/null +++ b/tests/best-effort/mirrors-in-dependency/main/MirrorExec.scala @@ -0,0 +1,7 @@ +import scala.deriving.Mirror + +object MirrorExec: + transparent inline def 
getNames[T](using m: Mirror.Of[T]): m.MirroredElemTypes = + scala.compiletime.erasedValue[m.MirroredElemTypes] + + val ab = getNames[MirrorTypes.BrokenType] diff --git a/tests/best-effort/simple-type-error/err/SimpleTypeError.scala b/tests/best-effort/simple-type-error/err/SimpleTypeError.scala new file mode 100644 index 000000000000..cf9ad8c8d56a --- /dev/null +++ b/tests/best-effort/simple-type-error/err/SimpleTypeError.scala @@ -0,0 +1,2 @@ +object SimpleTypeError: + def foo: Int = "string" diff --git a/tests/best-effort/simple-type-error/main/Main.scala b/tests/best-effort/simple-type-error/main/Main.scala new file mode 100644 index 000000000000..c1e821d790e7 --- /dev/null +++ b/tests/best-effort/simple-type-error/main/Main.scala @@ -0,0 +1,2 @@ +object Main: + SimpleTypeError.foo From a5195f83571aba62be6eae82ce284fa92028ffd3 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 18 Apr 2024 12:01:52 +0200 Subject: [PATCH 144/465] Avoid recomputing `isUpToDate` --- compiler/src/dotty/tools/dotc/core/Types.scala | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 2646b8eb76c3..a198a1c3a2c5 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5123,11 +5123,6 @@ object Types extends TypeUtils { tp.underlying } - def isUpToDate: Boolean = - (reductionContext ne null) && - reductionContext.keysIterator.forall: tp => - reductionContext(tp) `eq` contextInfo(tp) - def setReductionContext(): Unit = new TypeTraverser: var footprint: Set[Type] = Set() @@ -5162,15 +5157,22 @@ object Types extends TypeUtils { matchTypes.println(i"footprint for $thisMatchType $hashCode: ${footprint.toList.map(x => (x, contextInfo(x)))}%, %") end setReductionContext + def changedReductionContext(): Boolean = + val isUpToDate = + (reductionContext ne null) && + 
reductionContext.keysIterator.forall: tp => + reductionContext(tp) `eq` contextInfo(tp) + if !isUpToDate then setReductionContext() + !isUpToDate + record("MatchType.reduce called") if !Config.cacheMatchReduced || myReduced == null - || !isUpToDate + || changedReductionContext() || MatchTypeTrace.isRecording then record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") - if !isUpToDate then setReductionContext() val saved = ctx.typerState.snapshot() try myReduced = trace(i"reduce match type $this $hashCode", matchTypes, show = true): From 02533edc8c1e795f45b90c87bc1fbb6ad8004dab Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 18 Apr 2024 13:38:00 +0200 Subject: [PATCH 145/465] Fix overloaded default methods test in RefChecks It used `cls.info` instead of `cls.thisType`, which caused symbols accessible only through the self type of `cls` to be forgotten. Fixes #18555 --- .../src/dotty/tools/dotc/typer/RefChecks.scala | 2 +- tests/pos/i18555.scala | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i18555.scala diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 1397b05ec3b5..cdfd137e5661 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -52,7 +52,7 @@ object RefChecks { }} for (name <- defaultMethodNames) { - val methods = clazz.info.member(name).alternatives.map(_.symbol) + val methods = clazz.thisType.member(name).alternatives.map(_.symbol) val haveDefaults = methods.filter(_.hasDefaultParams) if (haveDefaults.length > 1) { val owners = haveDefaults map (_.owner) diff --git a/tests/pos/i18555.scala b/tests/pos/i18555.scala new file mode 100644 index 000000000000..84198409370e --- /dev/null +++ b/tests/pos/i18555.scala @@ -0,0 +1,14 @@ +trait GenericCollectionWithCommands { + self: PackSupport => + + def bar(foo: Int = 1): Any = ??? 
+ def bar(writer: GenericCollectionWithCommands.this.pack.Writer[Any]): Any = ??? +} + +trait PackSupport { + val pack: SerializationPack +} + +trait SerializationPack { + type Writer[A] +} \ No newline at end of file From 66f7f9599ff8ce592e9a6e9e60432ae410e95d54 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 18 Apr 2024 13:39:30 +0200 Subject: [PATCH 146/465] Use explicit nulls for reductionContext --- compiler/src/dotty/tools/dotc/core/Types.scala | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index a198a1c3a2c5..9080d5933693 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5098,7 +5098,7 @@ object Types extends TypeUtils { def underlying(using Context): Type = bound private var myReduced: Type | Null = null - private var reductionContext: util.MutableMap[Type, Type] = uninitialized + private var reductionContext: util.MutableMap[Type, Type] | Null = null override def tryNormalize(using Context): Type = try @@ -5153,15 +5153,12 @@ object Types extends TypeUtils { cases.foreach(traverse) reductionContext = util.HashMap() for tp <- footprint do - reductionContext(tp) = contextInfo(tp) + reductionContext.nn(tp) = contextInfo(tp) matchTypes.println(i"footprint for $thisMatchType $hashCode: ${footprint.toList.map(x => (x, contextInfo(x)))}%, %") end setReductionContext def changedReductionContext(): Boolean = - val isUpToDate = - (reductionContext ne null) && - reductionContext.keysIterator.forall: tp => - reductionContext(tp) `eq` contextInfo(tp) + val isUpToDate = reductionContext != null && reductionContext.nn.iterator.forall(contextInfo(_) `eq` _) if !isUpToDate then setReductionContext() !isUpToDate @@ -5193,10 +5190,9 @@ object Types extends TypeUtils { /** True if the reduction uses GADT constraints. 
*/ def reducesUsingGadt(using Context): Boolean = - (reductionContext ne null) && reductionContext.keysIterator.exists { - case tp: TypeRef => reductionContext(tp).exists - case _ => false - } + reductionContext != null && reductionContext.nn.iterator.exists: + case (tp: TypeRef, tpCtx) => tpCtx.exists + case _ => false override def computeHash(bs: Binders): Int = doHash(bs, scrutinee, bound :: cases) From 7539fb51b9897816013262b54bd9e98114dcd24c Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 18 Apr 2024 13:40:56 +0200 Subject: [PATCH 147/465] Piggy-back some unrelated tests --- tests/pos/first-class-patterns.scala | 23 +++++++++++++++++++++++ tests/pos/into-bigint.scala | 21 +++++++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100644 tests/pos/first-class-patterns.scala create mode 100644 tests/pos/into-bigint.scala diff --git a/tests/pos/first-class-patterns.scala b/tests/pos/first-class-patterns.scala new file mode 100644 index 000000000000..98d7faf4d8e7 --- /dev/null +++ b/tests/pos/first-class-patterns.scala @@ -0,0 +1,23 @@ + + // Trait of all extractors with unapply methods + trait Matcher[A, B]: + def unapply(x: A): Option[B] + + // An extractor defined by an unappy method + object Even extends Matcher[Int, Int]: + def unapply(x: Int): Option[Int] = + if x % 2 == 0 then Some(x) else None + + // Method using a given extractor in pattern position + def collect[A, B](xs: List[A], m: Matcher[A, B]): List[B] = + xs match + case Nil => Nil + case m(x) :: xs1 => x :: collect(xs1, m) + case _ :: xs1 => collect(xs1, m) + + @main def test = + val xs = List(1, 2, 3, 4) + val ys = collect(xs, Even) + println(ys) + + diff --git a/tests/pos/into-bigint.scala b/tests/pos/into-bigint.scala new file mode 100644 index 000000000000..d7ecee40b3ba --- /dev/null +++ b/tests/pos/into-bigint.scala @@ -0,0 +1,21 @@ +import language.experimental.into + +class BigInt(x: Int): + def + (other: into BigInt): BigInt = ??? + def * (other: into BigInt): BigInt = ??? 
+ +object BigInt: + given Conversion[Int, BigInt] = BigInt(_) + + extension (x: into BigInt) + def + (other: BigInt): BigInt = ??? + def * (other: BigInt): BigInt = ??? + +@main def Test = + val x = BigInt(2) + val y = 3 + val a1 = x + y + val a2 = y * x + val a3 = x * x + val a4 = y + y + From 25334fd7ad99c98873be27c3ec3ff6637fe0cd78 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 18 Apr 2024 13:51:27 +0200 Subject: [PATCH 148/465] rename BestEffortCompilationTests to not run them in testCompilation --- ...ompilationTests.scala => BestEffortOptionsTests.scala} | 6 +++--- compiler/test/dotty/tools/vulpix/ParallelTesting.scala | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) rename compiler/test/dotty/tools/dotc/{BestEffortCompilationTests.scala => BestEffortOptionsTests.scala} (94%) diff --git a/compiler/test/dotty/tools/dotc/BestEffortCompilationTests.scala b/compiler/test/dotty/tools/dotc/BestEffortOptionsTests.scala similarity index 94% rename from compiler/test/dotty/tools/dotc/BestEffortCompilationTests.scala rename to compiler/test/dotty/tools/dotc/BestEffortOptionsTests.scala index 681c92f266d2..1e7262f5fd8d 100644 --- a/compiler/test/dotty/tools/dotc/BestEffortCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BestEffortOptionsTests.scala @@ -10,10 +10,10 @@ import java.io.{File => JFile} import scala.language.unsafeNulls -class BestEffortCompilationTests { +class BestEffortOptionsTests { import ParallelTesting._ import vulpix.TestConfiguration._ - import BestEffortCompilationTests._ + import BestEffortOptionsTests._ import CompilationTest.aggregateTests // Since TASTy and beTASTy files are read in a lazy manner (only when referenced by the source .scala file) @@ -42,7 +42,7 @@ class BestEffortCompilationTests { } } -object BestEffortCompilationTests extends ParallelTesting { +object BestEffortOptionsTests extends ParallelTesting { def maxDuration = 45.seconds def numberOfSlaves = Runtime.getRuntime.availableProcessors() def 
safeMode = Properties.testsSafeMode diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index 880a3bd1cc53..6ff0d3fba5ca 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -1610,7 +1610,7 @@ trait ParallelTesting extends RunnerOrchestration { self => * '-Ywith-best-effort-tasty' to test the TastyReader for Best Effort TASTy. */ def compileBestEffortTastyInDir(f: String, flags: TestFlags, picklingFilter: FileFilter, unpicklingFilter: FileFilter)( - implicit testGroup: TestGroup): BestEffortCompilationTest = { + implicit testGroup: TestGroup): BestEffortOptionsTest = { val bestEffortFlag = "-Ybest-effort" val semanticDbFlag = "-Xsemanticdb" assert(!flags.options.contains(bestEffortFlag), "Best effort compilation flag should not be added manually") @@ -1687,7 +1687,7 @@ trait ParallelTesting extends RunnerOrchestration { self => new CompilationFromBestEffortTasty(testGroup.name, f, flags, bestEffortDir) } - new BestEffortCompilationTest( + new BestEffortOptionsTest( new CompilationTest(bestEffortTargets).keepOutput, new CompilationTest(fromTastyTargets).keepOutput, bestEffortDirs, @@ -1735,7 +1735,7 @@ trait ParallelTesting extends RunnerOrchestration { self => (step1Compilation, step2Compilation, bestEffortDir) }.unzip3 - BestEffortCompilationTest( + BestEffortOptionsTest( new CompilationTest(step1Targets).keepOutput, new CompilationTest(step2Targets).keepOutput, bestEffortDirs, @@ -1770,7 +1770,7 @@ trait ParallelTesting extends RunnerOrchestration { self => } } - class BestEffortCompilationTest(step1: CompilationTest, step2: CompilationTest, bestEffortDirs: List[JFile], shouldDelete: Boolean)(implicit testGroup: TestGroup) { + class BestEffortOptionsTest(step1: CompilationTest, step2: CompilationTest, bestEffortDirs: List[JFile], shouldDelete: Boolean)(implicit testGroup: TestGroup) { def checkNoCrash()(implicit 
summaryReport: SummaryReporting): this.type = { step1.checkNoBestEffortError() // Compile all files to generate the class files with best effort tasty From be9e090dfa75091d91ebe6925ccee693ce14be56 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Thu, 18 Apr 2024 14:59:47 +0200 Subject: [PATCH 149/465] Fix the visibility check in `markFree` --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 29 +++++++++++++++++-- tests/neg-custom-args/captures/i16725.scala | 15 ++++++++++ tests/neg-custom-args/captures/i20169.scala | 8 +++++ 3 files changed, 49 insertions(+), 3 deletions(-) create mode 100644 tests/neg-custom-args/captures/i16725.scala create mode 100644 tests/neg-custom-args/captures/i20169.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 9b6217033ede..0d6ab2654d7a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -358,9 +358,17 @@ class CheckCaptures extends Recheck, SymTransformer: def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = if !cs.isAlwaysEmpty then forallOuterEnvsUpTo(ctx.owner.topLevelClass): env => - def isVisibleFromEnv(sym: Symbol) = - (env.kind == EnvKind.NestedInOwner || env.owner != sym) - && env.owner.isContainedIn(sym) + // Whether a symbol is defined inside the owner of the environment? + inline def isContainedInEnv(sym: Symbol) = + if env.kind == EnvKind.NestedInOwner then + sym.isProperlyContainedIn(env.owner) + else + sym.isContainedIn(env.owner) + // A captured reference with the symbol `sym` is visible from the environment + // if `sym` is not defined inside the owner of the environment + inline def isVisibleFromEnv(sym: Symbol) = !isContainedInEnv(sym) + // Only captured references that are visible from the environment + // should be included. 
val included = cs.filter: case ref: TermRef => isVisibleFromEnv(ref.symbol.owner) case ref: ThisType => isVisibleFromEnv(ref.cls) @@ -378,6 +386,7 @@ class CheckCaptures extends Recheck, SymTransformer: // there won't be an apply; need to include call captures now includeCallCaptures(tree.symbol, tree.srcPos) else + //debugShowEnvs() markFree(tree.symbol, tree.srcPos) super.recheckIdent(tree, pt) @@ -946,6 +955,19 @@ class CheckCaptures extends Recheck, SymTransformer: expected end addOuterRefs + /** A debugging method for showing the envrionments during capture checking. */ + private def debugShowEnvs()(using Context): Unit = + def showEnv(env: Env): String = i"Env(${env.owner}, ${env.kind}, ${env.captured})" + val sb = StringBuilder() + @annotation.tailrec def walk(env: Env | Null): Unit = + if env != null then + sb ++= showEnv(env) + sb ++= "\n" + walk(env.outer0) + sb ++= "===== Current Envs ======\n" + walk(curEnv) + sb ++= "===== End ======\n" + println(sb.result()) /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions * @@ -1085,6 +1107,7 @@ class CheckCaptures extends Recheck, SymTransformer: pos) } if !insertBox then // unboxing + //debugShowEnvs() markFree(criticalSet, pos) adaptedType(!boxed) else diff --git a/tests/neg-custom-args/captures/i16725.scala b/tests/neg-custom-args/captures/i16725.scala new file mode 100644 index 000000000000..ff06b3be78a7 --- /dev/null +++ b/tests/neg-custom-args/captures/i16725.scala @@ -0,0 +1,15 @@ +import language.experimental.captureChecking +@annotation.capability +class IO: + def brewCoffee(): Unit = ??? +def usingIO[T](op: IO => T): T = ??? 
+ +type Wrapper[T] = [R] -> (f: T => R) -> R +def mk[T](x: T): Wrapper[T] = [R] => f => f(x) +def useWrappedIO(wrapper: Wrapper[IO]): () -> Unit = + () => + wrapper: io => // error + io.brewCoffee() +def main(): Unit = + val escaped = usingIO(io => useWrappedIO(mk(io))) + escaped() // boom diff --git a/tests/neg-custom-args/captures/i20169.scala b/tests/neg-custom-args/captures/i20169.scala new file mode 100644 index 000000000000..13e8c87f8e0c --- /dev/null +++ b/tests/neg-custom-args/captures/i20169.scala @@ -0,0 +1,8 @@ +case class Box[T](x: T): + def foreach(f: T => Unit): Unit = f(x) + +def runOps(ops: Box[() => Unit]): () -> Unit = + val applyFn: (() => Unit) -> Unit = f => f() + val fn: () -> Unit = () => + ops.foreach(applyFn) // error + fn From 59ae630d46f3d4293d8c8ef55fd3fda4ca97dfad Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Thu, 18 Apr 2024 16:37:28 +0200 Subject: [PATCH 150/465] Avoid boxing the type argument of `asInstanceOf` --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 9ab41859f170..0b5b1f6532b0 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -407,8 +407,12 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tree @ TypeApply(fn, args) => traverse(fn) - for case arg: TypeTree <- args do - transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + fn match + case Select(qual, nme.asInstanceOf_) => + // No need to box type arguments of an asInstanceOf call. See #20224. 
+ case _ => + for case arg: TypeTree <- args do + transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed case tree: TypeDef if tree.symbol.isClass => inContext(ctx.withOwner(tree.symbol)): From 74f8159649815b673f5e213f91de98d247eda86b Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 18 Apr 2024 17:12:03 +0200 Subject: [PATCH 151/465] add regression test for issue #17930 --- tests/run/i17930/Foo_1.scala | 13 +++++++++++++ tests/run/i17930/app_2.scala | 5 +++++ 2 files changed, 18 insertions(+) create mode 100644 tests/run/i17930/Foo_1.scala create mode 100644 tests/run/i17930/app_2.scala diff --git a/tests/run/i17930/Foo_1.scala b/tests/run/i17930/Foo_1.scala new file mode 100644 index 000000000000..0ee4fa711f58 --- /dev/null +++ b/tests/run/i17930/Foo_1.scala @@ -0,0 +1,13 @@ +package eu.joaocosta.defaultstest + +object Foo { + def foo(x: Int, y: Int = 5): Int = x + y +} + +object Bar { + export Foo.* +} + +object App { + println(Bar.foo(2)) // Works +} diff --git a/tests/run/i17930/app_2.scala b/tests/run/i17930/app_2.scala new file mode 100644 index 000000000000..64ba6bff18c5 --- /dev/null +++ b/tests/run/i17930/app_2.scala @@ -0,0 +1,5 @@ +import eu.joaocosta.defaultstest._ + +@main def Test = + println(Foo.foo(2)) // Works + println(Bar.foo(2)) // Fails with "missing argument for parameter y of method foo in object Bar: (x: Int, y: Int): Int" From 5a15c86d0354d9e55e062e7ef5c95e188e3edf3d Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 18 Apr 2024 19:34:41 +0200 Subject: [PATCH 152/465] Allow @retains arguments to be context functions Suppress the creation of apply methods for these arguments Fixes #20231 --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 1 + compiler/src/dotty/tools/dotc/typer/Typer.scala | 5 +++++ library/src/scala/annotation/retains.scala | 12 ++++++++++-- library/src/scala/annotation/retainsByName.scala | 2 +- tests/pos-custom-args/captures/i20231.scala | 4 ++++ 5 files changed, 21 
insertions(+), 3 deletions(-) create mode 100644 tests/pos-custom-args/captures/i20231.scala diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 932a7d72d33e..e813786068a5 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1056,6 +1056,7 @@ class Definitions { @tu lazy val RetainsAnnot: ClassSymbol = requiredClass("scala.annotation.retains") @tu lazy val RetainsCapAnnot: ClassSymbol = requiredClass("scala.annotation.retainsCap") @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") + @tu lazy val RetainsArgAnnot: ClassSymbol = requiredClass("scala.annotation.retainsArg") @tu lazy val PublicInBinaryAnnot: ClassSymbol = requiredClass("scala.annotation.publicInBinary") @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 9d0150f49a1f..9150ad6be392 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4047,10 +4047,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer true } + def isRetainsArg(pt: Type) = pt match + case AnnotatedType(arg, annot) => annot.symbol == defn.RetainsArgAnnot + case _ => false + if (implicitFun || caseCompanion) && !isApplyProto(pt) && pt != SingletonTypeProto && pt != LhsProto + && !isRetainsArg(pt) && !ctx.mode.is(Mode.Pattern) && !tree.isInstanceOf[SplicePattern] && !ctx.isAfterTyper diff --git a/library/src/scala/annotation/retains.scala b/library/src/scala/annotation/retains.scala index 4fa14e635136..909adc13a1c2 100644 --- a/library/src/scala/annotation/retains.scala +++ b/library/src/scala/annotation/retains.scala @@ -12,12 +12,20 @@ package scala.annotation * non-standard capturing type syntax. 
*/ @experimental -class retains(xs: Any*) extends annotation.StaticAnnotation +class retains(xs: (Any@retainsArg)*) extends annotation.StaticAnnotation -/** Equivalent in meaning to `@retains(cap)`, but consumes less bytecode. +/** Equivalent in meaning to `@retains(cap)`, but consumes less bytecode. */ @experimental class retainsCap() extends annotation.StaticAnnotation // This special case is needed to be able to load standard library modules without // cyclic reference errors. Specifically, load sequences involving IterableOnce. +/** Internal use, only for parameters of `retains` and `retainsByName`. + */ +@experimental +class retainsArg extends annotation.StaticAnnotation + // This annotation prevents argument references to retains and retainsByName from being + // augmented with explicit arguments. That's unsound in general, but necessary + // since a captureRef could have an impure context function type, A ?=> B, but + // we still need to have the unapplied captureRef in the annotation. 
diff --git a/library/src/scala/annotation/retainsByName.scala b/library/src/scala/annotation/retainsByName.scala index 421e0400c4e1..e6e3dafcb752 100644 --- a/library/src/scala/annotation/retainsByName.scala +++ b/library/src/scala/annotation/retainsByName.scala @@ -2,5 +2,5 @@ package scala.annotation /** An annotation that indicates capture of an enclosing by-name type */ -@experimental class retainsByName(xs: Any*) extends annotation.StaticAnnotation +@experimental class retainsByName(xs: (Any@retainsArg)*) extends annotation.StaticAnnotation diff --git a/tests/pos-custom-args/captures/i20231.scala b/tests/pos-custom-args/captures/i20231.scala new file mode 100644 index 000000000000..5557bc9929e6 --- /dev/null +++ b/tests/pos-custom-args/captures/i20231.scala @@ -0,0 +1,4 @@ +class Async +class C(val x: Async ?=> Unit) +def foo(x: Async ?=> Unit): C^{x} = C(x) +def foo(x: Async ?=> Unit)(using Async): C^{x} = C(x) \ No newline at end of file From 829e84a9da26b28379f2797edea3d457112e90bc Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 18 Apr 2024 19:42:33 +0200 Subject: [PATCH 153/465] Update stdLibExperimentalDefinitions --- tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 407f789beb58..2d0081977435 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -30,6 +30,7 @@ val experimentalDefinitionInLibrary = Set( "scala.annotation.retains", "scala.annotation.retainsByName", "scala.annotation.retainsCap", + "scala.annotation.retainsArg", "scala.Pure", "scala.caps", "scala.caps$", From bdbf2058fd13ecf538d480bdbea68df37722db92 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Thu, 18 Apr 2024 18:28:59 +0200 Subject: [PATCH 154/465] Specialize the printer in TreeChecker --- 
compiler/src/dotty/tools/dotc/transform/TreeChecker.scala | 5 +++-- tests/neg-macros/i19842-a.check | 2 +- tests/neg-macros/i19842-b.check | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 0515a6978a47..2ebe33a9a14f 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -2,6 +2,7 @@ package dotty.tools package dotc package transform +import config.Printers.checks as printer import core.Names.Name import core.DenotTransformers.* import core.SymDenotations.* @@ -445,7 +446,7 @@ object TreeChecker { res catch case NonFatal(ex) if !ctx.run.enrichedErrorMessage => val treeStr = tree.show(using ctx.withPhase(ctx.phase.prev.megaPhase)) - println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) + printer.println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) throw ex } @@ -844,7 +845,7 @@ object TreeChecker { // We want make sure that transparent inline macros are checked in the same way that // non transparent macros are, so we try to prepare a context which would make // the checks behave the same way for both types of macros. - // + // // E.g. Different instances of skolem types are by definition not able to be a subtype of // one another, however in practice this is only upheld during typer phase, and we do not want // it to be upheld during this check. 
diff --git a/tests/neg-macros/i19842-a.check b/tests/neg-macros/i19842-a.check index af5dbc604f93..30b295cd05a5 100644 --- a/tests/neg-macros/i19842-a.check +++ b/tests/neg-macros/i19842-a.check @@ -8,7 +8,7 @@ |Parents in tree: [trait Serializer] | | at scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8) - | at dotty.tools.dotc.transform.TreeChecker$.checkParents(TreeChecker.scala:209) + | at dotty.tools.dotc.transform.TreeChecker$.checkParents(TreeChecker.scala:210) | at scala.quoted.runtime.impl.QuotesImpl$reflect$ClassDef$.module(QuotesImpl.scala:257) | at scala.quoted.runtime.impl.QuotesImpl$reflect$ClassDef$.module(QuotesImpl.scala:256) | at Macros$.makeSerializer(Macro.scala:25) diff --git a/tests/neg-macros/i19842-b.check b/tests/neg-macros/i19842-b.check index 95a71fb9f98d..d84d916acb66 100644 --- a/tests/neg-macros/i19842-b.check +++ b/tests/neg-macros/i19842-b.check @@ -8,7 +8,7 @@ |Parents in tree: [class Object, trait Serializer, trait Foo] | | at scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8) - | at dotty.tools.dotc.transform.TreeChecker$.checkParents(TreeChecker.scala:209) + | at dotty.tools.dotc.transform.TreeChecker$.checkParents(TreeChecker.scala:210) | at scala.quoted.runtime.impl.QuotesImpl$reflect$ClassDef$.module(QuotesImpl.scala:257) | at scala.quoted.runtime.impl.QuotesImpl$reflect$ClassDef$.module(QuotesImpl.scala:256) | at Macros$.makeSerializer(Macro.scala:27) From 72f2da643d8cdd99712df8cd272ece64c34c1966 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 2 Apr 2024 13:14:28 +0200 Subject: [PATCH 155/465] Strengthen MiMa checks Using the latest patch release of the current minor version may lead to some breakages that are not properly checked/tagged in the MiMa filers. For example, if we accidentally introduce a breaking change in 3.M.1 but check against 3.M.2, we would lose this braking change before the next minor release. If we would check against 3.M.0, we would catch this. 
In general we need to track all the changes that have happened in the current minor version. There should not be any breaking changes unless the PR needs a minor release. --- project/Build.scala | 27 ++++++++++++++++----------- project/MiMaFilters.scala | 20 ++++++++++---------- 2 files changed, 26 insertions(+), 21 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 69441d0aaa01..d8bcd131f232 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -97,17 +97,22 @@ object Build { val publishedDottyVersion = referenceVersion val sbtDottyVersion = "0.5.5" - /** Version against which we check binary compatibility. + /** Minor version against which we check binary compatibility. * - * This must be the latest published release in the same versioning line. - * For example, if the next version is going to be 3.1.4, then this must be - * set to 3.1.3. If it is going to be 3.1.0, it must be set to the latest - * 3.0.x release. + * This must be the earliest published release in the same versioning line. + * For a baseVersion `3.M.P` the mimaPreviousDottyVersion should be set to: + * - `3.M.0` if `P > 0` + * - `3.(M-1).0` if `P = 0` */ - val previousDottyVersion = "3.4.1" + val mimaPreviousDottyVersion = "3.4.0" - /** Version against which we check binary compatibility. */ - val ltsDottyVersion = "3.3.0" + /** LTS version against which we check binary compatibility. + * + * This must be the earliest published release in the LTS versioning line. + * For example, if the latest LTS release is be 3.3.4, then this must be + * set to 3.3.0. 
+ */ + val mimaPreviousLTSDottyVersion = "3.3.0" object CompatMode { final val BinaryCompatible = 0 @@ -500,7 +505,7 @@ object Build { case cv: Disabled => thisProjectID.name case cv: Binary => s"${thisProjectID.name}_${cv.prefix}3${cv.suffix}" } - (thisProjectID.organization % crossedName % previousDottyVersion) + (thisProjectID.organization % crossedName % mimaPreviousDottyVersion) }, mimaCheckDirection := (compatMode match { @@ -1149,7 +1154,7 @@ object Build { }, tastyMiMaConfig ~= { _.withMoreProblemFilters(TastyMiMaFilters.StdlibBootstrapped) }, tastyMiMaReportIssues := tastyMiMaReportIssues.dependsOn(Def.task { - val minorVersion = previousDottyVersion.split('.')(1) + val minorVersion = mimaPreviousDottyVersion.split('.')(1) // TODO find a way around this and test in the CI streams.value.log.warn( s"""To allow TASTy-MiMa to read TASTy files generated by this version of the compile you must: @@ -2183,7 +2188,7 @@ object Build { case cv: Disabled => thisProjectID.name case cv: Binary => s"${thisProjectID.name}_${cv.prefix}3${cv.suffix}" } - (thisProjectID.organization % crossedName % ltsDottyVersion) + (thisProjectID.organization % crossedName % mimaPreviousLTSDottyVersion) }, mimaForwardIssueFilters := MiMaFilters.Scala3Library.ForwardsBreakingChanges, mimaBackwardIssueFilters := MiMaFilters.Scala3Library.BackwardsBreakingChanges, diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 0051d744f787..46c060cacf39 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -7,12 +7,12 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of the library - Build.previousDottyVersion -> Seq( + Build.mimaPreviousDottyVersion -> Seq( ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), ), // Additions since last LTS - Build.ltsDottyVersion -> Seq( + Build.mimaPreviousLTSDottyVersion -> Seq( 
ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefMethods"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefTypeTest"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#defnModule.FunctionClass"), @@ -48,10 +48,10 @@ object MiMaFilters { // Only exceptional cases should be added here. // Breaking changes since last reference version - Build.previousDottyVersion -> Seq.empty, // We should never break backwards compatibility + Build.mimaPreviousDottyVersion -> Seq.empty, // We should never break backwards compatibility // Breaking changes since last LTS - Build.ltsDottyVersion -> Seq( + Build.mimaPreviousLTSDottyVersion -> Seq( // Quotes is assumed to only be implemented by the compiler and on the same version of the library. // It is exceptionally OK to break this compatibility. In these cases, there add new abstract methods that would // potentially not be implemented by others. 
If some other library decides to implement these, @@ -71,35 +71,35 @@ object MiMaFilters { object TastyCore { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of tasty core - Build.previousDottyVersion -> Seq( + Build.mimaPreviousDottyVersion -> Seq( ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype") ), // Additions since last LTS - Build.ltsDottyVersion -> Seq( + Build.mimaPreviousLTSDottyVersion -> Seq( ) ) val BackwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Breaking changes since last LTS - Build.ltsDottyVersion -> Seq.empty // We should never break backwards compatibility + Build.mimaPreviousLTSDottyVersion -> Seq.empty // We should never break backwards compatibility ) } object Interfaces { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of interfaces - Build.previousDottyVersion -> Seq( + Build.mimaPreviousDottyVersion -> Seq( ), // Additions since last LTS - Build.ltsDottyVersion -> Seq( + Build.mimaPreviousLTSDottyVersion -> Seq( ) ) val BackwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Breaking changes since last LTS - Build.ltsDottyVersion -> Seq.empty // We should never break backwards compatibility + Build.mimaPreviousLTSDottyVersion -> Seq.empty // We should never break backwards compatibility ) } From 0876baf8c0a1f607a930b7f9340a76284891f10e Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 19 Apr 2024 18:55:57 +0200 Subject: [PATCH 156/465] Fix conversion of this.fld capture refs under separate compilation --- compiler/src/dotty/tools/dotc/core/TypeUtils.scala | 9 +++++++++ compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- tests/pos/captureRef-separate-compilation/Future_1.scala | 5 +++++ tests/pos/captureRef-separate-compilation/test_2.scala | 5 +++++ 4 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 
tests/pos/captureRef-separate-compilation/Future_1.scala create mode 100644 tests/pos/captureRef-separate-compilation/test_2.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index 4f0162a0fa96..a3d6ab065a77 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -135,5 +135,14 @@ class TypeUtils { case _ => val cls = self.underlyingClassRef(refinementOK = false).typeSymbol cls.isTransparentClass && (!traitOnly || cls.is(Trait)) + + /** Is this type the ThisType of class `cls?`. Note we can't use `self eq cls.thisType` for this, + * since ThisTypes take TermRef parameters and semantically equal TermRefs could have different + * forms (for instance one could use as a prefix the ThisType of an enclosing static module or package, + * and the other could select it from something further out) + */ + def isThisTypeOf(cls: Symbol)(using Context) = self match + case self: Types.ThisType => self.cls == cls + case _ => false } } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 747c3a0fd3dd..870e985fd48e 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2991,7 +2991,7 @@ object Types extends TypeUtils { */ override def isTrackableRef(using Context) = ((prefix eq NoPrefix) - || symbol.is(ParamAccessor) && (prefix eq symbol.owner.thisType) + || symbol.is(ParamAccessor) && prefix.isThisTypeOf(symbol.owner) || isRootCapability ) && !symbol.isOneOf(UnstableValueFlags) diff --git a/tests/pos/captureRef-separate-compilation/Future_1.scala b/tests/pos/captureRef-separate-compilation/Future_1.scala new file mode 100644 index 000000000000..c01aeadd9360 --- /dev/null +++ b/tests/pos/captureRef-separate-compilation/Future_1.scala @@ -0,0 +1,5 @@ +class Future[T] +object Future: + class Collector[T](fs: (Future[T]^)*) + 
class MutableCollector[T](val futures: (Future[T]^)*) extends Collector[T](futures*): + def add(future: Future[T]^{futures*}) = ??? diff --git a/tests/pos/captureRef-separate-compilation/test_2.scala b/tests/pos/captureRef-separate-compilation/test_2.scala new file mode 100644 index 000000000000..35714090a9ac --- /dev/null +++ b/tests/pos/captureRef-separate-compilation/test_2.scala @@ -0,0 +1,5 @@ +def test = + val collector: Future.MutableCollector[Int] = Future.MutableCollector() + collector.add(???) + + From 736c6f4cc945dc3418cf68c74fbd680ca060e69b Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 19 Apr 2024 18:57:29 +0200 Subject: [PATCH 157/465] Move test to proper directory --- .../captures}/captureRef-separate-compilation/Future_1.scala | 0 .../captures}/captureRef-separate-compilation/test_2.scala | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename tests/{pos => pos-custom-args/captures}/captureRef-separate-compilation/Future_1.scala (100%) rename tests/{pos => pos-custom-args/captures}/captureRef-separate-compilation/test_2.scala (100%) diff --git a/tests/pos/captureRef-separate-compilation/Future_1.scala b/tests/pos-custom-args/captures/captureRef-separate-compilation/Future_1.scala similarity index 100% rename from tests/pos/captureRef-separate-compilation/Future_1.scala rename to tests/pos-custom-args/captures/captureRef-separate-compilation/Future_1.scala diff --git a/tests/pos/captureRef-separate-compilation/test_2.scala b/tests/pos-custom-args/captures/captureRef-separate-compilation/test_2.scala similarity index 100% rename from tests/pos/captureRef-separate-compilation/test_2.scala rename to tests/pos-custom-args/captures/captureRef-separate-compilation/test_2.scala From 36722f1652aebd6f53d85f0fddef920763ed9abf Mon Sep 17 00:00:00 2001 From: Pascal Weisenburger Date: Sat, 20 Apr 2024 12:53:57 +0200 Subject: [PATCH 158/465] add missing dependency to AsyncFile community build project --- 
community-build/src/scala/dotty/communitybuild/projects.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index ae030dc66336..cc0f0ac1f20f 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -605,7 +605,7 @@ object projects: project = "AsyncFile", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scissLog, scalatest), + dependencies = List(scissLog, scissModel, scalatest), ) lazy val scissSpan = SbtCommunityProject( From 2f7511058f0233bf093ddd564321fda7d3f8371e Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 20 Apr 2024 17:40:39 +0200 Subject: [PATCH 159/465] Make inline proxy vals have inferred types Fixes #20237 since it erases illegal capture sets containing skolem types. --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 16 ++++++++++------ .../src/dotty/tools/dotc/inlines/Inliner.scala | 6 +++--- tests/pos/i20237.scala | 15 +++++++++++++++ 3 files changed, 28 insertions(+), 9 deletions(-) create mode 100644 tests/pos/i20237.scala diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 9ab41859f170..60691d279c38 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -369,12 +369,16 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def setupTraverser(recheckDef: DefRecheck) = new TreeTraverserWithPreciseImportContexts: def transformResultType(tpt: TypeTree, sym: Symbol)(using Context): Unit = - transformTT(tpt, - boxed = !ccConfig.allowUniversalInBoxed && sym.is(Mutable, butNot = Method), - // types of mutable variables are boxed in pre 3.3 codee - exact = sym.allOverriddenSymbols.hasNext, - // types of symbols that override a parent don't get a capture 
set TODO drop - ) + try + transformTT(tpt, + boxed = !ccConfig.allowUniversalInBoxed && sym.is(Mutable, butNot = Method), + // types of mutable variables are boxed in pre 3.3 codee + exact = sym.allOverriddenSymbols.hasNext, + // types of symbols that override a parent don't get a capture set TODO drop + ) + catch case ex: IllegalCaptureRef => + capt.println(i"fail while transforming result type $tpt of $sym") + throw ex val addDescription = new TypeTraverser: def traverse(tp: Type) = tp match case tp @ CapturingType(parent, refs) => diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 629bc2ed3b16..4ee3682626c4 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -237,7 +237,7 @@ class Inliner(val call: tpd.Tree)(using Context): if bindingFlags.is(Inline) && argIsBottom then newArg = Typed(newArg, TypeTree(formal.widenExpr)) // type ascribe RHS to avoid type errors in expansion. 
See i8612.scala if isByName then DefDef(boundSym, newArg) - else ValDef(boundSym, newArg) + else ValDef(boundSym, newArg, inferred = true) }.withSpan(boundSym.span) inlining.println(i"parameter binding: $binding, $argIsBottom") buf += binding @@ -319,7 +319,7 @@ class Inliner(val call: tpd.Tree)(using Context): else pre val binding = accountForOpaques( - ValDef(selfSym.asTerm, QuoteUtils.changeOwnerOfTree(rhs, selfSym)).withSpan(selfSym.span)) + ValDef(selfSym.asTerm, QuoteUtils.changeOwnerOfTree(rhs, selfSym), inferred = true).withSpan(selfSym.span)) bindingsBuf += binding inlining.println(i"proxy at $level: $selfSym = ${bindingsBuf.last}") lastSelf = selfSym @@ -368,7 +368,7 @@ class Inliner(val call: tpd.Tree)(using Context): RefinedType(parent, refinement._1, TypeAlias(refinement._2)) ) val refiningSym = newSym(InlineBinderName.fresh(), Synthetic, refinedType).asTerm - val refiningDef = ValDef(refiningSym, tpd.ref(ref).cast(refinedType)).withSpan(span) + val refiningDef = ValDef(refiningSym, tpd.ref(ref).cast(refinedType), inferred = true).withSpan(span) inlining.println(i"add opaque alias proxy $refiningDef for $ref in $tp") bindingsBuf += refiningDef opaqueProxies += ((ref, refiningSym.termRef)) diff --git a/tests/pos/i20237.scala b/tests/pos/i20237.scala new file mode 100644 index 000000000000..da3e902b78b4 --- /dev/null +++ b/tests/pos/i20237.scala @@ -0,0 +1,15 @@ +import language.experimental.captureChecking +import scala.annotation.capability + +@capability class Cap: + def use[T](body: Cap ?=> T) = body(using this) + +class Box[T](body: Cap ?=> T): + inline def open(using cap: Cap) = cap.use(body) + +object Box: + def make[T](body: Cap ?=> T)(using Cap): Box[T]^{body} = Box(body) + +def main = + given Cap = new Cap + val box = Box.make(1).open \ No newline at end of file From c8722e4a2798e7433baa0faa5081ade919210ea9 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 21 Apr 2024 18:21:16 +0200 Subject: [PATCH 160/465] CC: Give more info when context 
function parameters leak Previously we had: parameter `$contextual1` leaks into outer capture set of type parameter `T` of method `apply`. We now give info in what type the parameter appeared and who owns the method. It's still not great, but at least we see more info that could tell us about the context. --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 10 ++- .../captures/effect-swaps.check | 19 ++++++ .../captures/effect-swaps.scala | 68 +++++++++++++++++++ .../captures/leaking-iterators.check | 2 +- .../captures/usingLogFile.check | 6 +- 5 files changed, 98 insertions(+), 7 deletions(-) create mode 100644 tests/neg-custom-args/captures/effect-swaps.check create mode 100644 tests/neg-custom-args/captures/effect-swaps.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 9b6217033ede..4f57b5e0ed7a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -20,7 +20,7 @@ import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} import StdNames.nme -import NameKinds.{DefaultGetterName, WildcardParamName} +import NameKinds.{DefaultGetterName, WildcardParamName, UniqueNameKind} import reporting.trace /** The capture checker */ @@ -1288,10 +1288,14 @@ class CheckCaptures extends Recheck, SymTransformer: val added = widened.filter(isAllowed(_)) capt.println(i"heal $ref in $cs by widening to $added") if !added.subCaptures(cs, frozen = false).isOK then - val location = if meth.exists then i" of $meth" else "" + val location = if meth.exists then i" of ${meth.showLocated}" else "" + val paramInfo = + if ref.paramName.info.kind.isInstanceOf[UniqueNameKind] + then i"${ref.paramName} from ${ref.binder}" + else i"${ref.paramName}" val debugSetInfo = if ctx.settings.YccDebug.value then i" $cs" else "" report.error( - i"local reference ${ref.paramName} leaks 
into outer capture set$debugSetInfo of type parameter $paramName$location", + i"local reference $paramInfo leaks into outer capture set$debugSetInfo of type parameter $paramName$location", tree.srcPos) else widened.elems.foreach(recur) diff --git a/tests/neg-custom-args/captures/effect-swaps.check b/tests/neg-custom-args/captures/effect-swaps.check new file mode 100644 index 000000000000..086dce7f1841 --- /dev/null +++ b/tests/neg-custom-args/captures/effect-swaps.check @@ -0,0 +1,19 @@ +-- Error: tests/neg-custom-args/captures/effect-swaps.scala:62:6 ------------------------------------------------------- +61 | Result: +62 | Future: // error, escaping label from Result + | ^ + |local reference contextual$1 from (using contextual$1: boundary.Label[box Result[box Future[box T^?]^{fr, contextual$1, contextual$1}, box E^?]^?]^): + | box Result[box Future[box T^?]^{fr, contextual$1, contextual$1}, box E^?]^? leaks into outer capture set of type parameter T of method apply in object boundary +63 | fr.await.ok + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from effect-swaps.scala:37 +37 | boundary: + | ^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- +-- Error: tests/neg-custom-args/captures/effect-swaps.scala:66:11 ------------------------------------------------------ +66 | Result.make: // error, escaping label from Result + | ^^^^^^^^^^^ + |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): + | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result diff --git 
a/tests/neg-custom-args/captures/effect-swaps.scala b/tests/neg-custom-args/captures/effect-swaps.scala new file mode 100644 index 000000000000..e8e609411b15 --- /dev/null +++ b/tests/neg-custom-args/captures/effect-swaps.scala @@ -0,0 +1,68 @@ +import annotation.capability + +object boundary: + + @capability final class Label[-T] + + /** Abort current computation and instead return `value` as the value of + * the enclosing `boundary` call that created `label`. + */ + def break[T](value: T)(using label: Label[T]): Nothing = ??? + + def apply[T](body: Label[T] ?=> T): T = ??? +end boundary + +import boundary.{Label, break} + +class Async +class Future[+T]: + this: Future[T]^ => + def await(using Async^): T = ??? +object Future: + def apply[T](op: Async^ ?=> T)(using Async): Future[T]^{op} = ??? + +abstract class Result[+T, +E] +case class Ok[+T](value: T) extends Result[T, Nothing] +case class Err[+E](value: E) extends Result[Nothing, E] + +object Result: + extension [T, E](r: Result[T, E]) + + /** `_.ok` propagates Err to current Label */ + inline def ok(using Label[Result[Nothing, E]]): T = r match + case r: Ok[_] => r.value + case err => break(err.asInstanceOf[Err[E]]) + + transparent inline def apply[T, E](inline body: Label[Result[T, E]] ?=> T): Result[T, E] = + boundary: + val result = body + Ok(result) + + // same as apply, but not an inline method + def make[T, E](body: Label[Result[T, E]] ?=> T): Result[T, E] = + boundary: + val result = body + Ok(result) + +end Result + +def test[T, E](using Async) = + val good1: List[Future[Result[T, E]]] => Future[Result[List[T], E]] = frs => + Future: + Result: + frs.map(_.await.ok) // OK + + val good2: Result[Future[T], E] => Future[Result[T, E]] = rf => + Future: + Result: + rf.ok.await // OK, Future argument has type Result[T] + + def fail3(fr: Future[Result[T, E]]^) = + Result: + Future: // error, escaping label from Result + fr.await.ok + + def fail4(fr: Future[Result[T, E]]^) = + Result.make: // error, escaping 
label from Result + Future: + fr.await.ok diff --git a/tests/neg-custom-args/captures/leaking-iterators.check b/tests/neg-custom-args/captures/leaking-iterators.check index 0481a9a4d9e2..2f47a26e894a 100644 --- a/tests/neg-custom-args/captures/leaking-iterators.check +++ b/tests/neg-custom-args/captures/leaking-iterators.check @@ -1,4 +1,4 @@ -- Error: tests/neg-custom-args/captures/leaking-iterators.scala:56:2 -------------------------------------------------- 56 | usingLogFile: log => // error | ^^^^^^^^^^^^ - | local reference log leaks into outer capture set of type parameter R of method usingLogFile + | local reference log leaks into outer capture set of type parameter R of method usingLogFile in package cctest diff --git a/tests/neg-custom-args/captures/usingLogFile.check b/tests/neg-custom-args/captures/usingLogFile.check index ef0c5d1e77c9..bf5c1dc4f83a 100644 --- a/tests/neg-custom-args/captures/usingLogFile.check +++ b/tests/neg-custom-args/captures/usingLogFile.check @@ -1,12 +1,12 @@ -- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:14 ------------------------------------------------------ 23 | val later = usingLogFile { f => () => f.write(0) } // error | ^^^^^^^^^^^^ - | local reference f leaks into outer capture set of type parameter T of method usingLogFile + | local reference f leaks into outer capture set of type parameter T of method usingLogFile in object Test2 -- Error: tests/neg-custom-args/captures/usingLogFile.scala:28:23 ------------------------------------------------------ 28 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error | ^^^^^^^^^^^^ - | local reference f leaks into outer capture set of type parameter T of method usingLogFile + | local reference f leaks into outer capture set of type parameter T of method usingLogFile in object Test2 -- Error: tests/neg-custom-args/captures/usingLogFile.scala:44:16 ------------------------------------------------------ 44 | val later = usingFile("out", f => (y: 
Int) => xs.foreach(x => f.write(x + y))) // error | ^^^^^^^^^ - | local reference f leaks into outer capture set of type parameter T of method usingFile + | local reference f leaks into outer capture set of type parameter T of method usingFile in object Test3 From 8c6daaf99c42679e257633357ce1e8331370c9fe Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Fri, 22 Mar 2024 04:55:04 +0000 Subject: [PATCH 161/465] Find universal capability from parents --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 9b6217033ede..c56d5ff090d8 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -519,6 +519,16 @@ class CheckCaptures extends Recheck, SymTransformer: if sym.isConstructor then val cls = sym.owner.asClass + /** Check if the class or one of its parents has a root capability, + * which means that the class has a capability annotation or an impure + * function type. 
+ */ + def hasUniversalCapability(tp: Type): Boolean = tp match + case CapturingType(parent, ref) => + ref.isUniversal || hasUniversalCapability(parent) + case tp => + tp.isCapabilityClassRef || tp.parents.exists(hasUniversalCapability) + /** First half of result pair: * Refine the type of a constructor call `new C(t_1, ..., t_n)` * to C{val x_1: T_1, ..., x_m: T_m} where x_1, ..., x_m are the tracked @@ -528,7 +538,8 @@ class CheckCaptures extends Recheck, SymTransformer: */ def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = var refined: Type = core - var allCaptures: CaptureSet = initCs + var allCaptures: CaptureSet = if hasUniversalCapability(core) + then CaptureSet.universal else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol if getter.termRef.isTracked && !getter.is(Private) then From 09b7f166f3dcb68ed5264396a8c463bbc665dc10 Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Fri, 22 Mar 2024 16:11:43 +0000 Subject: [PATCH 162/465] Attempt to pass and check capability from parents correctly --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 11 +++++++++++ .../dotty/tools/dotc/cc/CheckCaptures.scala | 12 +----------- compiler/src/dotty/tools/dotc/cc/Setup.scala | 9 ++------- .../dotty/tools/dotc/core/TypeComparer.scala | 9 ++++++++- .../captures/extending-cap-classes.scala | 15 +++++++++++++++ .../extending-impure-function.scala.scala | 18 ++++++++++++++++++ 6 files changed, 55 insertions(+), 19 deletions(-) create mode 100644 tests/neg-custom-args/captures/extending-cap-classes.scala create mode 100644 tests/neg-custom-args/captures/extending-impure-function.scala.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 1c951a0c0846..6a8874839fb5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ 
b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -207,6 +207,17 @@ extension (tp: Type) case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) case _ => false + /** Check if the class has universal capability, which means: + * 1. the class has a capability annotation, + * 2. the class is an impure function type, + * 3. or one of its base classes has universal capability. + */ + def hasUniversalCapability(using Context): Boolean = tp match + case CapturingType(parent, ref) => + ref.isUniversal || parent.hasUniversalCapability + case tp => + tp.isCapabilityClassRef || tp.parents.exists(_.hasUniversalCapability) + /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index c56d5ff090d8..845d4bfdc8c2 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -519,16 +519,6 @@ class CheckCaptures extends Recheck, SymTransformer: if sym.isConstructor then val cls = sym.owner.asClass - /** Check if the class or one of its parents has a root capability, - * which means that the class has a capability annotation or an impure - * function type. 
- */ - def hasUniversalCapability(tp: Type): Boolean = tp match - case CapturingType(parent, ref) => - ref.isUniversal || hasUniversalCapability(parent) - case tp => - tp.isCapabilityClassRef || tp.parents.exists(hasUniversalCapability) - /** First half of result pair: * Refine the type of a constructor call `new C(t_1, ..., t_n)` * to C{val x_1: T_1, ..., x_m: T_m} where x_1, ..., x_m are the tracked @@ -538,7 +528,7 @@ class CheckCaptures extends Recheck, SymTransformer: */ def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = var refined: Type = core - var allCaptures: CaptureSet = if hasUniversalCapability(core) + var allCaptures: CaptureSet = if core.hasUniversalCapability then CaptureSet.universal else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 9ab41859f170..082074c84ffc 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -269,12 +269,6 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: CapturingType(fntpe, cs, boxed = false) else fntpe - /** Map references to capability classes C to C^ */ - private def expandCapabilityClass(tp: Type): Type = - if tp.isCapabilityClassRef - then CapturingType(tp, defn.expandedUniversalSet, boxed = false) - else tp - private def recur(t: Type): Type = normalizeCaptures(mapOver(t)) def apply(t: Type) = @@ -297,7 +291,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case t: TypeVar => this(t.underlying) case t => - if t.isCapabilityClassRef + // Map references to capability classes C to C^ + if t.hasUniversalCapability then CapturingType(t, defn.expandedUniversalSet, boxed = false) else recur(t) end expandAliases diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala 
b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index cee1ec7fffa8..0c237a0a5fd0 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -895,13 +895,20 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling canWidenAbstract && acc(true, tp) def tryBaseType(cls2: Symbol) = - val base = nonExprBaseType(tp1, cls2) + var base = nonExprBaseType(tp1, cls2) if base.exists && (base ne tp1) && (!caseLambda.exists || widenAbstractOKFor(tp2) || tp1.widen.underlyingClassRef(refinementOK = true).exists) then def checkBase = + // Strip existing capturing set from base type + base = base.stripCapturing + // Pass capture set of tp1 to base type + tp1 match + case tp1 @ CapturingType(_, refs1) => + base = CapturingType(base, refs1, tp1.isBoxed) + case _ => isSubType(base, tp2, if tp1.isRef(cls2) then approx else approx.addLow) && recordGadtUsageIf { MatchType.thatReducesUsingGadt(tp1) } if tp1.widenDealias.isInstanceOf[AndType] || base.isInstanceOf[OrType] then diff --git a/tests/neg-custom-args/captures/extending-cap-classes.scala b/tests/neg-custom-args/captures/extending-cap-classes.scala new file mode 100644 index 000000000000..17497e415a1e --- /dev/null +++ b/tests/neg-custom-args/captures/extending-cap-classes.scala @@ -0,0 +1,15 @@ +import annotation.capability + +class C1 +@capability class C2 extends C1 +class C3 extends C2 + +def test = + val x1: C1 = new C1 + val x2: C1 = new C2 // error + val x3: C1 = new C3 // error + + val y1: C2 = new C2 + val y2: C2 = new C3 + + val z1: C3 = new C3 \ No newline at end of file diff --git a/tests/neg-custom-args/captures/extending-impure-function.scala.scala b/tests/neg-custom-args/captures/extending-impure-function.scala.scala new file mode 100644 index 000000000000..25e7e035c9df --- /dev/null +++ b/tests/neg-custom-args/captures/extending-impure-function.scala.scala @@ -0,0 +1,18 @@ +class F extends (Int => Unit) { + def 
apply(x: Int): Unit = () +} + +def test = + val x1 = new (Int => Unit) { + def apply(x: Int): Unit = () + } + + val x2: Int -> Unit = new (Int => Unit) { // error + def apply(x: Int): Unit = () + } + + val y1: Int => Unit = new F + val y2: Int -> Unit = new F // error + + val z1 = () => () + val z2: () -> Unit = () => () From 83a409d85120e92a9d20ac9a1b3e1aec01b714ec Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Thu, 4 Apr 2024 03:57:56 +0000 Subject: [PATCH 163/465] Ignore capturing from parents when computing base type --- compiler/src/dotty/tools/dotc/core/SymDenotations.scala | 4 ++-- compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 9 +-------- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index f01d2faf86c4..05c16c38d646 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -23,7 +23,7 @@ import scala.util.control.NonFatal import config.Config import reporting.* import collection.mutable -import cc.{CapturingType, derivedCapturingType} +import cc.{CapturingType, derivedCapturingType, stripCapturing} import scala.annotation.internal.sharable import scala.compiletime.uninitialized @@ -2232,7 +2232,7 @@ object SymDenotations { tp match { case tp @ TypeRef(prefix, _) => def foldGlb(bt: Type, ps: List[Type]): Type = ps match { - case p :: ps1 => foldGlb(bt & recur(p), ps1) + case p :: ps1 => foldGlb(bt & recur(p.stripCapturing), ps1) case _ => bt } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 0c237a0a5fd0..cee1ec7fffa8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -895,20 +895,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends 
ConstraintHandling canWidenAbstract && acc(true, tp) def tryBaseType(cls2: Symbol) = - var base = nonExprBaseType(tp1, cls2) + val base = nonExprBaseType(tp1, cls2) if base.exists && (base ne tp1) && (!caseLambda.exists || widenAbstractOKFor(tp2) || tp1.widen.underlyingClassRef(refinementOK = true).exists) then def checkBase = - // Strip existing capturing set from base type - base = base.stripCapturing - // Pass capture set of tp1 to base type - tp1 match - case tp1 @ CapturingType(_, refs1) => - base = CapturingType(base, refs1, tp1.isBoxed) - case _ => isSubType(base, tp2, if tp1.isRef(cls2) then approx else approx.addLow) && recordGadtUsageIf { MatchType.thatReducesUsingGadt(tp1) } if tp1.widenDealias.isInstanceOf[AndType] || base.isInstanceOf[OrType] then From f6529c46f38cd6fc1a2adfa534d69ad203a2ca23 Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Mon, 22 Apr 2024 02:09:03 +0000 Subject: [PATCH 164/465] Store capability class information in a hash map during cc --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 15 ---------- .../dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- compiler/src/dotty/tools/dotc/cc/Setup.scala | 28 ++++++++++++++++- .../captures/extending-impure-function.scala | 30 +++++++++++++++++++ .../extending-impure-function.scala.scala | 18 ----------- 5 files changed, 58 insertions(+), 35 deletions(-) create mode 100644 tests/neg-custom-args/captures/extending-impure-function.scala delete mode 100644 tests/neg-custom-args/captures/extending-impure-function.scala.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 6a8874839fb5..42483599f1e6 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -203,21 +203,6 @@ extension (tp: Type) case _ => false - def isCapabilityClassRef(using Context) = tp.dealiasKeepAnnots match - case _: TypeRef | _: AppliedType => 
tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) - case _ => false - - /** Check if the class has universal capability, which means: - * 1. the class has a capability annotation, - * 2. the class is an impure function type, - * 3. or one of its base classes has universal capability. - */ - def hasUniversalCapability(using Context): Boolean = tp match - case CapturingType(parent, ref) => - ref.isUniversal || parent.hasUniversalCapability - case tp => - tp.isCapabilityClassRef || tp.parents.exists(_.hasUniversalCapability) - /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 845d4bfdc8c2..3b241f751403 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -528,7 +528,7 @@ class CheckCaptures extends Recheck, SymTransformer: */ def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = var refined: Type = core - var allCaptures: CaptureSet = if core.hasUniversalCapability + var allCaptures: CaptureSet = if setup.isCapabilityClassRef(core) then CaptureSet.universal else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 082074c84ffc..fef88a8ba6de 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -23,6 +23,7 @@ trait SetupAPI: def setupUnit(tree: Tree, recheckDef: DefRecheck)(using Context): Unit def isPreCC(sym: Symbol)(using Context): Boolean def postCheck()(using Context): Unit + def isCapabilityClassRef(tp: Type)(using Context): Boolean object Setup: @@ 
-67,6 +68,31 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: && !sym.owner.is(CaptureChecked) && !defn.isFunctionSymbol(sym.owner) + private val capabilityClassMap = new util.HashMap[Symbol, Boolean] + + /** Check if the class is capability, which means: + * 1. the class has a capability annotation, + * 2. or at least one of its parent type has universal capability. + */ + def isCapabilityClassRef(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match + case _: TypeRef | _: AppliedType => + val sym = tp.classSymbol + def checkSym: Boolean = + sym.hasAnnotation(defn.CapabilityAnnot) + || sym.info.parents.exists(hasUniversalCapability) + sym.isClass && capabilityClassMap.getOrElseUpdate(sym, checkSym) + case _ => false + + private def hasUniversalCapability(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match + case CapturingType(parent, refs) => + refs.isUniversal || hasUniversalCapability(parent) + case AnnotatedType(parent, ann) => + if ann.symbol.isRetains then + try ann.tree.toCaptureSet.isUniversal || hasUniversalCapability(parent) + catch case ex: IllegalCaptureRef => false + else hasUniversalCapability(parent) + case tp => isCapabilityClassRef(tp) + private def fluidify(using Context) = new TypeMap with IdempotentCaptRefMap: def apply(t: Type): Type = t match case t: MethodType => @@ -292,7 +318,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: this(t.underlying) case t => // Map references to capability classes C to C^ - if t.hasUniversalCapability + if isCapabilityClassRef(t) then CapturingType(t, defn.expandedUniversalSet, boxed = false) else recur(t) end expandAliases diff --git a/tests/neg-custom-args/captures/extending-impure-function.scala b/tests/neg-custom-args/captures/extending-impure-function.scala new file mode 100644 index 000000000000..e491b31caed5 --- /dev/null +++ b/tests/neg-custom-args/captures/extending-impure-function.scala @@ -0,0 +1,30 @@ +class F1 extends (Int => Unit) { + def apply(x: Int): 
Unit = () +} + +class F2 extends (Int -> Unit) { + def apply(x: Int): Unit = () +} + +def test = + val x1 = new (Int => Unit) { + def apply(x: Int): Unit = () + } + + val x2: Int -> Unit = new (Int => Unit) { // error + def apply(x: Int): Unit = () + } + + val x3: Int -> Unit = new (Int -> Unit) { + def apply(x: Int): Unit = () + } + + val y1: Int => Unit = new F1 + val y2: Int -> Unit = new F1 // error + val y3: Int => Unit = new F2 + val y4: Int -> Unit = new F2 + + val z1 = () => () + val z2: () -> Unit = () => () + val z3: () -> Unit = z1 + val z4: () => Unit = () => () diff --git a/tests/neg-custom-args/captures/extending-impure-function.scala.scala b/tests/neg-custom-args/captures/extending-impure-function.scala.scala deleted file mode 100644 index 25e7e035c9df..000000000000 --- a/tests/neg-custom-args/captures/extending-impure-function.scala.scala +++ /dev/null @@ -1,18 +0,0 @@ -class F extends (Int => Unit) { - def apply(x: Int): Unit = () -} - -def test = - val x1 = new (Int => Unit) { - def apply(x: Int): Unit = () - } - - val x2: Int -> Unit = new (Int => Unit) { // error - def apply(x: Int): Unit = () - } - - val y1: Int => Unit = new F - val y2: Int -> Unit = new F // error - - val z1 = () => () - val z2: () -> Unit = () => () From e3a9890adae25fa46ee4e5a069114467c12172a3 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Thu, 11 Apr 2024 17:30:21 +0200 Subject: [PATCH 165/465] Add quote ASTs to TASTy Add AST nodes for Quote, Splice, QuotePattern, and QuoteSplice to TASTy. 
--- compiler/src/dotty/tools/dotc/ast/tpd.scala | 6 +++ .../tools/dotc/core/tasty/TreePickler.scala | 38 ++++++++++--------- .../tools/dotc/core/tasty/TreeUnpickler.scala | 25 ++++++++++-- .../dotty/tools/dotc/typer/TypeAssigner.scala | 6 +++ tasty/src/dotty/tools/tasty/TastyFormat.scala | 15 ++++++-- tests/{pos => pos-macros}/i13532/Bar.scala | 0 tests/{pos => pos-macros}/i13532/Foo.scala | 0 .../i13532/TestMacro.scala | 0 tests/{pos => pos-macros}/i16331/Macro.scala | 0 tests/{pos => pos-macros}/i16331/Main.scala | 0 tests/{pos => pos-macros}/i19604/ZSet.scala | 0 tests/{pos => pos-macros}/i19604/core.scala | 0 tests/{pos => pos-macros}/i19604/macro.scala | 0 .../{pos => pos-macros}/i19604/prelude.scala | 0 tests/run-staging/quote-nested-1.check | 2 +- tests/run-staging/quote-nested-2.check | 2 +- tests/run-staging/quote-nested-5.check | 2 +- 17 files changed, 68 insertions(+), 28 deletions(-) rename tests/{pos => pos-macros}/i13532/Bar.scala (100%) rename tests/{pos => pos-macros}/i13532/Foo.scala (100%) rename tests/{pos => pos-macros}/i13532/TestMacro.scala (100%) rename tests/{pos => pos-macros}/i16331/Macro.scala (100%) rename tests/{pos => pos-macros}/i16331/Main.scala (100%) rename tests/{pos => pos-macros}/i19604/ZSet.scala (100%) rename tests/{pos => pos-macros}/i19604/core.scala (100%) rename tests/{pos => pos-macros}/i19604/macro.scala (100%) rename tests/{pos => pos-macros}/i19604/prelude.scala (100%) diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 71b85d97a187..8746277d1b26 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -178,6 +178,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Splice(expr: Tree, tpe: Type)(using Context): Splice = untpd.Splice(expr).withType(tpe) + def Splice(expr: Tree)(using Context): Splice = + ta.assignType(untpd.Splice(expr), expr) + + def SplicePattern(pat: Tree, args: List[Tree], 
tpe: Type)(using Context): SplicePattern = + untpd.SplicePattern(pat, args).withType(tpe) + def Hole(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpe: Type)(using Context): Hole = untpd.Hole(isTerm, idx, args, content).withType(tpe) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 0a8669292a74..f5625e694822 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -701,28 +701,30 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleTree(alias) } case tree @ Quote(body, Nil) => - // TODO: Add QUOTE tag to TASTy assert(body.isTerm, """Quote with type should not be pickled. |Quote with type should only exists after staging phase at staging level 0.""".stripMargin) - pickleTree( - // scala.quoted.runtime.Expr.quoted[]() - ref(defn.QuotedRuntime_exprQuote) - .appliedToType(tree.bodyType) - .appliedTo(body) - .withSpan(tree.span) - ) + writeByte(QUOTE) + pickleTree(body) case Splice(expr) => - pickleTree( // TODO: Add SPLICE tag to TASTy - // scala.quoted.runtime.Expr.splice[]() - ref(defn.QuotedRuntime_exprSplice) - .appliedToType(tree.tpe) - .appliedTo(expr) - .withSpan(tree.span) - ) - case tree: QuotePattern => - // TODO: Add QUOTEPATTERN tag to TASTy - pickleTree(QuotePatterns.encode(tree)) + writeByte(SPLICE) + pickleTree(expr) + case QuotePattern(bindings, body, quotes) => + writeByte(QUOTEPATTERN) + withLength { + if body.isType then writeByte(EXPLICITtpt) + pickleTree(body) + pickleTree(quotes) + pickleType(tree.tpe) + bindings.foreach(pickleTree) + } + case SplicePattern(pat, args) => + writeByte(SPLICEPATTERN) + withLength { + pickleTree(pat) + pickleType(tree.tpe) + args.foreach(pickleTree) + } case Hole(_, idx, args, _) => writeByte(HOLE) withLength { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala 
b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 073edb536151..5d4fcbfc5abe 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1314,6 +1314,10 @@ class TreeUnpickler(reader: TastyReader, NamedArg(readName(), readTree()) case EXPLICITtpt => readTpt() + case QUOTE => + Quote(readTree(), Nil) + case SPLICE => + Splice(readTree()) case _ => readPathTree() } @@ -1379,9 +1383,9 @@ class TreeUnpickler(reader: TastyReader, val fn = readTree() val args = until(end)(readTree()) if fn.symbol.isConstructor then constructorApply(fn, args) - else if fn.symbol == defn.QuotedRuntime_exprQuote then quotedExpr(fn, args) - else if fn.symbol == defn.QuotedRuntime_exprSplice then splicedExpr(fn, args) - else if fn.symbol == defn.QuotedRuntime_exprNestedSplice then nestedSpliceExpr(fn, args) + else if fn.symbol == defn.QuotedRuntime_exprQuote then quotedExpr(fn, args) // decode pre 3.5.0 encoding + else if fn.symbol == defn.QuotedRuntime_exprSplice then splicedExpr(fn, args) // decode pre 3.5.0 encoding + else if fn.symbol == defn.QuotedRuntime_exprNestedSplice then nestedSpliceExpr(fn, args) // decode pre 3.5.0 encoding else tpd.Apply(fn, args) case TYPEAPPLY => tpd.TypeApply(readTree(), until(end)(readTpt())) @@ -1503,7 +1507,7 @@ class TreeUnpickler(reader: TastyReader, val unapply = UnApply(fn, implicitArgs, argPats, patType) if fn.symbol == defn.QuoteMatching_ExprMatch_unapply || fn.symbol == defn.QuoteMatching_TypeMatch_unapply - then QuotePatterns.decode(unapply) + then QuotePatterns.decode(unapply) // decode pre 3.5.0 encoding else unapply case REFINEDtpt => val refineCls = symAtAddr.getOrElse(start, @@ -1551,6 +1555,19 @@ class TreeUnpickler(reader: TastyReader, val hi = if currentAddr == end then lo else readTpt() val alias = if currentAddr == end then EmptyTree else readTpt() createNullableTypeBoundsTree(lo, hi, alias) + case QUOTEPATTERN => + val bodyReader = 
fork + skipTree() + val quotes = readTree() + val patType = readType() + val bindings = readStats(ctx.owner, end) + val body = bodyReader.readTree() // need bindings in scope, so needs to be read before + QuotePattern(bindings, body, quotes, patType) + case SPLICEPATTERN => + val pat = readTree() + val patType = readType() + val args = until(end)(readTree()) + SplicePattern(pat, args, patType) case HOLE => readHole(end, isTerm = true) case _ => diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index c7476f5d9777..64722d51708c 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -531,6 +531,12 @@ trait TypeAssigner { def assignType(tree: untpd.UnApply, proto: Type)(using Context): UnApply = tree.withType(proto) + def assignType(tree: untpd.Splice, expr: Tree)(using Context): Splice = + val tpe = expr.tpe // Quotes ?=> Expr[T] + .baseType(defn.FunctionSymbol(1, isContextual = true)).argTypes.last // Expr[T] + .baseType(defn.QuotedExprClass).argTypes.head // T + tree.withType(tpe) + def assignType(tree: untpd.QuotePattern, proto: Type)(using Context): QuotePattern = tree.withType(proto) diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 6cd63d0d8f01..606f61a3baf8 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -110,10 +110,14 @@ Standard-Section: "ASTs" TopLevelStat* WHILE Length cond_Term body_Term -- while cond do body REPEATED Length elem_Type elem_Term* -- Varargs argument of type `elem` SELECTouter Length levels_Nat qual_Term underlying_Type -- Follow `levels` outer links, starting from `qual`, with given `underlying` type + QUOTE body_Term -- Quoted expression `'{ body }` + SPLICE expr_Term -- Spliced expression `${ expr }` + SPLICEPATTERN Length pat_Term tpe_Type args_Term* -- Pattern 
splice `${pat}` or `$pat(args*)` in a quoted pattern of type `tpe` -- patterns: BIND Length boundName_NameRef patType_Type pat_Term -- name @ pat, wherev `patType` is the type of the bound symbol ALTERNATIVE Length alt_Term* -- alt1 | ... | altn as a pattern UNAPPLY Length fun_Term ImplicitArg* pat_Type pat_Term* -- Unapply node `fun(_: pat_Type)(implicitArgs)` flowing into patterns `pat`. + QUOTEPATTERN Length body_Term quotes_Term pat_Type bindings_Term* -- Quote pattern node `'{ bindings*; body }(using quotes)` -- type trees: IDENTtpt NameRef Type -- Used for all type idents SELECTtpt NameRef qual_Term -- qual.name @@ -543,7 +547,8 @@ object TastyFormat { final val BOUNDED = 102 final val EXPLICITtpt = 103 final val ELIDED = 104 - + final val QUOTE = 105 + final val SPLICE = 106 // Tree Cat. 4: tag Nat AST final val firstNatASTTreeTag = IDENT @@ -610,8 +615,8 @@ object TastyFormat { final val TYPEREFin = 175 final val SELECTin = 176 final val EXPORT = 177 - // final val ??? = 178 - // final val ??? 
= 179 + final val QUOTEPATTERN = 178 + final val SPLICEPATTERN = 179 final val METHODtype = 180 final val APPLYsigpoly = 181 @@ -858,6 +863,10 @@ object TastyFormat { case PROTECTEDqualified => "PROTECTEDqualified" case EXPLICITtpt => "EXPLICITtpt" case ELIDED => "ELIDED" + case QUOTE => "QUOTE" + case SPLICE => "SPLICE" + case QUOTEPATTERN => "QUOTEPATTERN" + case SPLICEPATTERN => "SPLICEPATTERN" case HOLE => "HOLE" } diff --git a/tests/pos/i13532/Bar.scala b/tests/pos-macros/i13532/Bar.scala similarity index 100% rename from tests/pos/i13532/Bar.scala rename to tests/pos-macros/i13532/Bar.scala diff --git a/tests/pos/i13532/Foo.scala b/tests/pos-macros/i13532/Foo.scala similarity index 100% rename from tests/pos/i13532/Foo.scala rename to tests/pos-macros/i13532/Foo.scala diff --git a/tests/pos/i13532/TestMacro.scala b/tests/pos-macros/i13532/TestMacro.scala similarity index 100% rename from tests/pos/i13532/TestMacro.scala rename to tests/pos-macros/i13532/TestMacro.scala diff --git a/tests/pos/i16331/Macro.scala b/tests/pos-macros/i16331/Macro.scala similarity index 100% rename from tests/pos/i16331/Macro.scala rename to tests/pos-macros/i16331/Macro.scala diff --git a/tests/pos/i16331/Main.scala b/tests/pos-macros/i16331/Main.scala similarity index 100% rename from tests/pos/i16331/Main.scala rename to tests/pos-macros/i16331/Main.scala diff --git a/tests/pos/i19604/ZSet.scala b/tests/pos-macros/i19604/ZSet.scala similarity index 100% rename from tests/pos/i19604/ZSet.scala rename to tests/pos-macros/i19604/ZSet.scala diff --git a/tests/pos/i19604/core.scala b/tests/pos-macros/i19604/core.scala similarity index 100% rename from tests/pos/i19604/core.scala rename to tests/pos-macros/i19604/core.scala diff --git a/tests/pos/i19604/macro.scala b/tests/pos-macros/i19604/macro.scala similarity index 100% rename from tests/pos/i19604/macro.scala rename to tests/pos-macros/i19604/macro.scala diff --git a/tests/pos/i19604/prelude.scala 
b/tests/pos-macros/i19604/prelude.scala similarity index 100% rename from tests/pos/i19604/prelude.scala rename to tests/pos-macros/i19604/prelude.scala diff --git a/tests/run-staging/quote-nested-1.check b/tests/run-staging/quote-nested-1.check index 455e123a642c..84b8a0f533fb 100644 --- a/tests/run-staging/quote-nested-1.check +++ b/tests/run-staging/quote-nested-1.check @@ -1 +1 @@ -((q: scala.quoted.Quotes) ?=> scala.quoted.runtime.Expr.quote[scala.Int](3).apply(using q)) +((q: scala.quoted.Quotes) ?=> scala.quoted.runtime.Expr.quote[3](3).apply(using q)) diff --git a/tests/run-staging/quote-nested-2.check b/tests/run-staging/quote-nested-2.check index 48ecf87577ab..39d1a05fad8e 100644 --- a/tests/run-staging/quote-nested-2.check +++ b/tests/run-staging/quote-nested-2.check @@ -1,4 +1,4 @@ ((q: scala.quoted.Quotes) ?=> { - val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[scala.Int](4).apply(using q) + val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[4](4).apply(using q) ((contextual$2: scala.quoted.Quotes) ?=> a).apply(using q) }) diff --git a/tests/run-staging/quote-nested-5.check b/tests/run-staging/quote-nested-5.check index 47d39cc92611..6561c9cbbb2e 100644 --- a/tests/run-staging/quote-nested-5.check +++ b/tests/run-staging/quote-nested-5.check @@ -1,4 +1,4 @@ ((q: scala.quoted.Quotes) ?=> { - val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[scala.Int](4).apply(using q) + val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[4](4).apply(using q) ((q2: scala.quoted.Quotes) ?=> ((contextual$2: scala.quoted.Quotes) ?=> a).apply(using q2)) }.apply(using q)) From 31fe8977f71c9b94e4ca5c04c73eeb8f8f5f4de2 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Fri, 19 Apr 2024 11:57:29 +0200 Subject: [PATCH 166/465] Pickle type of quote and splices These types where encoded explicitly before in the type parameter of the `runtime.Expr.{expr,splice,nestedSplice}` methods. We still need them. 
--- .../dotty/tools/dotc/core/tasty/TreePickler.scala | 10 ++++++++-- .../dotty/tools/dotc/core/tasty/TreeUnpickler.scala | 8 ++++---- tasty/src/dotty/tools/tasty/TastyFormat.scala | 12 ++++++------ tests/run-staging/quote-nested-1.check | 2 +- tests/run-staging/quote-nested-2.check | 2 +- tests/run-staging/quote-nested-5.check | 2 +- 6 files changed, 21 insertions(+), 15 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index f5625e694822..55d25eeb3654 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -705,10 +705,16 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { """Quote with type should not be pickled. |Quote with type should only exists after staging phase at staging level 0.""".stripMargin) writeByte(QUOTE) - pickleTree(body) + withLength { + pickleTree(body) + pickleType(tree.bodyType) + } case Splice(expr) => writeByte(SPLICE) - pickleTree(expr) + withLength { + pickleTree(expr) + pickleType(tree.tpe) + } case QuotePattern(bindings, body, quotes) => writeByte(QUOTEPATTERN) withLength { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 5d4fcbfc5abe..ee3c98632b95 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1314,10 +1314,6 @@ class TreeUnpickler(reader: TastyReader, NamedArg(readName(), readTree()) case EXPLICITtpt => readTpt() - case QUOTE => - Quote(readTree(), Nil) - case SPLICE => - Splice(readTree()) case _ => readPathTree() } @@ -1555,6 +1551,10 @@ class TreeUnpickler(reader: TastyReader, val hi = if currentAddr == end then lo else readTpt() val alias = if currentAddr == end then EmptyTree else readTpt() createNullableTypeBoundsTree(lo, hi, alias) + case QUOTE 
=> + Quote(readTree(), Nil).withBodyType(readType()) + case SPLICE => + Splice(readTree()).withType(readType()) case QUOTEPATTERN => val bodyReader = fork skipTree() diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 606f61a3baf8..413702ea0d71 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -110,8 +110,8 @@ Standard-Section: "ASTs" TopLevelStat* WHILE Length cond_Term body_Term -- while cond do body REPEATED Length elem_Type elem_Term* -- Varargs argument of type `elem` SELECTouter Length levels_Nat qual_Term underlying_Type -- Follow `levels` outer links, starting from `qual`, with given `underlying` type - QUOTE body_Term -- Quoted expression `'{ body }` - SPLICE expr_Term -- Spliced expression `${ expr }` + QUOTE Length body_Term bodyTpe_Type -- Quoted expression `'{ body }` of a body typed as `bodyTpe` + SPLICE Length expr_Term tpe_Type -- Spliced expression `${ expr }` typed as `tpe` SPLICEPATTEN Length pat_Term tpe_Type args_Term* -- Pattern splice `${pat}` or `$pat(args*)` in a quoted pattern of type `tpe` -- patterns: BIND Length boundName_NameRef patType_Type pat_Term -- name @ pat, wherev `patType` is the type of the bound symbol @@ -547,8 +547,6 @@ object TastyFormat { final val BOUNDED = 102 final val EXPLICITtpt = 103 final val ELIDED = 104 - final val QUOTE = 105 - final val SPLICE = 106 // Tree Cat. 
4: tag Nat AST final val firstNatASTTreeTag = IDENT @@ -615,10 +613,12 @@ object TastyFormat { final val TYPEREFin = 175 final val SELECTin = 176 final val EXPORT = 177 - final val QUOTEPATTERN = 178 - final val SPLICEPATTERN = 179 + final val QUOTE = 178 + final val SPLICE = 179 final val METHODtype = 180 final val APPLYsigpoly = 181 + final val QUOTEPATTERN = 182 + final val SPLICEPATTERN = 183 final val MATCHtype = 190 final val MATCHtpt = 191 diff --git a/tests/run-staging/quote-nested-1.check b/tests/run-staging/quote-nested-1.check index 84b8a0f533fb..455e123a642c 100644 --- a/tests/run-staging/quote-nested-1.check +++ b/tests/run-staging/quote-nested-1.check @@ -1 +1 @@ -((q: scala.quoted.Quotes) ?=> scala.quoted.runtime.Expr.quote[3](3).apply(using q)) +((q: scala.quoted.Quotes) ?=> scala.quoted.runtime.Expr.quote[scala.Int](3).apply(using q)) diff --git a/tests/run-staging/quote-nested-2.check b/tests/run-staging/quote-nested-2.check index 39d1a05fad8e..48ecf87577ab 100644 --- a/tests/run-staging/quote-nested-2.check +++ b/tests/run-staging/quote-nested-2.check @@ -1,4 +1,4 @@ ((q: scala.quoted.Quotes) ?=> { - val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[4](4).apply(using q) + val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[scala.Int](4).apply(using q) ((contextual$2: scala.quoted.Quotes) ?=> a).apply(using q) }) diff --git a/tests/run-staging/quote-nested-5.check b/tests/run-staging/quote-nested-5.check index 6561c9cbbb2e..47d39cc92611 100644 --- a/tests/run-staging/quote-nested-5.check +++ b/tests/run-staging/quote-nested-5.check @@ -1,4 +1,4 @@ ((q: scala.quoted.Quotes) ?=> { - val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[4](4).apply(using q) + val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[scala.Int](4).apply(using q) ((q2: scala.quoted.Quotes) ?=> ((contextual$2: scala.quoted.Quotes) ?=> a).apply(using q2)) }.apply(using q)) From 
743cc0b0f79a5886267ef6da2d5b101b6e98c54e Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Mon, 22 Apr 2024 09:39:38 +0200 Subject: [PATCH 167/465] Add type argument encoding to SPLICEPATTERN Future proof for #18271. --- compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala | 4 ++++ compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala | 3 ++- tasty/src/dotty/tools/tasty/TastyFormat.scala | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 55d25eeb3654..f8a0f725ea52 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -725,10 +725,14 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { bindings.foreach(pickleTree) } case SplicePattern(pat, args) => + val targs = Nil // SplicePattern `targs` will be added with #18271 writeByte(SPLICEPATTERN) withLength { pickleTree(pat) pickleType(tree.tpe) + for targ <- targs do + writeByte(EXPLICITtpt) + pickleTree(targ) args.foreach(pickleTree) } case Hole(_, idx, args, _) => diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index ee3c98632b95..64ea2d497295 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1566,7 +1566,8 @@ class TreeUnpickler(reader: TastyReader, case SPLICEPATTERN => val pat = readTree() val patType = readType() - val args = until(end)(readTree()) + val (targs, args) = until(end)(readTree()).span(_.isType) + assert(targs.isEmpty, "unexpected type arguments in SPLICEPATTERN") // `targs` will be needed for #18271. Until this fearure is added they should be empty. 
SplicePattern(pat, args, patType) case HOLE => readHole(end, isTerm = true) diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 413702ea0d71..164243d3b469 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -112,7 +112,7 @@ Standard-Section: "ASTs" TopLevelStat* SELECTouter Length levels_Nat qual_Term underlying_Type -- Follow `levels` outer links, starting from `qual`, with given `underlying` type QUOTE Length body_Term bodyTpe_Type -- Quoted expression `'{ body }` of a body typed as `bodyTpe` SPLICE Length expr_Term tpe_Type -- Spliced expression `${ expr }` typed as `tpe` - SPLICEPATTEN Length pat_Term tpe_Type args_Term* -- Pattern splice `${pat}` or `$pat(args*)` in a quoted pattern of type `tpe` + SPLICEPATTEN Length pat_Term tpe_Type targs_Type* args_Term* -- Pattern splice `${pat}` or `$pat[targs*](args*)` in a quoted pattern of type `tpe`. -- patterns: BIND Length boundName_NameRef patType_Type pat_Term -- name @ pat, wherev `patType` is the type of the bound symbol ALTERNATIVE Length alt_Term* -- alt1 | ... 
| altn as a pattern From 80a0cb5388aa1af9a88b9f394bcf181d79338fb4 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 16 Apr 2024 11:19:20 +0200 Subject: [PATCH 168/465] Stabilize reflect `SymbolMethods.isSuperAccessor` --- library/src/scala/quoted/Quotes.scala | 1 - project/MiMaFilters.scala | 5 ++++- .../src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala | 2 +- .../src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala | 3 --- .../run-tasty-inspector/stdlibExperimentalDefinitions.scala | 4 +--- 5 files changed, 6 insertions(+), 9 deletions(-) diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 2c2d35d6cf44..e5cdc3bf4fb7 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -4032,7 +4032,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def isClassConstructor: Boolean /** Is this the super accessor? */ - @experimental // TODO when stable, remove `dotty.tools.scaladoc.tasty.ClassLikeSupport.isSuperBridgeMethod` and use this method def isSuperAccessor: Boolean /** Is this the definition of a type? */ diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 46c060cacf39..b22b2e3446e5 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -48,7 +48,10 @@ object MiMaFilters { // Only exceptional cases should be added here. // Breaking changes since last reference version - Build.mimaPreviousDottyVersion -> Seq.empty, // We should never break backwards compatibility + Build.mimaPreviousDottyVersion -> // Seq.empty, // We should never break backwards compatibility + Seq( + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#SymbolMethods.isSuperAccessor"), // This change is acceptable. See comment in `Breaking changes since last LTS`. 
+ ), // Breaking changes since last LTS Build.mimaPreviousLTSDottyVersion -> Seq( diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 1598accf4f40..88d57cdb9853 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -235,7 +235,7 @@ trait ClassLikeSupport: extension (c: ClassDef) def extractMembers: Seq[Member] = { val inherited = c.getNonTrivialInheritedMemberTrees.collect { - case dd: DefDef if !dd.symbol.isClassConstructor && !(dd.symbol.isSuperBridgeMethod || dd.symbol.isDefaultHelperMethod) => dd + case dd: DefDef if !dd.symbol.isClassConstructor && !(dd.symbol.isSuperAccessor || dd.symbol.isDefaultHelperMethod) => dd case other => other } c.membersToDocument.flatMap(parseMember(c)) ++ diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala index 2f85345baddc..39c1a5d8afd5 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala @@ -10,9 +10,6 @@ object SyntheticsSupport: import reflect._ s.flags.is(Flags.Synthetic) || s.flags.is(Flags.FieldAccessor) || s.isDefaultHelperMethod - // TODO remove and use `SymbolMethods.isSuperAccessor` - def isSuperBridgeMethod: Boolean = s.name.contains("$super$") - def isDefaultHelperMethod: Boolean = ".*\\$default\\$\\d+$".r.matches(s.name) def isOpaque: Boolean = diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 2d0081977435..5e6e5700b719 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -51,9 +51,7 @@ val experimentalDefinitionInLibrary = Set( "scala.annotation.init$.widen", 
"scala.annotation.init$.region", - //// New APIs: Quotes - // Can be stabilized in 3.5.0 or later - "scala.quoted.Quotes.reflectModule.SymbolMethods.isSuperAccessor", + //// New APIs: Quotes // Can be stabilized in 3.5.0 (unsure) or later "scala.quoted.Quotes.reflectModule.CompilationInfoModule.XmacroSettings", // Cant be stabilized yet. From 49dfd52ed70aa4a6fbba35e52a0fa47082ec1ebd Mon Sep 17 00:00:00 2001 From: Quentin Bernet Date: Tue, 9 Jan 2024 13:17:38 +0100 Subject: [PATCH 169/465] Implement SIP-42 Support for binary integer literals Implement, test and document SIP-42 (Implement is a strong word, I uncommented a single line) --- .../dotty/tools/dotc/parsing/Scanners.scala | 2 +- .../other-new-features/binary-literals.md | 19 +++++ docs/_spec/01-lexical-syntax.md | 5 +- docs/sidebar.yml | 1 + .../referenceReplacements/sidebar.yml | 1 + tests/neg/binaryLiterals.scala | 8 +++ tests/run/binaryLiterals.scala | 72 +++++++++++++++++++ 7 files changed, 105 insertions(+), 3 deletions(-) create mode 100644 docs/_docs/reference/other-new-features/binary-literals.md create mode 100644 tests/neg/binaryLiterals.scala create mode 100644 tests/run/binaryLiterals.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index ea43706e9fdb..3f9e8ca6532e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -884,7 +884,7 @@ object Scanners { nextChar() ch match { case 'x' | 'X' => base = 16 ; nextChar() - //case 'b' | 'B' => base = 2 ; nextChar() + case 'b' | 'B' => base = 2 ; nextChar() case _ => base = 10 ; putChar('0') } if (base != 10 && !isNumberSeparator(ch) && digit2int(ch, base) < 0) diff --git a/docs/_docs/reference/other-new-features/binary-literals.md b/docs/_docs/reference/other-new-features/binary-literals.md new file mode 100644 index 000000000000..ba19fdd3d7f7 --- /dev/null +++ 
b/docs/_docs/reference/other-new-features/binary-literals.md @@ -0,0 +1,19 @@ +--- +layout: doc-page +title: "Binary Integer Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/binary-integer-literals.html +--- + +A new syntax for integer literals has been added, it is now possible to do the following: +```scala +val bitmask = 0b0010_0000 // equivalent to 32, 0x20 +``` + +Binary integer literals behave similarly to hex integer literals (`0x...`), for example: +* Both `0b...` and `0B...` are allowed +* `0b`/`0B` on its own is disallowed, possible alternatives: `0`, `0b0`, `0B0` +* Only `0` and `1` are allowed after the b (`b`/`B`) +* Underscores `_` are allowed anywhere between digits, and are ignored: `0b__1 == 0b1` + + +Note: This change has been backported to Scala 2.13.13, it is therefore not technically a changed feature diff --git a/docs/_spec/01-lexical-syntax.md b/docs/_spec/01-lexical-syntax.md index 7dfcea87bd2d..e1686204116e 100644 --- a/docs/_spec/01-lexical-syntax.md +++ b/docs/_spec/01-lexical-syntax.md @@ -332,9 +332,10 @@ Literal ::= [‘-’] integerLiteral ### Integer Literals ```ebnf -integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit [{binaryDigit | ‘_’} binaryDigit] ``` Values of type `Int` are all integer numbers between $-2\^{31}$ and $2\^{31}-1$, inclusive. @@ -357,7 +358,7 @@ The numeric ranges given by these types are: The digits of a numeric literal may be separated by arbitrarily many underscores for purposes of legibility. 
> ```scala -> 0 21_000 0x7F -42L 0xFFFF_FFFF +> 0 21_000 0x7F -42L 0xFFFF_FFFF 0b0100_0010 > ``` ### Floating Point Literals diff --git a/docs/sidebar.yml b/docs/sidebar.yml index a0011b026cef..5d72f15838cd 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -81,6 +81,7 @@ subsection: - page: reference/other-new-features/safe-initialization.md - page: reference/other-new-features/type-test.md - page: reference/other-new-features/experimental-defs.md + - page: reference/other-new-features/binary-literals.md - title: Other Changed Features directory: changed-features index: reference/changed-features/changed-features.md diff --git a/project/resources/referenceReplacements/sidebar.yml b/project/resources/referenceReplacements/sidebar.yml index de0f3d7bec2c..240085b681f2 100644 --- a/project/resources/referenceReplacements/sidebar.yml +++ b/project/resources/referenceReplacements/sidebar.yml @@ -77,6 +77,7 @@ subsection: - page: reference/other-new-features/safe-initialization.md - page: reference/other-new-features/type-test.md - page: reference/other-new-features/experimental-defs.md + - page: reference/other-new-features/binary-literals.md - title: Other Changed Features directory: changed-features index: reference/changed-features/changed-features.md diff --git a/tests/neg/binaryLiterals.scala b/tests/neg/binaryLiterals.scala new file mode 100644 index 000000000000..5d5f0b4986fc --- /dev/null +++ b/tests/neg/binaryLiterals.scala @@ -0,0 +1,8 @@ + +object Test: + val x = 0b1__0000_0000_0000_0000__0000_0000_0000_0000 // error: number too large + val X = 0B1__0000_0000_0000_0000__0000_0000_0000_0000 // error: number too large + val y = 0b1__0000_0000_0000_0000__0000_0000_0000_0000__0000_0000_0000_0000__0000_0000_0000_0000L // error: number too large + val Y = 0B1__0000_0000_0000_0000__0000_0000_0000_0000__0000_0000_0000_0000__0000_0000_0000_0000L // error: number too large + 0b // error: invalid literal number + 0b2 // error: invalid literal number diff --git 
a/tests/run/binaryLiterals.scala b/tests/run/binaryLiterals.scala new file mode 100644 index 000000000000..5ac8c7b6f8bc --- /dev/null +++ b/tests/run/binaryLiterals.scala @@ -0,0 +1,72 @@ +@main +def Test = + val kenobi = 0b1 + + assert(kenobi == 1) + + assert(0B0000 == 0) + assert(0B0001 == 1) + assert(0B0010 == 2) + assert(0B0100 == 4) + assert(0B1000 == 8) + + assert(0b0000 == 0) + assert(0b0001 == 1) + assert(0b0010 == 2) + assert(0b0100 == 4) + assert(0b1000 == 8) + + assert(0b0001_0000 == 16) + assert(0b0010_0000 == 32) + assert(0b0100_0000 == 64) + assert(0b1000_0000 == 128) + + assert(0b0001_0000_0000 == 256) + assert(0b0010_0000_0000 == 512) + assert(0b0100_0000_0000 == 1024) + assert(0b1000_0000_0000 == 2048) + + assert(0b0001_0000_0000_0000 == 4096) + assert(0b0010_0000_0000_0000 == 8192) + assert(0b0100_0000_0000_0000 == 16384) + assert(0b1000_0000_0000_0000 == 32768) + + assert(0b0001__0000_0000_0000_0000 == 65536) + assert(0b0010__0000_0000_0000_0000 == 131072) + assert(0b0100__0000_0000_0000_0000 == 262144) + assert(0b1000__0000_0000_0000_0000 == 524288) + + assert(0b0001_0000__0000_0000_0000_0000 == 1048576) + assert(0b0010_0000__0000_0000_0000_0000 == 2097152) + assert(0b0100_0000__0000_0000_0000_0000 == 4194304) + assert(0b1000_0000__0000_0000_0000_0000 == 8388608) + + assert(0b0001_0000_0000__0000_0000_0000_0000 == 16777216) + assert(0b0010_0000_0000__0000_0000_0000_0000 == 33554432) + assert(0b0100_0000_0000__0000_0000_0000_0000 == 67108864) + assert(0b1000_0000_0000__0000_0000_0000_0000 == 134217728) + + assert(0b0001_0000_0000_0000__0000_0000_0000_0000 == 268435456) + assert(0b0010_0000_0000_0000__0000_0000_0000_0000 == 536870912) + assert(0b0100_0000_0000_0000__0000_0000_0000_0000 == 1073741824) + assert(0b1000_0000_0000_0000__0000_0000_0000_0000L == 2147483648L) + + assert(0b1000_0000_0000_0000__0000_0000_0000_0000 == -2147483648) // Signed ! 
+ assert(0b1111_1111_1111_1111__1111_1111_1111_1111 == -1) + + // Randomly generated using https://numbergenerator.org/random-32-bit-binary-number#!numbers=10&length=32&addfilters= + // Converted to signed decimal using https://onlinetoolz.net/unsigned-signed#base=2&bits=32 + assert(0b0110_1000_1100_0101_0010_1100_0100_0011 == 1757752387) + assert(0b1111_0101_0100_1011_0101_1000_0011_0110 == -179611594) + assert(0b0000_0011_0000_1010_1010_0011_0000_0000 == 51028736) + assert(0b0101_0010_1111_1001_0100_0101_1101_1011 == 1392068059) + assert(0b1001_0000_1111_1001_1011_1101_1100_1111 == -1862681137) + + assert(0B0000_0111_1110_1100_0111_1100_1000_0010 == 132938882) + assert(0B0000_1011_0111_1011_0001_1010_1010_1000 == 192617128) + assert(0B1100_1100_1000_1010_1111_0111_0100_1101 == -863307955) + assert(0B1000_0000_0001_0010_0001_1001_0101_1110 == -2146297506) + assert(0B1110_0000_0110_1100_0111_0110_1100_1111 == -529762609) + + assert(0b0010_1001_0101_1001__1010_0100_1000_1010__1001_1000_0011_0111__1100_1011_0111_0101L == 2979593543648529269L) + assert(0b1101_1110_0100_1000__0010_1101_1010_0010__0111_1000_1111_1001__1010_1001_0101_1000L == -2429641823128802984L) From f63f35f28fc3a7cb6bcaa3b5e0e79394582c1700 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 23 Apr 2024 09:00:57 +0200 Subject: [PATCH 170/465] Fix Dotty CI badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6c3212f0676b..7a2bda3f8073 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ Dotty ===== -[![Dotty CI](https://github.com/scala/scala3/workflows/Dotty/badge.svg?branch=master)](https://github.com/scala/scala3/actions?query=branch%3Amain) +[![Dotty CI](https://github.com/scala/scala3/workflows/Dotty/badge.svg?branch=main)](https://github.com/scala/scala3/actions?query=branch%3Amain) [![Join the chat at https://discord.com/invite/scala](https://img.shields.io/discord/632150470000902164)](https://discord.com/invite/scala) * 
[Documentation](https://docs.scala-lang.org/scala3/) From 6e080775f9849b6d532940b2c972bfef6ba4f00b Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 23 Apr 2024 17:22:42 +0200 Subject: [PATCH 171/465] Update test to show where the difference lies --- .../captures/effect-swaps.check | 28 ++++---- .../captures/effect-swaps.scala | 66 ++++++++++--------- 2 files changed, 49 insertions(+), 45 deletions(-) diff --git a/tests/neg-custom-args/captures/effect-swaps.check b/tests/neg-custom-args/captures/effect-swaps.check index 086dce7f1841..bda3509645d1 100644 --- a/tests/neg-custom-args/captures/effect-swaps.check +++ b/tests/neg-custom-args/captures/effect-swaps.check @@ -1,19 +1,21 @@ --- Error: tests/neg-custom-args/captures/effect-swaps.scala:62:6 ------------------------------------------------------- -61 | Result: -62 | Future: // error, escaping label from Result - | ^ - |local reference contextual$1 from (using contextual$1: boundary.Label[box Result[box Future[box T^?]^{fr, contextual$1, contextual$1}, box E^?]^?]^): - | box Result[box Future[box T^?]^{fr, contextual$1, contextual$1}, box E^?]^? 
leaks into outer capture set of type parameter T of method apply in object boundary -63 | fr.await.ok +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:64:8 ---------------------------------- +63 | Result: +64 | Future: // error, escaping label from Result + | ^ + | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] + | Required: Result[Future[T], Nothing] +65 | fr.await.ok |-------------------------------------------------------------------------------------------------------------------- |Inline stack trace |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from effect-swaps.scala:37 -37 | boundary: - | ^^^^^^^^ + |This location contains code that was inlined from effect-swaps.scala:41 +41 | boundary(Ok(body)) + | ^^^^^^^^ -------------------------------------------------------------------------------------------------------------------- --- Error: tests/neg-custom-args/captures/effect-swaps.scala:66:11 ------------------------------------------------------ -66 | Result.make: // error, escaping label from Result - | ^^^^^^^^^^^ + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/effect-swaps.scala:68:15 ------------------------------------------------------ +68 | Result.make: //lbl ?=> // error, escaping label from Result + | ^^^^^^^^^^^ |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result diff --git a/tests/neg-custom-args/captures/effect-swaps.scala b/tests/neg-custom-args/captures/effect-swaps.scala index e8e609411b15..1d72077bb8da 100644 --- a/tests/neg-custom-args/captures/effect-swaps.scala +++ b/tests/neg-custom-args/captures/effect-swaps.scala @@ -14,55 
+14,57 @@ end boundary import boundary.{Label, break} -class Async +@capability trait Async +object Async: + def blocking[T](body: Async ?=> T): T = ??? + class Future[+T]: this: Future[T]^ => - def await(using Async^): T = ??? + def await(using Async): T = ??? object Future: - def apply[T](op: Async^ ?=> T)(using Async): Future[T]^{op} = ??? + def apply[T](op: Async ?=> T)(using Async): Future[T]^{op} = ??? + +enum Result[+T, +E]: + case Ok[+T](value: T) extends Result[T, Nothing] + case Err[+E](error: E) extends Result[Nothing, E] -abstract class Result[+T, +E] -case class Ok[+T](value: T) extends Result[T, Nothing] -case class Err[+E](value: E) extends Result[Nothing, E] object Result: - extension [T, E](r: Result[T, E]) + extension [T, E](r: Result[T, E]^)(using Label[Err[E]]) /** `_.ok` propagates Err to current Label */ - inline def ok(using Label[Result[Nothing, E]]): T = r match - case r: Ok[_] => r.value - case err => break(err.asInstanceOf[Err[E]]) + def ok: T = r match + case Ok(value) => value + case Err(value) => break[Err[E]](Err(value)) transparent inline def apply[T, E](inline body: Label[Result[T, E]] ?=> T): Result[T, E] = - boundary: - val result = body - Ok(result) + boundary(Ok(body)) // same as apply, but not an inline method def make[T, E](body: Label[Result[T, E]] ?=> T): Result[T, E] = - boundary: - val result = body - Ok(result) + boundary(Ok(body)) end Result def test[T, E](using Async) = - val good1: List[Future[Result[T, E]]] => Future[Result[List[T], E]] = frs => - Future: - Result: - frs.map(_.await.ok) // OK + import Result.* + Async.blocking: async ?=> + val good1: List[Future[Result[T, E]]] => Future[Result[List[T], E]] = frs => + Future: + Result: + frs.map(_.await.ok) // OK - val good2: Result[Future[T], E] => Future[Result[T, E]] = rf => - Future: - Result: - rf.ok.await // OK, Future argument has type Result[T] + val good2: Result[Future[T], E] => Future[Result[T, E]] = rf => + Future: + Result: + rf.ok.await // OK, Future 
argument has type Result[T] - def fail3(fr: Future[Result[T, E]]^) = - Result: - Future: // error, escaping label from Result - fr.await.ok + def fail3(fr: Future[Result[T, E]]^) = + Result: + Future: // error, escaping label from Result + fr.await.ok - def fail4(fr: Future[Result[T, E]]^) = - Result.make: // error, escaping label from Result - Future: - fr.await.ok + def fail4[T, E](fr: Future[Result[T, E]]^) = + Result.make: //lbl ?=> // error, escaping label from Result + Future: fut ?=> + fr.await.ok From cfc46e4556a2a79bbe4d1d9daa31ad5842171fa7 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 23 Apr 2024 19:37:15 +0200 Subject: [PATCH 172/465] Better error diagnostics under -explain-cyclic Also report type-checked right hand sides and export expansions. Streamline trace-handling code using inline functions. Fixes #20245 --- .../tools/dotc/core/SymDenotations.scala | 20 +++---------- .../dotty/tools/dotc/core/TypeErrors.scala | 19 ++++++++++--- .../tools/dotc/core/tasty/TreeUnpickler.scala | 23 ++++++--------- .../dotty/tools/dotc/reporting/messages.scala | 3 +- .../src/dotty/tools/dotc/typer/Checking.scala | 7 +---- .../src/dotty/tools/dotc/typer/Namer.scala | 8 ++++-- tests/neg-macros/i14772.check | 1 + tests/neg-macros/i16582.check | 2 ++ tests/neg/cyclic.check | 4 +++ tests/neg/i20245.check | 17 +++++++++++ tests/neg/i20245/Context_1.scala | 12 ++++++++ tests/neg/i20245/Messages_1.scala | 8 ++++++ tests/neg/i20245/Tree_1.scala | 18 ++++++++++++ tests/neg/i20245/Typer_1.scala | 28 +++++++++++++++++++ tests/neg/i20245/Typer_2.scala | 27 ++++++++++++++++++ tests/neg/i20245/Unification_1.scala | 11 ++++++++ 16 files changed, 164 insertions(+), 44 deletions(-) create mode 100644 tests/neg/i20245.check create mode 100644 tests/neg/i20245/Context_1.scala create mode 100644 tests/neg/i20245/Messages_1.scala create mode 100644 tests/neg/i20245/Tree_1.scala create mode 100644 tests/neg/i20245/Typer_1.scala create mode 100644 tests/neg/i20245/Typer_2.scala create 
mode 100644 tests/neg/i20245/Unification_1.scala diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index f01d2faf86c4..37d1ed644e07 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -168,16 +168,11 @@ object SymDenotations { } } else - val traceCycles = CyclicReference.isTraced - try - if traceCycles then - CyclicReference.pushTrace("compute the signature of ", symbol, "") + CyclicReference.trace("compute the signature of ", symbol): if myFlags.is(Touched) then throw CyclicReference(this)(using ctx.withOwner(symbol)) myFlags |= Touched atPhase(validFor.firstPhaseId)(completer.complete(this)) - finally - if traceCycles then CyclicReference.popTrace() protected[dotc] def info_=(tp: Type): Unit = { /* // DEBUG @@ -2994,12 +2989,9 @@ object SymDenotations { def apply(clsd: ClassDenotation)(implicit onBehalf: BaseData, ctx: Context) : (List[ClassSymbol], BaseClassSet) = { assert(isValid) - val traceCycles = CyclicReference.isTraced - try - if traceCycles then - CyclicReference.pushTrace("compute the base classes of ", clsd.symbol, "") - if (cache != null) cache.uncheckedNN - else { + CyclicReference.trace("compute the base classes of ", clsd.symbol): + if cache != null then cache.uncheckedNN + else if (locked) throw CyclicReference(clsd) locked = true provisional = false @@ -3009,10 +3001,6 @@ object SymDenotations { if (!provisional) cache = computed else onBehalf.signalProvisional() computed - } - finally - if traceCycles then CyclicReference.popTrace() - addDependent(onBehalf) } def sameGroup(p1: Phase, p2: Phase) = p1.sameParentsStartId == p2.sameParentsStartId diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 13fe02b712bc..4f944cd50983 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ 
b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -198,20 +198,31 @@ object CyclicReference: cyclicErrors.println(elem.toString) ex - type TraceElement = (/*prefix:*/ String, Symbol, /*suffix:*/ String) + type TraceElement = Context ?=> String type Trace = mutable.ArrayBuffer[TraceElement] val Trace = Property.Key[Trace] - def isTraced(using Context) = + private def isTraced(using Context) = ctx.property(CyclicReference.Trace).isDefined - def pushTrace(info: TraceElement)(using Context): Unit = + private def pushTrace(info: Context ?=> String)(using Context): Unit = for buf <- ctx.property(CyclicReference.Trace) do buf += info - def popTrace()(using Context): Unit = + private def popTrace()(using Context): Unit = for buf <- ctx.property(CyclicReference.Trace) do buf.dropRightInPlace(1) + + inline def trace[T](info: TraceElement)(inline op: => T)(using Context): T = + val traceCycles = isTraced + try + if traceCycles then pushTrace(info) + op + finally + if traceCycles then popTrace() + + inline def trace[T](prefix: String, sym: Symbol)(inline op: => T)(using Context): T = + trace((ctx: Context) ?=> i"$prefix$sym")(op) end CyclicReference class UnpicklingError(denot: Denotation, where: String, cause: Throwable)(using Context) extends TypeError: diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 45bd58e3c7c1..bff97e898f03 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -158,20 +158,15 @@ class TreeUnpickler(reader: TastyReader, if f == null then "" else s" in $f" def fail(ex: Throwable) = throw UnpicklingError(denot, where, ex) treeAtAddr(currentAddr) = - val traceCycles = CyclicReference.isTraced - try - if traceCycles then - CyclicReference.pushTrace("read the definition of ", denot.symbol, where) - atPhaseBeforeTransforms { - new TreeReader(reader).readIndexedDef()( - 
using ctx.withOwner(owner).withModeBits(mode).withSource(source)) - } - catch - case ex: CyclicReference => throw ex - case ex: AssertionError => fail(ex) - case ex: Exception => fail(ex) - finally - if traceCycles then CyclicReference.popTrace() + CyclicReference.trace(i"read the definition of ${denot.symbol}$where"): + try + atPhaseBeforeTransforms: + new TreeReader(reader).readIndexedDef()( + using ctx.withOwner(owner).withModeBits(mode).withSource(source)) + catch + case ex: CyclicReference => throw ex + case ex: AssertionError => fail(ex) + case ex: Exception => fail(ex) } class TreeReader(val reader: TastyReader) { diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 5fc5b4ae66b0..48304e54b5e7 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -95,7 +95,8 @@ abstract class CyclicMsg(errorId: ErrorMessageID)(using Context) extends Message protected def context: String = ex.optTrace match case Some(trace) => s"\n\nThe error occurred while trying to ${ - trace.map((prefix, sym, suffix) => i"$prefix$sym$suffix").mkString("\n which required to ") + trace.map(identity) // map with identity will turn Context ?=> String elements to String elements + .mkString("\n which required to ") }$debugInfo" case None => "\n\n Run with -explain-cyclic for more details." 
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 96e2e937927d..bfbcd4358853 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -349,10 +349,7 @@ object Checking { } if isInteresting(pre) then - val traceCycles = CyclicReference.isTraced - try - if traceCycles then - CyclicReference.pushTrace("explore ", tp.symbol, " for cyclic references") + CyclicReference.trace(i"explore ${tp.symbol} for cyclic references"): val pre1 = this(pre, false, false) if locked.contains(tp) || tp.symbol.infoOrCompleter.isInstanceOf[NoCompleter] @@ -367,8 +364,6 @@ object Checking { finally locked -= tp tp.withPrefix(pre1) - finally - if traceCycles then CyclicReference.popTrace() else tp } catch { diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index bad78b6714e8..fc3c7e4ac41a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1442,7 +1442,8 @@ class Namer { typer: Typer => def process(stats: List[Tree])(using Context): Unit = stats match case (stat: Export) :: stats1 => - processExport(stat, NoSymbol) + CyclicReference.trace(i"elaborate the export clause $stat"): + processExport(stat, NoSymbol) process(stats1) case (stat: Import) :: stats1 => process(stats1)(using ctx.importContext(stat, symbolOfTree(stat))) @@ -1954,8 +1955,9 @@ class Namer { typer: Typer => rhsCtx = prepareRhsCtx(rhsCtx, paramss) def typedAheadRhs(pt: Type) = - PrepareInlineable.dropInlineIfError(sym, - typedAheadExpr(mdef.rhs, pt)(using rhsCtx)) + CyclicReference.trace(i"type the right hand side of $sym since no explicit type was given"): + PrepareInlineable.dropInlineIfError(sym, + typedAheadExpr(mdef.rhs, pt)(using rhsCtx)) def rhsType = // For default getters, we use the corresponding parameter type as an diff --git 
a/tests/neg-macros/i14772.check b/tests/neg-macros/i14772.check index 94b4a3445b01..5c1836811b03 100644 --- a/tests/neg-macros/i14772.check +++ b/tests/neg-macros/i14772.check @@ -5,6 +5,7 @@ | | The error occurred while trying to compute the signature of method $anonfun | which required to compute the signature of method impl + | which required to type the right hand side of method impl since no explicit type was given | | Run with both -explain-cyclic and -Ydebug-cyclic to see full stack trace. | diff --git a/tests/neg-macros/i16582.check b/tests/neg-macros/i16582.check index 548a4491ed24..546d0b7efaf3 100644 --- a/tests/neg-macros/i16582.check +++ b/tests/neg-macros/i16582.check @@ -6,7 +6,9 @@ | dotty.tools.dotc.core.CyclicReference: Recursive value o2 needs type | | The error occurred while trying to compute the signature of method test + | which required to type the right hand side of method test since no explicit type was given | which required to compute the signature of value o2 + | which required to type the right hand side of value o2 since no explicit type was given | which required to compute the signature of value o2 | | Run with both -explain-cyclic and -Ydebug-cyclic to see full stack trace. 
diff --git a/tests/neg/cyclic.check b/tests/neg/cyclic.check index 19eedac04f1c..d9afb91975f8 100644 --- a/tests/neg/cyclic.check +++ b/tests/neg/cyclic.check @@ -4,9 +4,13 @@ | Overloaded or recursive method f needs return type | | The error occurred while trying to compute the signature of method f + | which required to type the right hand side of method f since no explicit type was given | which required to compute the signature of method g + | which required to type the right hand side of method g since no explicit type was given | which required to compute the signature of method h + | which required to type the right hand side of method h since no explicit type was given | which required to compute the signature of method i + | which required to type the right hand side of method i since no explicit type was given | which required to compute the signature of method f | | Run with both -explain-cyclic and -Ydebug-cyclic to see full stack trace. diff --git a/tests/neg/i20245.check b/tests/neg/i20245.check new file mode 100644 index 000000000000..565bde7678b7 --- /dev/null +++ b/tests/neg/i20245.check @@ -0,0 +1,17 @@ + +-- [E046] Cyclic Error: tests/neg/i20245/Typer_2.scala:16:57 ----------------------------------------------------------- +16 | private[typer] val unification = new Unification(using this) // error + | ^ + | Cyclic reference involving class Context + | + | The error occurred while trying to compute the base classes of class Context + | which required to compute the base classes of trait TyperOps + | which required to compute the signature of trait TyperOps + | which required to elaborate the export clause export unification.requireSubtype + | which required to compute the signature of value unification + | which required to type the right hand side of value unification since no explicit type was given + | which required to compute the base classes of class Context + | + | Run with both -explain-cyclic and -Ydebug-cyclic to see full stack trace. 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20245/Context_1.scala b/tests/neg/i20245/Context_1.scala new file mode 100644 index 000000000000..a38d8fe7531d --- /dev/null +++ b/tests/neg/i20245/Context_1.scala @@ -0,0 +1,12 @@ +package effekt +package context + +import effekt.typer.TyperOps + + +abstract class Context extends TyperOps { + + // bring the context itself in scope + implicit val context: Context = this + +} diff --git a/tests/neg/i20245/Messages_1.scala b/tests/neg/i20245/Messages_1.scala new file mode 100644 index 000000000000..c8cc8267d44c --- /dev/null +++ b/tests/neg/i20245/Messages_1.scala @@ -0,0 +1,8 @@ +package effekt +package util + +object messages { + trait ErrorReporter { + + } +} diff --git a/tests/neg/i20245/Tree_1.scala b/tests/neg/i20245/Tree_1.scala new file mode 100644 index 000000000000..54a2a5cc1a64 --- /dev/null +++ b/tests/neg/i20245/Tree_1.scala @@ -0,0 +1,18 @@ +package effekt +package source + +import effekt.context.Context + +object Resolvable { + + // There need to be two resolve extension methods for the error to show up + // They also need to take an implicit Context + extension (n: Int) { + def resolve(using C: Context): Unit = ??? + } + + extension (b: Boolean) { + def resolve(using C: Context): Unit = ??? 
+ } +} +export Resolvable.resolve diff --git a/tests/neg/i20245/Typer_1.scala b/tests/neg/i20245/Typer_1.scala new file mode 100644 index 000000000000..0a61346ecaef --- /dev/null +++ b/tests/neg/i20245/Typer_1.scala @@ -0,0 +1,28 @@ +package effekt +package typer + +import effekt.util.messages.ErrorReporter + +import effekt.context.{ Context } + +// This import is also NECESSARY for the cyclic error +import effekt.source.{ resolve } + + +trait TyperOps extends ErrorReporter { self: Context => + + // passing `this` as ErrorReporter here is also NECESSARY for the cyclic error + private[typer] val unification = new Unification(using this) + + // this export is NECESSARY for the cyclic error + export unification.{ requireSubtype } + + println(1) + + // vvvvvvvv insert a line here, save, and run `compile` again vvvvvvvvvv +} + + + + + diff --git a/tests/neg/i20245/Typer_2.scala b/tests/neg/i20245/Typer_2.scala new file mode 100644 index 000000000000..ed7f05de80d0 --- /dev/null +++ b/tests/neg/i20245/Typer_2.scala @@ -0,0 +1,27 @@ +//> using options -explain-cyclic +package effekt +package typer + +import effekt.util.messages.ErrorReporter + +import effekt.context.{ Context } + +// This import is also NECESSARY for the cyclic error +import effekt.source.{ resolve } + + +trait TyperOps extends ErrorReporter { self: Context => + + // passing `this` as ErrorReporter here is also NECESSARY for the cyclic error + private[typer] val unification = new Unification(using this) // error + + // this export is NECESSARY for the cyclic error + export unification.{ requireSubtype } + + // vvvvvvvv insert a line here, save, and run `compile` again vvvvvvvvvv +} + + + + + diff --git a/tests/neg/i20245/Unification_1.scala b/tests/neg/i20245/Unification_1.scala new file mode 100644 index 000000000000..406ab1b93b00 --- /dev/null +++ b/tests/neg/i20245/Unification_1.scala @@ -0,0 +1,11 @@ +package effekt +package typer + +import effekt.util.messages.ErrorReporter + + +class 
Unification(using C: ErrorReporter) { + + def requireSubtype(): Unit = () + +} From 1fc391679c10144ed49f8d924fd017c487816e03 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Tue, 9 Apr 2024 13:36:27 +0200 Subject: [PATCH 173/465] Improve incorrect classloader reporting in staging * do not println staging crashes to stdout * enrich the errors coming from the compiler with additional hints about using the correct classloader --- compiler/src/dotty/tools/dotc/Run.scala | 2 ++ .../src/scala/quoted/staging/QuoteCompiler.scala | 4 +++- .../src/scala/quoted/staging/QuoteDriver.scala | 16 +++++++++++++++- tests/run-staging/i19170c.check | 1 + tests/run-staging/i19170c.scala | 16 ++++++++++++++++ tests/run-staging/i19176b.check | 1 + tests/run-staging/i19176b.scala | 14 ++++++++++++++ 7 files changed, 52 insertions(+), 2 deletions(-) create mode 100644 tests/run-staging/i19170c.check create mode 100644 tests/run-staging/i19170c.scala create mode 100644 tests/run-staging/i19176b.check create mode 100644 tests/run-staging/i19176b.scala diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 64e216a39b2a..fa827432460a 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -661,4 +661,6 @@ object Run { report.enrichErrorMessage(errorMessage) else errorMessage + def doNotEnrichErrorMessage: Unit = + if run != null then run.myEnrichedErrorMessage = true } diff --git a/staging/src/scala/quoted/staging/QuoteCompiler.scala b/staging/src/scala/quoted/staging/QuoteCompiler.scala index cf24b1de369a..dea40cd1035d 100644 --- a/staging/src/scala/quoted/staging/QuoteCompiler.scala +++ b/staging/src/scala/quoted/staging/QuoteCompiler.scala @@ -48,7 +48,9 @@ private class QuoteCompiler extends Compiler: override def newRun(implicit ctx: Context): ExprRun = reset() - new ExprRun(this, ctx.addMode(Mode.ReadPositions)) + val run = new ExprRun(this, ctx.addMode(Mode.ReadPositions)) + 
run.doNotEnrichErrorMessage + run def outputClassName: TypeName = "Generated$Code$From$Quoted".toTypeName diff --git a/staging/src/scala/quoted/staging/QuoteDriver.scala b/staging/src/scala/quoted/staging/QuoteDriver.scala index e894a7bc40f2..7eb99bce4ff8 100644 --- a/staging/src/scala/quoted/staging/QuoteDriver.scala +++ b/staging/src/scala/quoted/staging/QuoteDriver.scala @@ -8,6 +8,7 @@ import dotty.tools.dotc.quoted.QuotesCache import dotty.tools.io.{AbstractFile, Directory, PlainDirectory, VirtualDirectory} import dotty.tools.repl.AbstractFileClassLoader import dotty.tools.dotc.reporting._ +import dotty.tools.dotc.config.Settings.Setting.value import dotty.tools.dotc.util.ClasspathFromClassloader import scala.quoted._ import scala.quoted.staging.Compiler @@ -40,7 +41,20 @@ private class QuoteDriver(appClassloader: ClassLoader) extends Driver: setCompilerSettings(ctx1.fresh.setSetting(ctx1.settings.outputDir, outDir), settings) } - new QuoteCompiler().newRun(ctx).compileExpr(exprBuilder) match + val compiledExpr = + try + new QuoteCompiler().newRun(ctx).compileExpr(exprBuilder) + catch case ex: dotty.tools.FatalError => + val enrichedMessage = + s"""An unhandled exception was thrown in the staging compiler. + |This might be caused by using an incorrect classloader + |when creating the `staging.Compiler` instance with `staging.Compiler.make`. + |For details, please refer to the documentation. 
+ |For non-enriched exceptions, compile with -Yno-enrich-error-messages.""".stripMargin + if ctx.settings.YnoEnrichErrorMessages.value(using ctx) then throw ex + else throw new Exception(enrichedMessage, ex) + + compiledExpr match case Right(value) => value.asInstanceOf[T] diff --git a/tests/run-staging/i19170c.check b/tests/run-staging/i19170c.check new file mode 100644 index 000000000000..581ccb11e364 --- /dev/null +++ b/tests/run-staging/i19170c.check @@ -0,0 +1 @@ +exception thrown, no additional printlns diff --git a/tests/run-staging/i19170c.scala b/tests/run-staging/i19170c.scala new file mode 100644 index 000000000000..24b7faa8a323 --- /dev/null +++ b/tests/run-staging/i19170c.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +given staging.Compiler = + staging.Compiler.make(getClass.getClassLoader.getParent) // different classloader that 19170b.scala +class A(i: Int) + +def f(i: Expr[Int])(using Quotes): Expr[A] = { '{ new A($i) } } + +@main def Test = { + try + val g: Int => A = staging.run { '{ (i: Int) => ${ f('{i}) } } } + println(g(3)) + catch case ex: Exception => + assert(ex.getMessage().startsWith("An unhandled exception was thrown in the staging compiler."), ex.getMessage()) + println("exception thrown, no additional printlns") +} diff --git a/tests/run-staging/i19176b.check b/tests/run-staging/i19176b.check new file mode 100644 index 000000000000..581ccb11e364 --- /dev/null +++ b/tests/run-staging/i19176b.check @@ -0,0 +1 @@ +exception thrown, no additional printlns diff --git a/tests/run-staging/i19176b.scala b/tests/run-staging/i19176b.scala new file mode 100644 index 000000000000..d3f1657d03da --- /dev/null +++ b/tests/run-staging/i19176b.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +given staging.Compiler = + staging.Compiler.make(getClass.getClassLoader.getParent) // we want to make sure the classloader is incorrect + +class A + +@main def Test = + try + val f: (A, Int) => Int = staging.run { '{ (q: A, x: Int) => x } } + f(new A, 3) + catch 
case ex: Exception => + assert(ex.getMessage().startsWith("An unhandled exception was thrown in the staging compiler."), ex.getMessage()) + println("exception thrown, no additional printlns") From 5efacbe87d62c5e9a1f8fce07be2c3c8e5700ab7 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 24 Apr 2024 12:28:18 +0200 Subject: [PATCH 174/465] Add implicit search to CyclicReference traces --- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index ffd9d7fd8515..70efd2a5fd68 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1084,10 +1084,15 @@ trait Implicits: (searchCtx.scope eq ctx.scope) && (searchCtx.owner eq ctx.owner.owner) do () - try ImplicitSearch(pt, argument, span)(using searchCtx).bestImplicit - catch case ce: CyclicReference => - ce.inImplicitSearch = true - throw ce + def searchStr = + if argument.isEmpty then i"argument of type $pt" + else i"conversion from ${argument.tpe} to $pt" + + CyclicReference.trace(i"searching for an implicit $searchStr"): + try ImplicitSearch(pt, argument, span)(using searchCtx).bestImplicit + catch case ce: CyclicReference => + ce.inImplicitSearch = true + throw ce else NoMatchingImplicitsFailure val result = From 41c735e5b48d565210980538690ed3a180594393 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Wed, 24 Apr 2024 12:42:26 +0200 Subject: [PATCH 175/465] fix: prefer non-export definition locations --- .../dotty/tools/pc/PcDefinitionProvider.scala | 11 +++++++---- .../pc/tests/definition/PcDefinitionSuite.scala | 17 +++++++++++++++++ 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala index 
0de81ec39711..536ddae4203c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Flags.ModuleClass +import dotty.tools.dotc.core.Flags.{Exported, ModuleClass} import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver @@ -123,9 +123,12 @@ class PcDefinitionProvider( case symbols @ (sym :: other) => val isLocal = sym.source == pos.source if isLocal then - val defs = - Interactive.findDefinitions(List(sym), driver, false, false).filter(_.source == sym.source) - defs.headOption match + val (exportedDefs, otherDefs) = + Interactive.findDefinitions(List(sym), driver, false, false) + .filter(_.source == sym.source) + .partition(_.tree.symbol.is(Exported)) + + otherDefs.headOption.orElse(exportedDefs.headOption) match case Some(srcTree) => val pos = srcTree.namePos pos.toLocation match diff --git a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala index 9636aea77c2e..c7c9b9979404 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala @@ -274,6 +274,23 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: |""".stripMargin ) + @Test def exportTermExtension = + check( + """|package a + |class Test extends A { + | assert("Hello".fo@@o == "HelloFoo") + |} + | + |trait A { + | export B.* + |} + | + |object B { + | extension (value: String) def <>: String = s"${value}Foo" + |} + |""".stripMargin + ) + @Test def `named-arg-local` = check( """| 
From 4eff7488a7e017190af42be1c21ea17681ec238c Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Tue, 23 Apr 2024 12:06:20 +0200 Subject: [PATCH 176/465] Introduce MethodTypeKind to quotes reflection API It allows to create Contextual and Implicit MethodTypes. MethodTypeKind abstracts away the MethodTypeCompanion implementation into a simple enum style choice for a newly added MethodType apply. The MethodType unapply is kept as it was for source compatibility, instead users are encouraged to use isImplicit and isContextual methods. --- .../quoted/runtime/impl/QuotesImpl.scala | 11 ++++ library/src/scala/quoted/Quotes.scala | 25 +++++++- project/MiMaFilters.scala | 7 +- .../reflect-method-type-kind/macro_1.scala | 64 +++++++++++++++++++ .../reflect-method-type-kind/test_2.scala | 3 + 5 files changed, 108 insertions(+), 2 deletions(-) create mode 100644 tests/run-macros/reflect-method-type-kind/macro_1.scala create mode 100644 tests/run-macros/reflect-method-type-kind/test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 81fadb6baa89..cf26d6f16ef3 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2204,6 +2204,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler case _ => None end MethodOrPolyTypeTest + type MethodTypeKind = dotc.core.Types.MethodTypeCompanion + + object MethodTypeKind extends MethodTypeKindModule: + val Plain: MethodTypeKind = Types.MethodType + val Contextual: MethodTypeKind = Types.ContextualMethodType + val Implicit: MethodTypeKind = Types.ImplicitMethodType + type MethodType = dotc.core.Types.MethodType object MethodTypeTypeTest extends TypeTest[TypeRepr, MethodType]: @@ -2215,6 +2222,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object MethodType extends MethodTypeModule: def apply(paramNames: 
List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType = Types.MethodType(paramNames.map(_.toTermName))(paramInfosExp, resultTypeExp) + def apply(kind: MethodTypeKind)(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType = + kind.apply(paramNames.map(_.toTermName))(paramInfosExp, resultTypeExp) def unapply(x: MethodType): (List[String], List[TypeRepr], TypeRepr) = (x.paramNames.map(_.toString), x.paramTypes, x.resType) end MethodType @@ -2223,6 +2232,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler extension (self: MethodType) def isErased: Boolean = false def isImplicit: Boolean = self.isImplicitMethod + def isContextual: Boolean = self.isContextualMethod + def methodTypeKind: MethodTypeKind = self.companion def param(idx: Int): TypeRepr = self.newParamRef(idx) def erasedParams: List[Boolean] = self.erasedParams diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index e5cdc3bf4fb7..e76c924ece20 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -211,6 +211,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * +- MatchCase * +- TypeBounds * +- NoPrefix + * + * +- MethodTypeKind * * +- Selector -+- SimpleSelector * +- RenameSelector @@ -3234,6 +3236,22 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** `TypeTest` that allows testing at runtime in a pattern match if a `TypeRepr` is a `MethodOrPoly` */ given MethodOrPolyTypeTest: TypeTest[TypeRepr, MethodOrPoly] + /** Type which decides on the kind of parameter list represented by `MethodType`. 
*/ + type MethodTypeKind + + /** Module object of `type MethodKind` */ + val MethodTypeKind: MethodTypeKindModule + + /** Methods of the module object `val MethodKind` */ + trait MethodTypeKindModule { this: MethodTypeKind.type => + /** Represents a parameter list without any implicitness of parameters, like (x1: X1, x2: X2, ...) */ + val Plain: MethodTypeKind + /** Represents a parameter list with implicit parameters, like `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)`, `(using x1: X1, ..., xn: Xn)` */ + val Implicit: MethodTypeKind + /** Represents a parameter list of a contextual method, like `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ + val Contextual: MethodTypeKind + } + /** Type of the definition of a method taking a single list of parameters. It's return type may be a MethodType. */ type MethodType <: MethodOrPoly @@ -3246,6 +3264,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Methods of the module object `val MethodType` */ trait MethodTypeModule { this: MethodType.type => def apply(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType + def apply(kind: MethodTypeKind)(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType def unapply(x: MethodType): (List[String], List[TypeRepr], TypeRepr) } @@ -3255,8 +3274,12 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Extension methods of `MethodType` */ trait MethodTypeMethods: extension (self: MethodType) - /** Is this the type of using parameter clause `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ + /** Is this the type of parameter clause like `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ def isImplicit: Boolean + /** Is this the type of parameter clause like `(using X1, ..., Xn)` or `(using x1: X1, x2: X2, ... 
)` */ + def isContextual: Boolean + /** Returns a MethodTypeKind object representing the implicitness of the MethodType parameter clause. */ + def methodTypeKind: MethodTypeKind /** Is this the type of erased parameter clause `(erased x1: X1, ..., xn: Xn)` */ @deprecated("Use `hasErasedParams` and `erasedParams`", "3.4") def isErased: Boolean diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index b22b2e3446e5..904367d15ecd 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -50,7 +50,12 @@ object MiMaFilters { // Breaking changes since last reference version Build.mimaPreviousDottyVersion -> // Seq.empty, // We should never break backwards compatibility Seq( - ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#SymbolMethods.isSuperAccessor"), // This change is acceptable. See comment in `Breaking changes since last LTS`. + // `ReversedMissingMethodProblem`s are acceptable. See comment in `Breaking changes since last LTS`. 
+ ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#SymbolMethods.isSuperAccessor"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.MethodTypeKind"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeModule.apply"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.methodTypeKind"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.isContextual"), ), // Breaking changes since last LTS diff --git a/tests/run-macros/reflect-method-type-kind/macro_1.scala b/tests/run-macros/reflect-method-type-kind/macro_1.scala new file mode 100644 index 000000000000..6e996a85603d --- /dev/null +++ b/tests/run-macros/reflect-method-type-kind/macro_1.scala @@ -0,0 +1,64 @@ +trait Foo +trait Bar + +object Methods: + def implicitMethod(implicit foo: Foo, int: Int): Bar = ??? + def contextualMethod(using foo: Foo, int: Int): Bar = ??? + def plainMethod(foo: Foo, int: Int): Bar = ??? 
+ +object Macro: + import scala.quoted._ + inline def macroCall(): Unit = ${ macroCallImpl } + def macroCallImpl(using Quotes): Expr[Unit] = + testReadingMethodTypeKind + testCreatingMethodTypeKind + '{()} + + def testReadingMethodTypeKind(using Quotes) = + import quotes.reflect._ + def getFromMethods(name: String): TypeRepr = + val typeRepr = TypeRepr.of[Methods.type] + val symbol = + typeRepr.typeSymbol.methodMember(name).headOption.getOrElse( + typeRepr.typeSymbol.fieldMember(name) + ) + typeRepr.memberType(symbol) + + assert(getFromMethods("implicitMethod").asInstanceOf[MethodType].isImplicit) + assert(!getFromMethods("implicitMethod").asInstanceOf[MethodType].isContextual) + assert(getFromMethods("implicitMethod").asInstanceOf[MethodType].methodTypeKind == MethodTypeKind.Implicit) + + assert(getFromMethods("contextualMethod").asInstanceOf[MethodType].isImplicit) + assert(getFromMethods("contextualMethod").asInstanceOf[MethodType].isContextual) + assert(getFromMethods("contextualMethod").asInstanceOf[MethodType].methodTypeKind == MethodTypeKind.Contextual) + + assert(!getFromMethods("plainMethod").asInstanceOf[MethodType].isImplicit) + assert(!getFromMethods("plainMethod").asInstanceOf[MethodType].isContextual) + assert(getFromMethods("plainMethod").asInstanceOf[MethodType].methodTypeKind == MethodTypeKind.Plain) + + + def testCreatingMethodTypeKind(using Quotes) = + import quotes.reflect._ + val paramTypes = List(TypeRepr.of[Foo], TypeRepr.of[Int]) + val resType = TypeRepr.of[Bar] + val implicitMethodType = MethodType.apply(MethodTypeKind.Implicit)(List("foo", "int"))(mt => paramTypes, mt => resType) + assert(implicitMethodType.isImplicit) + assert(!implicitMethodType.isContextual) + assert(implicitMethodType.methodTypeKind == MethodTypeKind.Implicit) + assert(implicitMethodType.methodTypeKind != MethodTypeKind.Contextual) + assert(implicitMethodType.methodTypeKind != MethodTypeKind.Plain) + + + val contextualMethodType = 
MethodType.apply(MethodTypeKind.Contextual)(List("foo", "int"))(mt => paramTypes, mt => resType) + assert(contextualMethodType.isImplicit) + assert(contextualMethodType.isContextual) + assert(contextualMethodType.methodTypeKind != MethodTypeKind.Implicit) + assert(contextualMethodType.methodTypeKind == MethodTypeKind.Contextual) + assert(contextualMethodType.methodTypeKind != MethodTypeKind.Plain) + + val plainMethodType = MethodType.apply(MethodTypeKind.Plain)(List("foo", "int"))(mt => paramTypes, mt => resType) + assert(!plainMethodType.isContextual) + assert(!plainMethodType.isImplicit) + assert(plainMethodType.methodTypeKind != MethodTypeKind.Implicit) + assert(plainMethodType.methodTypeKind != MethodTypeKind.Contextual) + assert(plainMethodType.methodTypeKind == MethodTypeKind.Plain) diff --git a/tests/run-macros/reflect-method-type-kind/test_2.scala b/tests/run-macros/reflect-method-type-kind/test_2.scala new file mode 100644 index 000000000000..a020dc4e2d93 --- /dev/null +++ b/tests/run-macros/reflect-method-type-kind/test_2.scala @@ -0,0 +1,3 @@ +object Test: + def main(args: Array[String]): Unit = + Macro.macroCall() From ec118412e0c1e84d8e4fce99c8ed2a7c922656d2 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Wed, 24 Apr 2024 18:47:00 +0200 Subject: [PATCH 177/465] Fix default args lookup for given classes --- .../src/dotty/tools/dotc/typer/Applications.scala | 15 ++++++++++++++- tests/pos/20088.scala | 6 ++++++ tests/pos/20088b.scala | 6 ++++++ 3 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 tests/pos/20088.scala create mode 100644 tests/pos/20088b.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 184b250e94fb..4213c20f5a40 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -275,7 +275,20 @@ object Applications { if (getterDenot.exists) qual.select(TermRef(qual.tpe, 
getterName, getterDenot)) else EmptyTree if !meth.isClassConstructor then - selectGetter(receiver) + val res = selectGetter(receiver) + if res.isEmpty && meth.is(Given) then + val classSym = meth.info.finalResultType.typeSymbol + if classSym.isClass && classSym.isAllOf(Given | Synthetic) then + // `meth` is an implicit wrapper: the `given def` desugared from a + // `given C(...)` or `given C with ...` by `desugar#classDef`. + // Therefore, we can try to look for the default getters of the + // constructor of the `given class`. We find it via the `given + // def`'s result type. See #20088 and associated test cases. + val classRefTree = receiver.select(classSym) + val constructorSym = classSym.primaryConstructor.asTerm + findDefaultGetter(constructorSym, classRefTree, idx) + else res + else res else // default getters for class constructors are found in the companion object val cls = meth.owner diff --git a/tests/pos/20088.scala b/tests/pos/20088.scala new file mode 100644 index 000000000000..308c5a0f0a91 --- /dev/null +++ b/tests/pos/20088.scala @@ -0,0 +1,6 @@ +trait Foo +trait Bar + +given (using foo: Foo = new {}): Bar with {} + +def Test = summon[Bar] diff --git a/tests/pos/20088b.scala b/tests/pos/20088b.scala new file mode 100644 index 000000000000..8cbf79d16959 --- /dev/null +++ b/tests/pos/20088b.scala @@ -0,0 +1,6 @@ +trait Foo +class Bar + +given (using foo: Foo = new {}): Bar() + +def Test = summon[Bar] From abb6909f22df2148d3719c3567ba5c5488a35636 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 24 Apr 2024 19:18:10 +0200 Subject: [PATCH 178/465] Normalize when verifying if TypeTestCasts are unchecked --- .../tools/dotc/transform/TypeTestsCasts.scala | 1 + tests/pos/i13433c/A_1.scala | 25 +++++++++++++++++++ tests/pos/i13433c/B_2.scala | 22 ++++++++++++++++ 3 files changed, 48 insertions(+) create mode 100644 tests/pos/i13433c/A_1.scala create mode 100644 tests/pos/i13433c/B_2.scala diff --git 
a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 509461c794f4..2007cbe45ff5 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -135,6 +135,7 @@ object TypeTestsCasts { def recur(X: Type, P: Type): String = trace(s"recur(${X.show}, ${P.show})") { (X <:< P) ||| P.dealias.match case _: SingletonType => "" + case tp if tp.isMatchAlias => recur(X, tp.tryNormalize) case _: TypeProxy if isAbstract(P) => i"it refers to an abstract type member or type parameter" case defn.ArrayOf(tpT) => diff --git a/tests/pos/i13433c/A_1.scala b/tests/pos/i13433c/A_1.scala new file mode 100644 index 000000000000..8163c4f6e990 --- /dev/null +++ b/tests/pos/i13433c/A_1.scala @@ -0,0 +1,25 @@ +import scala.reflect.TypeTest + +type Matcher[A] = A match { case String => A } + +def patternMatch[A](a: Any)(using tt: TypeTest[Any, Matcher[A]]): Option[Matcher[A]] = { + // type T = RDF.Triple[Rdf] + a match { + case res: Matcher[A] => Some(res) + case _ => None + } +} + +def patternMatchWithAlias[A](a: Any)(using tt: TypeTest[Any, Matcher[A]]): Option[Matcher[A]] = { + type T = Matcher[A] + a match { + case res: T => Some(res) + case _ => None + } +} + +type S = String +type MS = Matcher[S] + +type S2 = MS +type MS2 = Matcher[S2] diff --git a/tests/pos/i13433c/B_2.scala b/tests/pos/i13433c/B_2.scala new file mode 100644 index 000000000000..5f2b0e6e0830 --- /dev/null +++ b/tests/pos/i13433c/B_2.scala @@ -0,0 +1,22 @@ +//> using options -Xfatal-warnings -deprecation -feature + +@main def main = { + println(patternMatch[String]("abc")) + println(patternMatchWithAlias[String]("abc")) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[Matcher[String]] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None })) + println(patternMatchWithAlias[String]("abc")(using (s: Any) 
=> { + if s.isInstanceOf[Matcher[String]] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None })) + + println(patternMatch[String](1)) + println(patternMatchWithAlias[String](1)) + + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[S] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[MS] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[S2] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[MS2] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) +} From 1f9987cafd5851555f324a81915e27562f28e576 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 25 Apr 2024 09:45:28 +0200 Subject: [PATCH 179/465] Only recurse for TypeTests checkability with normalizing match types --- compiler/src/dotty/tools/dotc/core/Types.scala | 4 ++++ compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala | 2 +- tests/{pos => warn}/i13433c/A_1.scala | 2 ++ tests/{pos => warn}/i13433c/B_2.scala | 3 ++- 4 files changed, 9 insertions(+), 2 deletions(-) rename tests/{pos => warn}/i13433c/A_1.scala (94%) rename tests/{pos => warn}/i13433c/B_2.scala (87%) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 870e985fd48e..69ba2377a456 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5214,6 +5214,10 @@ object Types extends TypeUtils { def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp.underlyingMatchType match case mt: MatchType => mt.reducesUsingGadt case _ => false + + object Normalizing: + def 
unapply(tp: Type)(using Context): Option[Type] = + Some(tp.tryNormalize).filter(_.exists) } enum MatchTypeCasePattern: diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 2007cbe45ff5..082c239c6443 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -135,7 +135,7 @@ object TypeTestsCasts { def recur(X: Type, P: Type): String = trace(s"recur(${X.show}, ${P.show})") { (X <:< P) ||| P.dealias.match case _: SingletonType => "" - case tp if tp.isMatchAlias => recur(X, tp.tryNormalize) + case MatchType.Normalizing(tp) => recur(X, tp) case _: TypeProxy if isAbstract(P) => i"it refers to an abstract type member or type parameter" case defn.ArrayOf(tpT) => diff --git a/tests/pos/i13433c/A_1.scala b/tests/warn/i13433c/A_1.scala similarity index 94% rename from tests/pos/i13433c/A_1.scala rename to tests/warn/i13433c/A_1.scala index 8163c4f6e990..d810b8e34909 100644 --- a/tests/pos/i13433c/A_1.scala +++ b/tests/warn/i13433c/A_1.scala @@ -23,3 +23,5 @@ type MS = Matcher[S] type S2 = MS type MS2 = Matcher[S2] + +type Mstuck = Matcher[Nothing] diff --git a/tests/pos/i13433c/B_2.scala b/tests/warn/i13433c/B_2.scala similarity index 87% rename from tests/pos/i13433c/B_2.scala rename to tests/warn/i13433c/B_2.scala index 5f2b0e6e0830..a0654d8cb96d 100644 --- a/tests/pos/i13433c/B_2.scala +++ b/tests/warn/i13433c/B_2.scala @@ -1,4 +1,3 @@ -//> using options -Xfatal-warnings -deprecation -feature @main def main = { println(patternMatch[String]("abc")) @@ -19,4 +18,6 @@ if s.isInstanceOf[S2] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) println(patternMatch[String]("abc")(using (s: Any) => { if s.isInstanceOf[MS2] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) + println(patternMatch[String]("abc")(using (s: Any) => { 
+ if s.isInstanceOf[Mstuck] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) // warn } From fc06435366b42f48a0a90803ac3dda4ad58b9f02 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 22 Apr 2024 17:11:32 +0200 Subject: [PATCH 180/465] Fix implicit search failure reporting --- .../src/dotty/tools/dotc/typer/Typer.scala | 105 ++++++++++-------- tests/neg/19414-desugared.check | 4 + tests/neg/19414-desugared.scala | 22 ++++ tests/neg/19414.check | 4 + tests/neg/19414.scala | 15 +++ tests/neg/given-ambiguous-default-1.check | 4 + tests/neg/given-ambiguous-default-1.scala | 18 +++ tests/neg/given-ambiguous-default-2.check | 4 + tests/neg/given-ambiguous-default-2.scala | 18 +++ tests/neg/i8827a.check | 7 +- tests/neg/i8827b.check | 7 +- tests/neg/i9568.check | 7 +- tests/neg/implicitSearch.check | 7 +- tests/neg/missing-implicit3.check | 11 +- 14 files changed, 154 insertions(+), 79 deletions(-) create mode 100644 tests/neg/19414-desugared.check create mode 100644 tests/neg/19414-desugared.scala create mode 100644 tests/neg/19414.check create mode 100644 tests/neg/19414.scala create mode 100644 tests/neg/given-ambiguous-default-1.check create mode 100644 tests/neg/given-ambiguous-default-1.scala create mode 100644 tests/neg/given-ambiguous-default-2.check create mode 100644 tests/neg/given-ambiguous-default-2.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 9150ad6be392..eb3dcae69181 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3707,7 +3707,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer private def adapt1(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = { assert(pt.exists && !pt.isInstanceOf[ExprType] || ctx.reporter.errorsReported, i"tree: $tree, pt: $pt") - def methodStr = err.refStr(methPart(tree).tpe) def readapt(tree: Tree)(using Context) 
= adapt(tree, pt, locked) def readaptSimplified(tree: Tree)(using Context) = readapt(simplify(tree, pt, locked)) @@ -3872,49 +3871,38 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer arg :: inferArgsAfter(arg) end implicitArgs - val args = implicitArgs(wtp.paramInfos, 0, pt) - - def propagatedFailure(args: List[Tree]): Type = args match { - case arg :: args1 => - arg.tpe match { - case ambi: AmbiguousImplicits => - propagatedFailure(args1) match { - case NoType | (_: AmbiguousImplicits) => ambi - case failed => failed - } - case failed: SearchFailureType => failed - case _ => propagatedFailure(args1) - } - case Nil => NoType - } - - val propFail = propagatedFailure(args) - - def issueErrors(): Tree = { - def paramSymWithMethodTree(paramName: TermName) = - if tree.symbol.exists then - tree.symbol.paramSymss.flatten - .map(sym => sym.name -> sym) - .toMap - .get(paramName) - .map((_, tree)) - else - None - - wtp.paramNames.lazyZip(wtp.paramInfos).lazyZip(args).foreach { (paramName, formal, arg) => - arg.tpe match { + /** Reports errors for arguments of `appTree` that have a + * `SearchFailureType`, recursively traversing arguments that are + * themselves applications. `mt` must be the type of `appTree.fun`. 
+ */ + def reportErrors(appTree: Apply, mt: MethodType): Unit = + val Apply(fun, args) = appTree + for (paramName, formal, arg) <- mt.paramNames.lazyZip(mt.paramInfos).lazyZip(args) do + arg.tpe match case failure: SearchFailureType => - report.error( - missingArgMsg(arg, formal, implicitParamString(paramName, methodStr, tree), paramSymWithMethodTree(paramName)), - tree.srcPos.endPos - ) - case _ => - } - } - untpd.Apply(tree, args).withType(propFail) - } + arg match + case childAppTree: Apply => + childAppTree.fun.tpe.widen match + case childMt: MethodType => reportErrors(childAppTree, childMt) + case _ => () + case _ => () + + val methodStr = err.refStr(methPart(fun).tpe) + val paramStr = implicitParamString(paramName, methodStr, fun) + val paramSymWithMethodCallTree = + fun.symbol.paramSymss.flatten + .find(_.name == paramName) + .map((_, appTree)) + val message = missingArgMsg(arg, formal, paramStr, paramSymWithMethodCallTree) + // Note: if the same error type appears on several trees, we + // might report it several times, but this is not a problem + // because only the first one will be displayed. We traverse in + // post-order, so that the most detailed message gets displayed. + report.error(message, fun.srcPos.endPos) + case _ => () - if (propFail.exists) { + val args = implicitArgs(wtp.paramInfos, 0, pt) + if (args.tpes.exists(_.isInstanceOf[SearchFailureType])) { // If there are several arguments, some arguments might already // have influenced the context, binding variables, but later ones // might fail. In that case the constraint and instantiated variables @@ -3923,18 +3911,39 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // If method has default params, fall back to regular application // where all inferred implicits are passed as named args. 
- if hasDefaultParams && !propFail.isInstanceOf[AmbiguousImplicits] then - val namedArgs = wtp.paramNames.lazyZip(args).flatMap { (pname, arg) => - if (arg.tpe.isError) Nil else untpd.NamedArg(pname, untpd.TypedSplice(arg)) :: Nil - } + if hasDefaultParams then + // Only keep the arguments that don't have an error type, or that + // have an `AmbiguousImplicits` error type. The later ensures that a + // default argument can't override an ambiguous implicit. See tests + // `given-ambiguous-default*` and `19414*`. + val namedArgs = + wtp.paramNames.lazyZip(args) + .filter((_, arg) => !arg.tpe.isError || arg.tpe.isInstanceOf[AmbiguousImplicits]) + .map((pname, arg) => untpd.NamedArg(pname, untpd.TypedSplice(arg))) + val app = cpy.Apply(tree)(untpd.TypedSplice(tree), namedArgs) val needsUsing = wtp.isContextualMethod || wtp.match case MethodType(ContextBoundParamName(_) :: _) => sourceVersion.isAtLeast(`3.4`) case _ => false if needsUsing then app.setApplyKind(ApplyKind.Using) typr.println(i"try with default implicit args $app") - typed(app, pt, locked) - else issueErrors() + val retyped = typed(app, pt, locked) + + // If the retyped tree still has an error type and is an `Apply` + // node, we can report the errors for each argument nicely. + // Otherwise, we don't report anything here. 
+ retyped match + case retyped: Apply if retyped.tpe.isError => reportErrors(retyped, wtp) + case _ => () + + retyped + else + val firstNonAmbiguous = args.tpes.find(tp => tp.isError && !tp.isInstanceOf[AmbiguousImplicits]) + def firstError = args.tpes.find(_.isError) + val errorType = firstNonAmbiguous.orElse(firstError).getOrElse(NoType) + val res = untpd.Apply(tree, args).withType(errorType) + reportErrors(res, wtp) + res } else tree match { case tree: Block => diff --git a/tests/neg/19414-desugared.check b/tests/neg/19414-desugared.check new file mode 100644 index 000000000000..eb8389649348 --- /dev/null +++ b/tests/neg/19414-desugared.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/19414-desugared.scala:22:34 ------------------------------------------------------------ +22 | summon[BodySerializer[JsObject]] // error: Ambiguous given instances + | ^ + |Ambiguous given instances: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] of parameter writer of given instance given_BodySerializer_B diff --git a/tests/neg/19414-desugared.scala b/tests/neg/19414-desugared.scala new file mode 100644 index 000000000000..9fc16e2249a2 --- /dev/null +++ b/tests/neg/19414-desugared.scala @@ -0,0 +1,22 @@ +trait JsValue +trait JsObject extends JsValue + +trait Writer[T] +trait BodySerializer[-B] + +class Printer + +given Writer[JsValue] = ??? +given Writer[JsObject] = ??? + +// This is not an exact desugaring of the original code: currently the compiler +// actually changes the modifier of the parameter list from `using` to +// `implicit` when desugaring the context-bound `B: Writer` to `implicit writer: +// Writer[B]`, but we can't write it in user code as this is not valid syntax. +given [B](using + writer: Writer[B], + printer: Printer = new Printer +): BodySerializer[B] = ??? 
+ +def f: Unit = + summon[BodySerializer[JsObject]] // error: Ambiguous given instances diff --git a/tests/neg/19414.check b/tests/neg/19414.check new file mode 100644 index 000000000000..b865b4ba227c --- /dev/null +++ b/tests/neg/19414.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/19414.scala:15:34 ---------------------------------------------------------------------- +15 | summon[BodySerializer[JsObject]] // error: Ambiguous given instances + | ^ + |Ambiguous given instances: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] of a context parameter of given instance given_BodySerializer_B diff --git a/tests/neg/19414.scala b/tests/neg/19414.scala new file mode 100644 index 000000000000..bb275ad943b7 --- /dev/null +++ b/tests/neg/19414.scala @@ -0,0 +1,15 @@ +trait JsValue +trait JsObject extends JsValue + +trait Writer[T] +trait BodySerializer[-B] + +class Printer + +given Writer[JsValue] = ??? +given Writer[JsObject] = ??? + +given [B: Writer](using printer: Printer = new Printer): BodySerializer[B] = ??? 
+ +def f: Unit = + summon[BodySerializer[JsObject]] // error: Ambiguous given instances diff --git a/tests/neg/given-ambiguous-default-1.check b/tests/neg/given-ambiguous-default-1.check new file mode 100644 index 000000000000..734143b337d8 --- /dev/null +++ b/tests/neg/given-ambiguous-default-1.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/given-ambiguous-default-1.scala:18:23 -------------------------------------------------- +18 |def f: Unit = summon[B] // error: Ambiguous given instances + | ^ + |Ambiguous given instances: both given instance a1 and given instance a2 match type A of parameter a of given instance given_B diff --git a/tests/neg/given-ambiguous-default-1.scala b/tests/neg/given-ambiguous-default-1.scala new file mode 100644 index 000000000000..140736e9eee3 --- /dev/null +++ b/tests/neg/given-ambiguous-default-1.scala @@ -0,0 +1,18 @@ +/** This test checks that provided ambiguous given instances take precedence + * over default given arguments. In the following code, the compiler must + * report an "Ambiguous implicits" error for the parameter `a`, and must not + * use its default value. + * + * See also: + * - tests/neg/19414.scala + * - tests/neg/19414-desugared.scala + * - tests/neg/given-ambiguous-default-2.scala + */ + +class A +class B +given a1: A = ??? +given a2: A = ??? +given (using a: A = A()): B = ??? 
+ +def f: Unit = summon[B] // error: Ambiguous given instances diff --git a/tests/neg/given-ambiguous-default-2.check b/tests/neg/given-ambiguous-default-2.check new file mode 100644 index 000000000000..25e9914e8288 --- /dev/null +++ b/tests/neg/given-ambiguous-default-2.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/given-ambiguous-default-2.scala:18:23 -------------------------------------------------- +18 |def f: Unit = summon[C] // error: Ambiguous given instances + | ^ + |Ambiguous given instances: both given instance a1 and given instance a2 match type A of parameter a of given instance given_C diff --git a/tests/neg/given-ambiguous-default-2.scala b/tests/neg/given-ambiguous-default-2.scala new file mode 100644 index 000000000000..9e639b66f3d1 --- /dev/null +++ b/tests/neg/given-ambiguous-default-2.scala @@ -0,0 +1,18 @@ +/** This test checks that provided given instances take precedence over default + * given arguments, even when there are multiple default arguments. Before the + * fix for issue #19414, this code would compile without errors. + * + * See also: + * - tests/neg/given-ambiguous-default-1.scala + * - tests/neg/19414.scala + * - tests/neg/19414-desugared.scala + */ + +class A +class B +class C +given a1: A = ??? +given a2: A = ??? +given (using a: A = A(), b: B = B()): C = ??? + +def f: Unit = summon[C] // error: Ambiguous given instances diff --git a/tests/neg/i8827a.check b/tests/neg/i8827a.check index 3d6c2bfa500b..8ebc50caf128 100644 --- a/tests/neg/i8827a.check +++ b/tests/neg/i8827a.check @@ -1,12 +1,7 @@ -- [E172] Type Error: tests/neg/i8827a.scala:16:26 --------------------------------------------------------------------- 16 | summon[Order[List[Foo]]] // error | ^ - | No given instance of type pkg.Order[List[pkg.Foo]] was found for parameter x of method summon in object Predef. 
- | I found: - | - | pkg.Order.orderList[pkg.Foo](/* missing */summon[pkg.Order[pkg.Foo]]) - | - | But no implicit values were found that match type pkg.Order[pkg.Foo]. + | No given instance of type pkg.Order[pkg.Foo] was found for parameter orderA of method orderList in object Order | | The following import might fix the problem: | diff --git a/tests/neg/i8827b.check b/tests/neg/i8827b.check index 6848c53aee28..de726ede38d1 100644 --- a/tests/neg/i8827b.check +++ b/tests/neg/i8827b.check @@ -1,12 +1,7 @@ -- [E172] Type Error: tests/neg/i8827b.scala:16:28 --------------------------------------------------------------------- 16 | summon[Order[Option[Foo]]] // error | ^ - |No given instance of type pkg.Order[Option[pkg.Foo]] was found for parameter x of method summon in object Predef. - |I found: - | - | pkg.Order.given_Order_Option[pkg.Foo](/* missing */summon[pkg.Order[pkg.Foo]]) - | - |But no implicit values were found that match type pkg.Order[pkg.Foo]. + |No given instance of type pkg.Order[pkg.Foo] was found for parameter orderA of given instance given_Order_Option in object Order | |The following import might fix the problem: | diff --git a/tests/neg/i9568.check b/tests/neg/i9568.check index 3f318d0b0111..744023714a69 100644 --- a/tests/neg/i9568.check +++ b/tests/neg/i9568.check @@ -4,13 +4,10 @@ | No given instance of type => Monad[F] was found for parameter ev of method blaMonad in object Test. | I found: | - | Test.blaMonad[F², S](Test.blaMonad[F³, S²]) + | Test.blaMonad[F², S] | - | But method blaMonad in object Test does not match type => Monad[F²] + | But method blaMonad in object Test does not match type => Monad[F] | | where: F is a type variable with constraint <: [_] =>> Any | F² is a type variable with constraint <: [_] =>> Any - | F³ is a type variable with constraint <: [_] =>> Any - | S is a type variable - | S² is a type variable | . 
diff --git a/tests/neg/implicitSearch.check b/tests/neg/implicitSearch.check index e8efc744ac0a..01325c5bf736 100644 --- a/tests/neg/implicitSearch.check +++ b/tests/neg/implicitSearch.check @@ -1,12 +1,7 @@ -- [E172] Type Error: tests/neg/implicitSearch.scala:13:12 ------------------------------------------------------------- 13 | sort(xs) // error (with a partially constructed implicit argument shown) | ^ - | No given instance of type Test.Ord[List[List[T]]] was found for parameter o of method sort in object Test. - | I found: - | - | Test.listOrd[List[T]](Test.listOrd[T](/* missing */summon[Test.Ord[T]])) - | - | But no implicit values were found that match type Test.Ord[T]. + | No given instance of type Test.Ord[T] was found for parameter o of method listOrd in object Test -- [E172] Type Error: tests/neg/implicitSearch.scala:15:38 ------------------------------------------------------------- 15 | listOrd(listOrd(implicitly[Ord[T]] /*not found*/)) // error | ^ diff --git a/tests/neg/missing-implicit3.check b/tests/neg/missing-implicit3.check index c58b4430f3fe..1b1df3d5a46d 100644 --- a/tests/neg/missing-implicit3.check +++ b/tests/neg/missing-implicit3.check @@ -1,14 +1,9 @@ -- [E172] Type Error: tests/neg/missing-implicit3.scala:13:36 ---------------------------------------------------------- 13 |val sortedFoos = sort(List(new Foo)) // error | ^ - | No given instance of type ord.Ord[ord.Foo] was found for a context parameter of method sort in package ord. - | I found: + |No given instance of type ord.Foo => Comparable[? >: ord.Foo] was found for parameter x$1 of given instance ordered in object Ord | - | ord.Ord.ordered[ord.Foo](/* missing */summon[ord.Foo => Comparable[? >: ord.Foo]]) + |The following import might make progress towards fixing the problem: | - | But no implicit values were found that match type ord.Foo => Comparable[? >: ord.Foo]. 
- | - | The following import might make progress towards fixing the problem: - | - | import scala.math.Ordered.orderingToOrdered + | import scala.math.Ordered.orderingToOrdered | From 0890e7ae50f72592928e350c450ca949ff1a3873 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Thu, 25 Apr 2024 09:51:16 +0200 Subject: [PATCH 181/465] Type `addImplicitsArgs` result with the first failure --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index eb3dcae69181..628e33e353bf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3902,7 +3902,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => () val args = implicitArgs(wtp.paramInfos, 0, pt) - if (args.tpes.exists(_.isInstanceOf[SearchFailureType])) { + val firstFailure = args.tpes.find(_.isInstanceOf[SearchFailureType]) + if (firstFailure.isDefined) { // If there are several arguments, some arguments might already // have influenced the context, binding variables, but later ones // might fail. 
In that case the constraint and instantiated variables @@ -3938,10 +3939,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer retyped else - val firstNonAmbiguous = args.tpes.find(tp => tp.isError && !tp.isInstanceOf[AmbiguousImplicits]) - def firstError = args.tpes.find(_.isError) - val errorType = firstNonAmbiguous.orElse(firstError).getOrElse(NoType) - val res = untpd.Apply(tree, args).withType(errorType) + val res = untpd.Apply(tree, args).withType(firstFailure.get) reportErrors(res, wtp) res } From 7c67d7c113085e83e26b043fe82cbafb6de1c708 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Thu, 25 Apr 2024 10:59:35 +0300 Subject: [PATCH 182/465] Fix implicitNotFound message for type aliases (#19343) Fix #7092 --- .../dotty/tools/dotc/reporting/messages.scala | 52 ++++++++++++------- .../dotty/tools/vulpix/ParallelTesting.scala | 3 +- tests/neg/i4986c.check | 2 +- tests/neg/i7092.check | 32 ++++++++++++ tests/neg/i7092.scala | 31 +++++++++++ 5 files changed, 98 insertions(+), 22 deletions(-) create mode 100644 tests/neg/i7092.check create mode 100644 tests/neg/i7092.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 5fc5b4ae66b0..07ab1f21d6a0 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -2821,19 +2821,27 @@ class MissingImplicitArgument( val idx = paramNames.indexOf(name) if (idx >= 0) Some(i"${args(idx)}") else None """\$\{\s*([^}\s]+)\s*\}""".r.replaceAllIn(raw, (_: Regex.Match) match - case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("")).nn + case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("?" + v)).nn ) /** @param rawMsg Message template with variables, e.g. 
"Variable A is ${A}" * @param sym Symbol of the annotated type or of the method whose parameter was annotated + * @param paramNames Names of type parameters to substitute with `args` in the message template + * @param args Resolved type arguments to substitute for `paramNames` in the message template * @param substituteType Function substituting specific types for abstract types associated with variables, e.g A -> Int */ - def formatAnnotationMessage(rawMsg: String, sym: Symbol, substituteType: Type => Type)(using Context): String = + def formatAnnotationMessage( + rawMsg: String, + sym: Symbol, + paramNames: List[Name], + args: List[Type], + substituteType: Type => Type, + )(using Context): String = val substitutableTypesSymbols = substitutableTypeSymbolsInScope(sym) userDefinedErrorString( rawMsg, - paramNames = substitutableTypesSymbols.map(_.name.unexpandedName.toString), - args = substitutableTypesSymbols.map(_.typeRef).map(substituteType) + paramNames = (paramNames ::: substitutableTypesSymbols.map(_.name)).map(_.unexpandedName.toString), + args = args ::: substitutableTypesSymbols.map(_.typeRef).map(substituteType) ) /** Extract a user defined error message from a symbol `sym` @@ -2845,14 +2853,17 @@ class MissingImplicitArgument( msg <- ann.argumentConstantString(0) yield msg - def userDefinedImplicitNotFoundTypeMessageFor(sym: Symbol)(using Context): Option[String] = - for - rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) - if Feature.migrateTo3 || sym != defn.Function1 - // Don't inherit "No implicit view available..." 
message if subtypes of Function1 are not treated as implicit conversions anymore - yield - val substituteType = (_: Type).asSeenFrom(pt, sym) - formatAnnotationMessage(rawMsg, sym, substituteType) + def userDefinedImplicitNotFoundTypeMessageFor( + sym: Symbol, + params: List[ParamInfo] = Nil, + args: List[Type] = Nil + )(using Context): Option[String] = for + rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) + if Feature.migrateTo3 || sym != defn.Function1 + // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore + yield + val paramNames = params.map(_.paramName) + formatAnnotationMessage(rawMsg, sym, paramNames, args, _.asSeenFrom(pt, sym)) /** Extracting the message from a method parameter, e.g. in * @@ -2867,19 +2878,22 @@ class MissingImplicitArgument( val targs = tpd.typeArgss(applTree).flatten val methodOwner = fn.symbol.owner val methodOwnerType = tpd.qualifier(fn).tpe - val methodTypeParams = fn.symbol.paramSymss.flatten.filter(_.isType) + val methodTypeParams = fn.symbol.paramSymss.flatten.withFilter(_.isType).map(_.name) val methodTypeArgs = targs.map(_.tpe) - val substituteType = (_: Type).asSeenFrom(methodOwnerType, methodOwner).subst(methodTypeParams, methodTypeArgs) - formatAnnotationMessage(rawMsg, sym.owner, substituteType) + formatAnnotationMessage(rawMsg, sym.owner, methodTypeParams, methodTypeArgs, _.asSeenFrom(methodOwnerType, methodOwner)) def userDefinedImplicitNotFoundTypeMessage(using Context): Option[String] = - def recur(tp: Type): Option[String] = tp match + def recur(tp: Type, params: List[ParamInfo] = Nil, args: List[Type] = Nil): Option[String] = tp match + case tp: AppliedType => + val tycon = tp.typeConstructor + val typeParams = if tycon.isLambdaSub then tycon.hkTypeParams else tycon.typeParams + recur(tycon, typeParams ::: params, tp.args ::: args) case tp: TypeRef => - val sym = tp.symbol - 
userDefinedImplicitNotFoundTypeMessageFor(sym).orElse(recur(tp.info)) + userDefinedImplicitNotFoundTypeMessageFor(tp.symbol, params, args) + .orElse(recur(tp.info)) case tp: ClassInfo => tp.baseClasses.iterator - .map(userDefinedImplicitNotFoundTypeMessageFor) + .map(userDefinedImplicitNotFoundTypeMessageFor(_)) .find(_.isDefined).flatten case tp: TypeProxy => recur(tp.superType) diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index 6ff0d3fba5ca..e7e5936a4b29 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -274,12 +274,11 @@ trait ParallelTesting extends RunnerOrchestration { self => */ final def diffTest(testSource: TestSource, checkFile: JFile, actual: List[String], reporters: Seq[TestReporter], logger: LoggedRunnable) = { for (msg <- FileDiff.check(testSource.title, actual, checkFile.getPath)) { - onFailure(testSource, reporters, logger, Some(msg)) - if (updateCheckFiles) { FileDiff.dump(checkFile.toPath.toString, actual) echo("Updated checkfile: " + checkFile.getPath) } else { + onFailure(testSource, reporters, logger, Some(msg)) val outFile = checkFile.toPath.resolveSibling(s"${checkFile.toPath.getFileName}.out").toString FileDiff.dump(outFile, actual) echo(FileDiff.diffMessage(checkFile.getPath, outFile)) diff --git a/tests/neg/i4986c.check b/tests/neg/i4986c.check index 8befc30f5a60..0517ae10e427 100644 --- a/tests/neg/i4986c.check +++ b/tests/neg/i4986c.check @@ -61,4 +61,4 @@ -- [E172] Type Error: tests/neg/i4986c.scala:62:19 --------------------------------------------------------------------- 62 | i.m[Option[Long]] // error | ^ - | String; List; [A, _] =>> List[Option[?]]; Int; Option[Long]; + | String; List; [A, _] =>> List[Option[?]]; Int; Option[Long]; ?XX diff --git a/tests/neg/i7092.check b/tests/neg/i7092.check new file mode 100644 index 000000000000..bd4bdc783a08 --- /dev/null +++ 
b/tests/neg/i7092.check @@ -0,0 +1,32 @@ +-- [E172] Type Error: tests/neg/i7092.scala:24:19 ---------------------------------------------------------------------- +24 | summon[F[String]] // error + | ^ + | Not found for String +-- [E172] Type Error: tests/neg/i7092.scala:25:19 ---------------------------------------------------------------------- +25 | summon[G[String]] // error + | ^ + | Not found for String +-- [E172] Type Error: tests/neg/i7092.scala:26:16 ---------------------------------------------------------------------- +26 | summon[H[Int]] // error + | ^ + | Not found for Int, ?B +-- [E172] Type Error: tests/neg/i7092.scala:27:23 ---------------------------------------------------------------------- +27 | summon[H[Int][Float]] // error + | ^ + | Not found for Int, Float +-- [E172] Type Error: tests/neg/i7092.scala:28:18 ---------------------------------------------------------------------- +28 | summon[AAA[Int]] // error + | ^ + | Not found for Int +-- [E172] Type Error: tests/neg/i7092.scala:29:25 ---------------------------------------------------------------------- +29 | summon[AAA[Int][Float]] // error + | ^ + | Not found for Int +-- [E172] Type Error: tests/neg/i7092.scala:30:19 ---------------------------------------------------------------------- +30 | summon[op.F[Int]] // error + | ^ + | Could not find Int +-- [E172] Type Error: tests/neg/i7092.scala:31:28 ---------------------------------------------------------------------- +31 | summon[String =!:= String] // error + | ^ + | Cannot proof type inequality because types are equal: String =:= String diff --git a/tests/neg/i7092.scala b/tests/neg/i7092.scala new file mode 100644 index 000000000000..9481008b9d70 --- /dev/null +++ b/tests/neg/i7092.scala @@ -0,0 +1,31 @@ +import scala.annotation.implicitNotFound +import scala.util.NotGiven + +@implicitNotFound("Not found for ${A}") +type F[A] + +@implicitNotFound("Not found for ${A}") +trait G[A] + +@implicitNotFound("Not found for ${A}, ${B}") +type H 
= [A] =>> [B] =>> (A, B) + +@implicitNotFound("Not found for ${A}") +type AAA = [A] =>> [A] =>> A + +object op: + @implicitNotFound("Could not find ${A}") + opaque type F[A] = A + +@implicitNotFound("Cannot proof type inequality because types are equal: ${A} =:= ${B}") +type =!:=[A, B] = NotGiven[A =:= B] + +object Test: + summon[F[String]] // error + summon[G[String]] // error + summon[H[Int]] // error + summon[H[Int][Float]] // error + summon[AAA[Int]] // error + summon[AAA[Int][Float]] // error + summon[op.F[Int]] // error + summon[String =!:= String] // error From 4d7e6add055277ca56458b492b24a2b6bc3f07c4 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 25 Apr 2024 11:34:36 +0200 Subject: [PATCH 183/465] Make MethodTypeKind into an enum --- .../quoted/runtime/impl/QuotesImpl.scala | 19 ++++++++++--------- library/src/scala/quoted/Quotes.scala | 19 +++++++------------ 2 files changed, 17 insertions(+), 21 deletions(-) diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index cf26d6f16ef3..492d214a2c8d 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2204,13 +2204,6 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler case _ => None end MethodOrPolyTypeTest - type MethodTypeKind = dotc.core.Types.MethodTypeCompanion - - object MethodTypeKind extends MethodTypeKindModule: - val Plain: MethodTypeKind = Types.MethodType - val Contextual: MethodTypeKind = Types.ContextualMethodType - val Implicit: MethodTypeKind = Types.ImplicitMethodType - type MethodType = dotc.core.Types.MethodType object MethodTypeTypeTest extends TypeTest[TypeRepr, MethodType]: @@ -2223,7 +2216,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def apply(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): 
MethodType = Types.MethodType(paramNames.map(_.toTermName))(paramInfosExp, resultTypeExp) def apply(kind: MethodTypeKind)(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType = - kind.apply(paramNames.map(_.toTermName))(paramInfosExp, resultTypeExp) + val companion = kind match + case MethodTypeKind.Contextual => Types.ContextualMethodType + case MethodTypeKind.Implicit => Types.ImplicitMethodType + case MethodTypeKind.Plain => Types.MethodType + companion.apply(paramNames.map(_.toTermName))(paramInfosExp, resultTypeExp) def unapply(x: MethodType): (List[String], List[TypeRepr], TypeRepr) = (x.paramNames.map(_.toString), x.paramTypes, x.resType) end MethodType @@ -2233,7 +2230,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def isErased: Boolean = false def isImplicit: Boolean = self.isImplicitMethod def isContextual: Boolean = self.isContextualMethod - def methodTypeKind: MethodTypeKind = self.companion + def methodTypeKind: MethodTypeKind = + self.companion match + case Types.ContextualMethodType => MethodTypeKind.Contextual + case Types.ImplicitMethodType => MethodTypeKind.Implicit + case _ => MethodTypeKind.Plain def param(idx: Int): TypeRepr = self.newParamRef(idx) def erasedParams: List[Boolean] = self.erasedParams diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index e76c924ece20..e4fcc0ce29d0 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -212,7 +212,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * +- TypeBounds * +- NoPrefix * - * +- MethodTypeKind + * +- MethodTypeKind -+- Contextual + * +- Implicit + * +- Plain * * +- Selector -+- SimpleSelector * +- RenameSelector @@ -3237,20 +3239,13 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given MethodOrPolyTypeTest: TypeTest[TypeRepr, MethodOrPoly] /** Type 
which decides on the kind of parameter list represented by `MethodType`. */ - type MethodTypeKind - - /** Module object of `type MethodKind` */ - val MethodTypeKind: MethodTypeKindModule - - /** Methods of the module object `val MethodKind` */ - trait MethodTypeKindModule { this: MethodTypeKind.type => + enum MethodTypeKind: /** Represents a parameter list without any implicitness of parameters, like (x1: X1, x2: X2, ...) */ - val Plain: MethodTypeKind + case Plain /** Represents a parameter list with implicit parameters, like `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)`, `(using x1: X1, ..., xn: Xn)` */ - val Implicit: MethodTypeKind + case Implicit /** Represents a parameter list of a contextual method, like `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ - val Contextual: MethodTypeKind - } + case Contextual /** Type of the definition of a method taking a single list of parameters. It's return type may be a MethodType. */ type MethodType <: MethodOrPoly From 86e2fe5ee6d3431375f14de2563f6822f79c6bdc Mon Sep 17 00:00:00 2001 From: Chris Pado Date: Thu, 25 Apr 2024 03:53:19 -0700 Subject: [PATCH 184/465] Support src filter in -WConf (Closes #17635) (#18783) Fixes #18782 --- .../tools/dotc/config/ScalaSettings.scala | 4 + .../dotty/tools/dotc/reporting/WConf.scala | 15 ++- .../dotc/config/ScalaSettingsTests.scala | 100 ++++++++++++++++++ 3 files changed, 118 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index db867f394297..6520546ec0f9 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -247,6 +247,9 @@ private sealed trait WarningSettings: | - Message name: name=PureExpressionInStatementPosition | The message name is printed with the warning in verbose warning mode. | + | - Source location: src=regex + | The regex is evaluated against the full source path. 
+ | |In verbose warning mode the compiler prints matching filters for warnings. |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`). @@ -266,6 +269,7 @@ private sealed trait WarningSettings: |Examples: | - change every warning into an error: -Wconf:any:error | - silence deprecations: -Wconf:cat=deprecation:s + | - silence warnings in src_managed directory: -Wconf:src=src_managed/.*:s | |Note: on the command-line you might need to quote configurations containing `*` or `&` |to prevent the shell from expanding patterns.""".stripMargin, diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index cc0a63cb1532..54a6fc14e054 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -5,7 +5,9 @@ package reporting import scala.language.unsafeNulls import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.util.SourcePosition +import dotty.tools.dotc.util.{NoSourcePosition, SourcePosition} +import dotty.tools.dotc.interfaces.SourceFile +import dotty.tools.dotc.reporting.MessageFilter.SourcePattern import java.util.regex.PatternSyntaxException import scala.annotation.internal.sharable @@ -21,11 +23,19 @@ enum MessageFilter: val noHighlight = message.msg.message.replaceAll("\\e\\[[\\d;]*[^\\d;]","") pattern.findFirstIn(noHighlight).nonEmpty case MessageID(errorId) => message.msg.errorId == errorId + case SourcePattern(pattern) => + val source = message.position.orElse(NoSourcePosition).source() + val path = source.jfile() + .map(_.toPath.toAbsolutePath.toUri.normalize().getRawPath) + .orElse(source.path()) + pattern.findFirstIn(path).nonEmpty + case None => false case Any, Deprecated, Feature, Unchecked, None case MessagePattern(pattern: Regex) case MessageID(errorId: ErrorMessageID) + case SourcePattern(pattern: Regex) enum Action: case 
Error, Warning, Verbose, Info, Silent @@ -84,6 +94,9 @@ object WConf: case "feature" => Right(Feature) case "unchecked" => Right(Unchecked) case _ => Left(s"unknown category: $conf") + + case "src" => regex(conf).map(SourcePattern.apply) + case _ => Left(s"unknown filter: $filter") case _ => Left(s"unknown filter: $s") diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index e958a5925fce..2049b1bf5486 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -7,6 +7,11 @@ import dotty.tools.dotc.config.ScalaSettingCategories._ import org.junit.Test import org.junit.Assert._ import core.Decorators.toMessage +import dotty.tools.io.{Path, PlainFile} + +import java.net.URI +import java.nio.file.Files +import scala.util.Using class ScalaSettingsTests: @@ -96,5 +101,100 @@ class ScalaSettingsTests: assertEquals(Action.Silent, sut.action(depr)) + private def wconfSrcFilterTest(argsStr: String, + warning: reporting.Diagnostic.Warning): Either[List[String], reporting.Action] = + import reporting.Diagnostic + val settings = new ScalaSettings + val args = ArgsSummary(settings.defaultState, List(argsStr), errors = Nil, warnings = Nil) + val proc = settings.processArguments(args, processAll = true, skipped = Nil) + val wconfStr = settings.Wconf.valueIn(proc.sstate) + val wconf = reporting.WConf.fromSettings(wconfStr) + wconf.map(_.action(warning)) + + @Test def `WConf src filter silences warnings from a matching path for virtual file`: Unit = + val result = wconfSrcFilterTest( + argsStr = "-Wconf:src=path/.*:s", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), + span = util.Spans.Span(1L) + ) + ) + ) + assertEquals(result, Right(reporting.Action.Silent)) + + @Test def `WConf src 
filter doesn't silence warnings from a non-matching path`: Unit = + val result = wconfSrcFilterTest( + argsStr = "-Wconf:src=another/.*:s", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), + span = util.Spans.Span(1L) + ) + ) + ) + assertEquals(result, Right(reporting.Action.Warning)) + + @Test def `WConf src filter silences warnings from a matching path for real file`: Unit = + val result = Using.resource(Files.createTempFile("myfile", ".scala").nn) { file => + wconfSrcFilterTest( + argsStr = "-Wconf:src=myfile.*?\\.scala:s", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile(new PlainFile(Path(file)), "UTF-8"), + span = util.Spans.Span(1L) + ) + ) + ) + }(Files.deleteIfExists(_)) + assertEquals(result, Right(reporting.Action.Silent)) + + @Test def `WConf src filter doesn't silence warnings from a non-matching path for real file`: Unit = + val result = Using.resource(Files.createTempFile("myfile", ".scala").nn) { file => + wconfSrcFilterTest( + argsStr = "-Wconf:src=another.*?\\.scala:s", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile(new PlainFile(Path(file)), "UTF-8"), + span = util.Spans.Span(1L) + ) + ) + ) + }(Files.deleteIfExists(_)) + assertEquals(result, Right(reporting.Action.Warning)) + + @Test def `WConf src filter reports an error on an invalid regex`: Unit = + val result = wconfSrcFilterTest( + argsStr = """-Wconf:src=\:s""", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), + span = util.Spans.Span(1L) + ) + ), + ) + assertTrue( + result.left.exists(errors => + errors.sizeIs == 1 && errors.headOption.exists(_.startsWith("invalid pattern")) + ) + ) + + @Test def `WConf src filter 
can be mixed with other filters with rightmost taking precedence`: Unit = + val result = wconfSrcFilterTest( + argsStr = "-Wconf:src=.*:s,cat=deprecation:e", + warning = reporting.Diagnostic.DeprecationWarning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), + span = util.Spans.Span(1L) + ) + ) + ) + assertEquals(result, Right(reporting.Action.Error)) end ScalaSettingsTests From a0513b0ce5701d1f4078b4d567ce16aac1b43c19 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Thu, 25 Apr 2024 13:08:21 +0200 Subject: [PATCH 185/465] Make logLevel invisible (#20253) To silence this warning when opening SBT: ``` [warn] there are 2 keys that are not used by any other settings/tasks: [warn] [warn] * scala2-library-bootstrapped / Compile / compile / logLevel [warn] +- /Users/mbovel/dotty/project/Build.scala:1083 [warn] * scala2-library-cc / Compile / compile / logLevel [warn] +- /Users/mbovel/dotty/project/Build.scala:1083 [warn] [warn] note: a setting might still be used by a command; to exclude a key from this `lintUnused` check [warn] either append it to `Global / excludeLintKeys` or call .withRank(KeyRanks.Invisible) on the key ``` --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 8142e5e0744d..f4c5dfe01338 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1085,7 +1085,7 @@ object Build { scalacOptions += "-Yscala2Unpickler:never", scalacOptions += "-Yno-experimental", scalacOptions -= "-Xfatal-warnings", - Compile / compile / logLevel := Level.Error, + Compile / compile / logLevel.withRank(KeyRanks.Invisible) := Level.Error, ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), libraryDependencies += From 48bfe814f675b7d09563bb2eb54d75ddec8621d2 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Thu, 25 Apr 2024 13:45:26 +0200 Subject: [PATCH 186/465] Fix conflict with 
`new ScalaSettings` Conflict between #18783 and #19766 --- compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index 2049b1bf5486..b2de0d6423df 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -104,7 +104,7 @@ class ScalaSettingsTests: private def wconfSrcFilterTest(argsStr: String, warning: reporting.Diagnostic.Warning): Either[List[String], reporting.Action] = import reporting.Diagnostic - val settings = new ScalaSettings + val settings = ScalaSettings val args = ArgsSummary(settings.defaultState, List(argsStr), errors = Nil, warnings = Nil) val proc = settings.processArguments(args, processAll = true, skipped = Nil) val wconfStr = settings.Wconf.valueIn(proc.sstate) From 4d7354bae2812343b59bde92df747a8fb3ac13e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Thu, 25 Apr 2024 13:45:04 +0200 Subject: [PATCH 187/465] Unify export target validation. Instead of having separate checks for members and classes/modules, we unify them. This gives simpler code and will make it easier to allow nested objects. Additionally: We report a proper error when attempting to export a JS native member (instead of crashing the compiler). 
Forward port of the upstream commit https://github.com/scala-js/scala-js/commit/14de17dc632d5a4983bdcce18f79c09b3ceb61bc --- .../dotc/transform/sjs/PrepJSExports.scala | 308 +++++++++--------- .../dotc/transform/sjs/PrepJSInterop.scala | 9 +- 2 files changed, 155 insertions(+), 162 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index dbd6e1a8f412..228d85b86986 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -45,100 +45,31 @@ object PrepJSExports { private final case class ExportInfo(jsName: String, destination: ExportDestination)(val pos: SrcPos) - /** Checks a class or module class for export. + /** Generate exports for the given Symbol. * - * Note that non-module Scala classes are never actually exported; their constructors are. - * However, the checks are performed on the class when the class is annotated. + * - Registers top-level and static exports. + * - Returns (non-static) exporters for this symbol. */ - def checkClassOrModuleExports(sym: Symbol)(using Context): Unit = { - val exports = exportsOf(sym) - if (exports.nonEmpty) - checkClassOrModuleExports(sym, exports.head.pos) - } + def genExport(sym: Symbol)(using Context): List[Tree] = { + // Scala classes are never exported: Their constructors are. + val isScalaClass = sym.isClass && !sym.isOneOf(Trait | Module) && !isJSAny(sym) - /** Generate the exporter for the given DefDef or ValDef. - * - * If this DefDef is a constructor, it is registered to be exported by - * GenJSCode instead and no trees are returned. - */ - def genExportMember(baseSym: Symbol)(using Context): List[Tree] = { - val clsSym = baseSym.owner + val exports = + if (isScalaClass) Nil + else exportsOf(sym) - val exports = exportsOf(baseSym) + assert(exports.isEmpty || !sym.is(Bridge), + s"found exports for bridge symbol $sym. 
exports: $exports") - // Helper function for errors - def err(msg: String): List[Tree] = { - report.error(msg, exports.head.pos) - Nil - } - - def memType = if (baseSym.isConstructor) "constructor" else "method" - - if (exports.isEmpty) { - Nil - } else if (!hasLegalExportVisibility(baseSym)) { - err(s"You may only export public and protected ${memType}s") - } else if (baseSym.is(Inline)) { - err("You may not export an inline method") - } else if (isJSAny(clsSym)) { - err(s"You may not export a $memType of a subclass of js.Any") - } else if (baseSym.isLocalToBlock) { - err("You may not export a local definition") - } else if (hasIllegalRepeatedParam(baseSym)) { - err(s"In an exported $memType, a *-parameter must come last (through all parameter lists)") - } else if (hasIllegalDefaultParam(baseSym)) { - err(s"In an exported $memType, all parameters with defaults must be at the end") - } else if (baseSym.isConstructor) { - // Constructors do not need an exporter method. We only perform the checks at this phase. - checkClassOrModuleExports(clsSym, exports.head.pos) + if (sym.isClass || sym.isConstructor) { + /* we can generate constructors, classes and modules entirely in the backend, + * since they do not need inheritance and such. + */ Nil } else { - assert(!baseSym.is(Bridge), s"genExportMember called for bridge symbol $baseSym") + // For normal exports, generate exporter methods. val normalExports = exports.filter(_.destination == ExportDestination.Normal) - normalExports.flatMap(exp => genExportDefs(baseSym, exp.jsName, exp.pos.span)) - } - } - - /** Check a class or module for export. 
- * - * There are 2 ways that this method can be reached: - * - via `registerClassExports` - * - via `genExportMember` (constructor of Scala class) - */ - private def checkClassOrModuleExports(sym: Symbol, errPos: SrcPos)(using Context): Unit = { - val isMod = sym.is(ModuleClass) - - def err(msg: String): Unit = - report.error(msg, errPos) - - def hasAnyNonPrivateCtor: Boolean = - sym.info.decl(nme.CONSTRUCTOR).hasAltWith(denot => !isPrivateMaybeWithin(denot.symbol)) - - if (sym.is(Trait)) { - err("You may not export a trait") - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { - err("You may not export a native JS " + (if (isMod) "object" else "class")) - } else if (!hasLegalExportVisibility(sym)) { - err("You may only export public and protected " + (if (isMod) "objects" else "classes")) - } else if (isJSAny(sym.owner)) { - err("You may not export a " + (if (isMod) "object" else "class") + " in a subclass of js.Any") - } else if (sym.isLocalToBlock) { - err("You may not export a local " + (if (isMod) "object" else "class")) - } else if (!sym.isStatic) { - if (isMod) - err("You may not export a nested object") - else - err("You may not export a nested class. Create an exported factory method in the outer class to work around this limitation.") - } else if (sym.is(Abstract, butNot = Trait) && !isJSAny(sym)) { - err("You may not export an abstract class") - } else if (!isMod && !hasAnyNonPrivateCtor) { - /* This test is only relevant for JS classes but doesn't hurt for Scala - * classes as we could not reach it if there were only private - * constructors. 
- */ - err("You may not export a class that has only private constructors") - } else { - // OK + normalExports.flatMap(exp => genExportDefs(sym, exp.jsName, exp.pos.span)) } } @@ -172,8 +103,22 @@ object PrepJSExports { Nil } + val allAnnots = { + val allAnnots0 = directAnnots ++ unitAnnots + + if (allAnnots0.nonEmpty) { + val errorPos: SrcPos = + if (allAnnots0.head.symbol == JSExportAllAnnot) sym + else allAnnots0.head.tree + if (checkExportTarget(sym, errorPos)) allAnnots0 + else Nil // prevent code generation from running to avoid crashes. + } else { + Nil + } + } + val allExportInfos = for { - annot <- directAnnots ++ unitAnnots + annot <- allAnnots } yield { val isExportAll = annot.symbol == JSExportAllAnnot val isTopLevelExport = annot.symbol == JSExportTopLevelAnnot @@ -217,48 +162,10 @@ object PrepJSExports { } } - // Enforce proper setter signature - if (sym.isJSSetter) - checkSetterSignature(sym, exportPos, exported = true) - // Enforce no __ in name if (!isTopLevelExport && name.contains("__")) report.error("An exported name may not contain a double underscore (`__`)", exportPos) - /* Illegal function application exports, i.e., method named 'apply' - * without an explicit export name. - */ - if (isMember && !hasExplicitName && sym.name == nme.apply) { - destination match { - case ExportDestination.Normal => - def shouldBeTolerated = { - isExportAll && directAnnots.exists { annot => - annot.symbol == JSExportAnnot && - annot.arguments.nonEmpty && - annot.argumentConstantString(0).contains("apply") - } - } - - // Don't allow apply without explicit name - if (!shouldBeTolerated) { - report.error( - "A member cannot be exported to function application. 
" + - "Add @JSExport(\"apply\") to export under the name apply.", - exportPos) - } - - case _: ExportDestination.TopLevel => - throw new AssertionError( - em"Found a top-level export without an explicit name at ${exportPos.sourcePos}") - - case ExportDestination.Static => - report.error( - "A member cannot be exported to function application as static. " + - "Use @JSExportStatic(\"apply\") to export it under the name 'apply'.", - exportPos) - } - } - val symOwner = if (sym.isConstructor) sym.owner.owner else sym.owner @@ -266,9 +173,18 @@ object PrepJSExports { // Destination-specific restrictions destination match { case ExportDestination.Normal => + // Disallow @JSExport at the top-level, as well as on objects and classes + if (symOwner.is(Package) || symOwner.isPackageObject) { + report.error("@JSExport is forbidden on top-level definitions. Use @JSExportTopLevel instead.", exportPos) + } else if (!isMember && !sym.is(Trait)) { + report.error( + "@JSExport is forbidden on objects and classes. Use @JSExport'ed factory methods instead.", + exportPos) + } + // Make sure we do not override the default export of toString def isIllegalToString = { - isMember && name == "toString" && sym.name != nme.toString_ && + name == "toString" && sym.name != nme.toString_ && sym.info.paramInfoss.forall(_.isEmpty) && !sym.isJSGetter } if (isIllegalToString) { @@ -277,13 +193,25 @@ object PrepJSExports { exportPos) } - // Disallow @JSExport at the top-level, as well as on objects and classes - if (symOwner.is(Package) || symOwner.isPackageObject) { - report.error("@JSExport is forbidden on top-level definitions. Use @JSExportTopLevel instead.", exportPos) - } else if (!isMember && !sym.is(Trait)) { - report.error( - "@JSExport is forbidden on objects and classes. Use @JSExport'ed factory methods instead.", - exportPos) + /* Illegal function application exports, i.e., method named 'apply' + * without an explicit export name. 
+ */ + if (!hasExplicitName && sym.name == nme.apply) { + def shouldBeTolerated = { + isExportAll && directAnnots.exists { annot => + annot.symbol == JSExportAnnot && + annot.arguments.nonEmpty && + annot.argumentConstantString(0).contains("apply") + } + } + + // Don't allow apply without explicit name + if (!shouldBeTolerated) { + report.error( + "A member cannot be exported to function application. " + + "Add @JSExport(\"apply\") to export under the name apply.", + exportPos) + } } case _: ExportDestination.TopLevel => @@ -292,10 +220,8 @@ object PrepJSExports { else if (sym.is(Method, butNot = Accessor) && sym.isJSProperty) report.error("You may not export a getter or a setter to the top level", exportPos) - /* Disallow non-static methods. - * Note: Non-static classes have more specific error messages in checkClassOrModuleExports. - */ - if (sym.isTerm && (!symOwner.isStatic || !symOwner.is(ModuleClass))) + // Disallow non-static definitions. + if (!symOwner.isStatic || !symOwner.is(ModuleClass)) report.error("Only static objects may export their members to the top level", exportPos) // The top-level name must be a valid JS identifier @@ -320,11 +246,17 @@ object PrepJSExports { if (isMember) { if (sym.is(Lazy)) report.error("You may not export a lazy val as static", exportPos) + + // Illegal function application export + if (!hasExplicitName && sym.name == nme.apply) { + report.error( + "A member cannot be exported to function application as " + + "static. 
Use @JSExportStatic(\"apply\") to export it under " + + "the name 'apply'.", + exportPos) + } } else { - if (sym.is(Trait)) - report.error("You may not export a trait as static.", exportPos) - else - report.error("Implementation restriction: cannot export a class or object as static", exportPos) + report.error("Implementation restriction: cannot export a class or object as static", exportPos) } } @@ -342,9 +274,9 @@ object PrepJSExports { } .foreach(_ => report.warning("Found duplicate @JSExport", sym)) - /* Make sure that no field is exported *twice* as static, nor both as - * static and as top-level (it is possible to export a field several times - * as top-level, though). + /* Check that no field is exported *twice* as static, nor both as static + * and as top-level (it is possible to export a field several times as + * top-level, though). */ if (!sym.is(Method)) { for (firstStatic <- allExportInfos.find(_.destination == ExportDestination.Static)) { @@ -370,6 +302,78 @@ object PrepJSExports { allExportInfos.distinct } + /** Checks whether the given target is suitable for export and exporting + * should be performed. + * + * Reports any errors for unsuitable targets. + * @returns a boolean indicating whether exporting should be performed. Note: + * a result of true is not a guarantee that no error was emitted. But it is + * a guarantee that the target is not "too broken" to run the rest of + * the generation. This approximation is done to avoid having to complicate + * shared code verifying conditions. 
+ */ + private def checkExportTarget(sym: Symbol, errPos: SrcPos)(using Context): Boolean = { + def err(msg: String): Boolean = { + report.error(msg, errPos) + false + } + + def hasLegalExportVisibility(sym: Symbol): Boolean = + sym.isPublic || sym.is(Protected, butNot = Local) + + def isMemberOfJSAny: Boolean = + isJSAny(sym.owner) || (sym.isConstructor && isJSAny(sym.owner.owner)) + + def hasIllegalRepeatedParam: Boolean = { + val paramInfos = sym.info.paramInfoss.flatten + paramInfos.nonEmpty && paramInfos.init.exists(_.isRepeatedParam) + } + + def hasIllegalDefaultParam: Boolean = { + sym.hasDefaultParams + && sym.paramSymss.flatten.reverse.dropWhile(_.is(HasDefault)).exists(_.is(HasDefault)) + } + + def hasAnyNonPrivateCtor: Boolean = + sym.info.member(nme.CONSTRUCTOR).hasAltWith(d => !isPrivateMaybeWithin(d.symbol)) + + if (sym.is(Trait)) { + err("You may not export a trait") + } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + err("You may not export a native JS definition") + } else if (!hasLegalExportVisibility(sym)) { + err("You may only export public and protected definitions") + } else if (sym.isConstructor && !hasLegalExportVisibility(sym.owner)) { + err("You may only export constructors of public and protected classes") + } else if (sym.is(Macro)) { + err("You may not export a macro") + } else if (isMemberOfJSAny) { + err("You may not export a member of a subclass of js.Any") + } else if (sym.isLocalToBlock) { + err("You may not export a local definition") + } else if (sym.isConstructor && sym.owner.isLocalToBlock) { + err("You may not export constructors of local classes") + } else if (hasIllegalRepeatedParam) { + err("In an exported method or constructor, a *-parameter must come last " + + "(through all parameter lists)") + } else if (hasIllegalDefaultParam) { + err("In an exported method or constructor, all parameters with " + + "defaults must be at the end") + } else if (sym.isConstructor && sym.owner.is(Abstract, butNot = Trait) && 
!isJSAny(sym)) { + err("You may not export an abstract class") + } else if (sym.isClass && !sym.is(ModuleClass) && isJSAny(sym) && !hasAnyNonPrivateCtor) { + /* This test is only relevant for JS classes: We'll complain on the + * individual exported constructors in case of a Scala class. + */ + err("You may not export a class that has only private constructors") + } else { + if (sym.isJSSetter) + checkSetterSignature(sym, errPos, exported = true) + + true // ok even if a setter has the wrong signature. + } + } + /** Generates an exporter for a DefDef including default parameter methods. */ private def genExportDefs(defSym: Symbol, jsName: String, span: Span)(using Context): List[Tree] = { val clsSym = defSym.owner.asClass @@ -448,20 +452,4 @@ object PrepJSExports { case _ => defn.AnyType } - - /** Whether the given symbol has a visibility that allows exporting */ - private def hasLegalExportVisibility(sym: Symbol)(using Context): Boolean = - sym.isPublic || sym.is(Protected, butNot = Local) - - /** Checks whether this type has a repeated parameter elsewhere than at the end of all the params. */ - private def hasIllegalRepeatedParam(sym: Symbol)(using Context): Boolean = { - val paramInfos = sym.info.paramInfoss.flatten - paramInfos.nonEmpty && paramInfos.init.exists(_.isRepeatedParam) - } - - /** Checks whether there are default parameters not at the end of the flattened parameter list. 
*/ - private def hasIllegalDefaultParam(sym: Symbol)(using Context): Boolean = { - sym.hasDefaultParams - && sym.paramSymss.flatten.reverse.dropWhile(_.is(HasDefault)).exists(_.is(HasDefault)) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index 610fca869ad2..f1c5f44b42c3 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -160,7 +160,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tree match { case tree: TypeDef if tree.isClassDef => - checkClassOrModuleExports(sym) + val exports = genExport(sym) + assert(exports.isEmpty, s"got non-empty exports for $sym") if (isJSAny(sym)) transformJSClassDef(tree) @@ -172,7 +173,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP case tree: ValOrDefDef => // Prepare exports - exporters.getOrElseUpdate(sym.owner, mutable.ListBuffer.empty) ++= genExportMember(sym) + val exports = genExport(sym) + if (exports.nonEmpty) + exporters.getOrElseUpdate(sym.owner, mutable.ListBuffer.empty) ++= exports if (sym.isLocalToBlock) super.transform(tree) @@ -247,6 +250,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP exporters.get(clsSym).fold { transformedTree } { exports => + assert(exports.nonEmpty, s"found empty exporters for $clsSym" ) + checkNoDoubleDeclaration(clsSym) cpy.Template(transformedTree)( From fc43000ac3bb51ac420460c6931805aa26c6df25 Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Thu, 25 Apr 2024 14:42:30 +0000 Subject: [PATCH 188/465] Make experimental final --- library/src-bootstrapped/scala/annotation/experimental.scala | 3 +-- .../src-non-bootstrapped/scala/annotation/experimental.scala | 3 +-- project/MiMaFilters.scala | 1 + tests/neg/experimentalExperimental.scala | 1 + 
tests/pos/experimentalExperimental.scala | 1 - 5 files changed, 4 insertions(+), 5 deletions(-) create mode 100644 tests/neg/experimentalExperimental.scala delete mode 100644 tests/pos/experimentalExperimental.scala diff --git a/library/src-bootstrapped/scala/annotation/experimental.scala b/library/src-bootstrapped/scala/annotation/experimental.scala index 185db51c07c1..634cfe12db7f 100644 --- a/library/src-bootstrapped/scala/annotation/experimental.scala +++ b/library/src-bootstrapped/scala/annotation/experimental.scala @@ -5,6 +5,5 @@ package scala.annotation * @see [[https://dotty.epfl.ch/docs/reference/other-new-features/experimental-defs]] * @syntax markdown */ -@deprecatedInheritance("Scheduled for being final in the future", "3.4.0") -class experimental(message: String) extends StaticAnnotation: +final class experimental(message: String) extends StaticAnnotation: def this() = this("") diff --git a/library/src-non-bootstrapped/scala/annotation/experimental.scala b/library/src-non-bootstrapped/scala/annotation/experimental.scala index dbc3296aa1ab..e879b47e12ff 100644 --- a/library/src-non-bootstrapped/scala/annotation/experimental.scala +++ b/library/src-non-bootstrapped/scala/annotation/experimental.scala @@ -1,4 +1,3 @@ package scala.annotation -@deprecatedInheritance("Scheduled for being final in the future", "3.4.0") -class experimental extends StaticAnnotation +final class experimental extends StaticAnnotation diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 904367d15ecd..3dec32a4c369 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -56,6 +56,7 @@ object MiMaFilters { ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeModule.apply"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.methodTypeKind"), 
ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.isContextual"), + ProblemFilters.exclude[FinalClassProblem]("scala.annotation.experimental"), ), // Breaking changes since last LTS diff --git a/tests/neg/experimentalExperimental.scala b/tests/neg/experimentalExperimental.scala new file mode 100644 index 000000000000..9011a3e49225 --- /dev/null +++ b/tests/neg/experimentalExperimental.scala @@ -0,0 +1 @@ +class MyExperimentalAnnot extends scala.annotation.experimental // error diff --git a/tests/pos/experimentalExperimental.scala b/tests/pos/experimentalExperimental.scala deleted file mode 100644 index 4b57e5b94346..000000000000 --- a/tests/pos/experimentalExperimental.scala +++ /dev/null @@ -1 +0,0 @@ -class MyExperimentalAnnot extends scala.annotation.experimental From 38b87f977d77c45c49d3e0bae64d8ed12f715555 Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Thu, 25 Apr 2024 14:48:27 +0000 Subject: [PATCH 189/465] Add a comment for experimental in MiMaFilters --- project/MiMaFilters.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 3dec32a4c369..cc0da889620f 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -56,6 +56,7 @@ object MiMaFilters { ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeModule.apply"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.methodTypeKind"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.isContextual"), + // Change `experimental` annotation to a final class ProblemFilters.exclude[FinalClassProblem]("scala.annotation.experimental"), ), From 46e0d9d433661525bd993ea78eee885674267de2 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 25 Apr 2024 17:54:52 +0200 Subject: [PATCH 190/465] Update 
compiler/src/dotty/tools/dotc/core/TypeErrors.scala --- compiler/src/dotty/tools/dotc/core/TypeErrors.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 4f944cd50983..c71c20a38eb9 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -205,7 +205,7 @@ object CyclicReference: private def isTraced(using Context) = ctx.property(CyclicReference.Trace).isDefined - private def pushTrace(info: Context ?=> String)(using Context): Unit = + private def pushTrace(info: TraceElement)(using Context): Unit = for buf <- ctx.property(CyclicReference.Trace) do buf += info From 0e4d51a8ed2f4068a506aa236bd5d7740c8af50b Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Thu, 25 Apr 2024 23:46:32 +0200 Subject: [PATCH 191/465] Do not visit args recursively in issueErrors Instead, fix the display of nested `AmbiguousImplicits` errors. 
--- .../dotty/tools/dotc/reporting/messages.scala | 2 +- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 59 ++++++++----------- tests/neg/19414-desugared.check | 12 +++- tests/neg/19414.check | 12 +++- tests/neg/given-ambiguous-1.check | 9 +++ tests/neg/given-ambiguous-1.scala | 12 ++++ tests/neg/given-ambiguous-2.check | 4 ++ tests/neg/given-ambiguous-2.scala | 10 ++++ tests/neg/given-ambiguous-default-1.check | 7 ++- tests/neg/given-ambiguous-default-2.check | 7 ++- tests/neg/i8827a.check | 7 ++- tests/neg/i8827b.check | 7 ++- tests/neg/i9568.check | 7 ++- tests/neg/implicitSearch.check | 7 ++- tests/neg/missing-implicit3.check | 11 +++- 16 files changed, 128 insertions(+), 47 deletions(-) create mode 100644 tests/neg/given-ambiguous-1.check create mode 100644 tests/neg/given-ambiguous-1.scala create mode 100644 tests/neg/given-ambiguous-2.check create mode 100644 tests/neg/given-ambiguous-2.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 5fc5b4ae66b0..1ebdc741f54f 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -2980,7 +2980,7 @@ class MissingImplicitArgument( * def foo(implicit foo: Foo): Any = ??? 
*/ arg.tpe match - case ambi: AmbiguousImplicits => + case ambi: AmbiguousImplicits if !ambi.nested => (ambi.alt1, ambi.alt2) match case (alt @ AmbiguousImplicitMsg(msg), _) => userDefinedAmbiguousImplicitMsg(alt, msg) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index ffd9d7fd8515..bb2fd22b1c93 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -537,7 +537,7 @@ object Implicits: end TooUnspecific /** An ambiguous implicits failure */ - class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree) extends SearchFailureType: + class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree, val nested: Boolean = false) extends SearchFailureType: def msg(using Context): Message = var str1 = err.refStr(alt1.ref) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 628e33e353bf..82b722850260 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3872,38 +3872,36 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer end implicitArgs /** Reports errors for arguments of `appTree` that have a - * `SearchFailureType`, recursively traversing arguments that are - * themselves applications. `mt` must be the type of `appTree.fun`. + * `SearchFailureType`. 
*/ - def reportErrors(appTree: Apply, mt: MethodType): Unit = - val Apply(fun, args) = appTree - for (paramName, formal, arg) <- mt.paramNames.lazyZip(mt.paramInfos).lazyZip(args) do + def issueErrors(fun: Tree, args: List[Tree]): Tree = + def firstFailure = args.tpes.find(_.isInstanceOf[SearchFailureType]).getOrElse(NoType) + val errorType = + firstFailure match + case tp: AmbiguousImplicits => + AmbiguousImplicits(tp.alt1, tp.alt2, tp.expectedType, tp.argument, nested = true) + case tp => + tp + val res = untpd.Apply(fun, args).withType(errorType) + + wtp.paramNames.lazyZip(wtp.paramInfos).lazyZip(args).foreach { (paramName, formal, arg) => arg.tpe match case failure: SearchFailureType => - arg match - case childAppTree: Apply => - childAppTree.fun.tpe.widen match - case childMt: MethodType => reportErrors(childAppTree, childMt) - case _ => () - case _ => () - val methodStr = err.refStr(methPart(fun).tpe) val paramStr = implicitParamString(paramName, methodStr, fun) - val paramSymWithMethodCallTree = - fun.symbol.paramSymss.flatten - .find(_.name == paramName) - .map((_, appTree)) - val message = missingArgMsg(arg, formal, paramStr, paramSymWithMethodCallTree) - // Note: if the same error type appears on several trees, we - // might report it several times, but this is not a problem - // because only the first one will be displayed. We traverse in - // post-order, so that the most detailed message gets displayed. 
- report.error(message, fun.srcPos.endPos) - case _ => () + val paramSym = fun.symbol.paramSymss.flatten.find(_.name == paramName) + val paramSymWithMethodCallTree = paramSym.map((_, res)) + report.error( + missingArgMsg(arg, formal, paramStr, paramSymWithMethodCallTree), + tree.srcPos.endPos + ) + case _ => + } + + res val args = implicitArgs(wtp.paramInfos, 0, pt) - val firstFailure = args.tpes.find(_.isInstanceOf[SearchFailureType]) - if (firstFailure.isDefined) { + if (args.tpes.exists(_.isInstanceOf[SearchFailureType])) { // If there are several arguments, some arguments might already // have influenced the context, binding variables, but later ones // might fail. In that case the constraint and instantiated variables @@ -3934,14 +3932,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // node, we can report the errors for each argument nicely. // Otherwise, we don't report anything here. retyped match - case retyped: Apply if retyped.tpe.isError => reportErrors(retyped, wtp) - case _ => () - - retyped - else - val res = untpd.Apply(tree, args).withType(firstFailure.get) - reportErrors(res, wtp) - res + case Apply(tree, args) if retyped.tpe.isError => issueErrors(tree, args) + case _ => retyped + else issueErrors(tree, args) } else tree match { case tree: Block => diff --git a/tests/neg/19414-desugared.check b/tests/neg/19414-desugared.check index eb8389649348..e126404e3e4f 100644 --- a/tests/neg/19414-desugared.check +++ b/tests/neg/19414-desugared.check @@ -1,4 +1,14 @@ -- [E172] Type Error: tests/neg/19414-desugared.scala:22:34 ------------------------------------------------------------ 22 | summon[BodySerializer[JsObject]] // error: Ambiguous given instances | ^ - |Ambiguous given instances: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] of parameter writer of given instance given_BodySerializer_B + |No given instance of type BodySerializer[JsObject] was found for parameter x of 
method summon in object Predef. + |I found: + | + | given_BodySerializer_B[B]( + | writer = + | /* ambiguous: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] */ + | summon[Writer[B]] + | , + | this.given_BodySerializer_B$default$2[B]) + | + |But both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B]. diff --git a/tests/neg/19414.check b/tests/neg/19414.check index b865b4ba227c..f80f1681739d 100644 --- a/tests/neg/19414.check +++ b/tests/neg/19414.check @@ -1,4 +1,14 @@ -- [E172] Type Error: tests/neg/19414.scala:15:34 ---------------------------------------------------------------------- 15 | summon[BodySerializer[JsObject]] // error: Ambiguous given instances | ^ - |Ambiguous given instances: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] of a context parameter of given instance given_BodySerializer_B + |No given instance of type BodySerializer[JsObject] was found for parameter x of method summon in object Predef. + |I found: + | + | given_BodySerializer_B[B]( + | evidence$1 = + | /* ambiguous: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] */ + | summon[Writer[B]] + | , + | this.given_BodySerializer_B$default$2[B]) + | + |But both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B]. diff --git a/tests/neg/given-ambiguous-1.check b/tests/neg/given-ambiguous-1.check new file mode 100644 index 000000000000..017714de2ea7 --- /dev/null +++ b/tests/neg/given-ambiguous-1.check @@ -0,0 +1,9 @@ +-- [E172] Type Error: tests/neg/given-ambiguous-1.scala:12:23 ---------------------------------------------------------- +12 |def f: Unit = summon[B] // error: Ambiguous given instances + | ^ + | No given instance of type B was found for parameter x of method summon in object Predef. 
+ | I found: + | + | given_B(/* ambiguous: both given instance a1 and given instance a2 match type A */summon[A]) + | + | But both given instance a1 and given instance a2 match type A. diff --git a/tests/neg/given-ambiguous-1.scala b/tests/neg/given-ambiguous-1.scala new file mode 100644 index 000000000000..0ce4f566e615 --- /dev/null +++ b/tests/neg/given-ambiguous-1.scala @@ -0,0 +1,12 @@ +class A +class B +given a1: A = ??? +given a2: A = ??? +given (using a: A): B = ??? + +// In this case, the ambiguous given instance is not directly the argument of +// `summon`; it is the argument of `given_B` which is needed for the argument of +// `summon`. This is a nested ambiguous implicit, thus we report an error in +// the style "I found ... but". See `given-ambiguous-2` for a direct ambiguous +// implicit error. +def f: Unit = summon[B] // error: Ambiguous given instances diff --git a/tests/neg/given-ambiguous-2.check b/tests/neg/given-ambiguous-2.check new file mode 100644 index 000000000000..ec84b750e691 --- /dev/null +++ b/tests/neg/given-ambiguous-2.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/given-ambiguous-2.scala:10:15 ---------------------------------------------------------- +10 |def f: Unit = g // error: Ambiguous given instances + | ^ + | Ambiguous given instances: both given instance a1 and given instance a2 match type A of parameter a of method g diff --git a/tests/neg/given-ambiguous-2.scala b/tests/neg/given-ambiguous-2.scala new file mode 100644 index 000000000000..2c3c52f1ccb0 --- /dev/null +++ b/tests/neg/given-ambiguous-2.scala @@ -0,0 +1,10 @@ +class A +class B +given a1: A = ??? +given a2: A = ??? +def g(using a: A): B = ??? + +// In this case, the ambiguous given instance is directly the argument of +// `summon`. This is a direct ambiguous implicit, thus we report the error +// directly. See `given-ambiguous-1` for a nested ambiguous implicit error. 
+def f: Unit = g // error: Ambiguous given instances diff --git a/tests/neg/given-ambiguous-default-1.check b/tests/neg/given-ambiguous-default-1.check index 734143b337d8..0b24a89b82cf 100644 --- a/tests/neg/given-ambiguous-default-1.check +++ b/tests/neg/given-ambiguous-default-1.check @@ -1,4 +1,9 @@ -- [E172] Type Error: tests/neg/given-ambiguous-default-1.scala:18:23 -------------------------------------------------- 18 |def f: Unit = summon[B] // error: Ambiguous given instances | ^ - |Ambiguous given instances: both given instance a1 and given instance a2 match type A of parameter a of given instance given_B + | No given instance of type B was found for parameter x of method summon in object Predef. + | I found: + | + | given_B(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A]) + | + | But both given instance a1 and given instance a2 match type A. diff --git a/tests/neg/given-ambiguous-default-2.check b/tests/neg/given-ambiguous-default-2.check index 25e9914e8288..10094f652485 100644 --- a/tests/neg/given-ambiguous-default-2.check +++ b/tests/neg/given-ambiguous-default-2.check @@ -1,4 +1,9 @@ -- [E172] Type Error: tests/neg/given-ambiguous-default-2.scala:18:23 -------------------------------------------------- 18 |def f: Unit = summon[C] // error: Ambiguous given instances | ^ - |Ambiguous given instances: both given instance a1 and given instance a2 match type A of parameter a of given instance given_C + |No given instance of type C was found for parameter x of method summon in object Predef. + |I found: + | + | given_C(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A], this.given_C$default$2) + | + |But both given instance a1 and given instance a2 match type A. 
diff --git a/tests/neg/i8827a.check b/tests/neg/i8827a.check index 8ebc50caf128..3d6c2bfa500b 100644 --- a/tests/neg/i8827a.check +++ b/tests/neg/i8827a.check @@ -1,7 +1,12 @@ -- [E172] Type Error: tests/neg/i8827a.scala:16:26 --------------------------------------------------------------------- 16 | summon[Order[List[Foo]]] // error | ^ - | No given instance of type pkg.Order[pkg.Foo] was found for parameter orderA of method orderList in object Order + | No given instance of type pkg.Order[List[pkg.Foo]] was found for parameter x of method summon in object Predef. + | I found: + | + | pkg.Order.orderList[pkg.Foo](/* missing */summon[pkg.Order[pkg.Foo]]) + | + | But no implicit values were found that match type pkg.Order[pkg.Foo]. | | The following import might fix the problem: | diff --git a/tests/neg/i8827b.check b/tests/neg/i8827b.check index de726ede38d1..6848c53aee28 100644 --- a/tests/neg/i8827b.check +++ b/tests/neg/i8827b.check @@ -1,7 +1,12 @@ -- [E172] Type Error: tests/neg/i8827b.scala:16:28 --------------------------------------------------------------------- 16 | summon[Order[Option[Foo]]] // error | ^ - |No given instance of type pkg.Order[pkg.Foo] was found for parameter orderA of given instance given_Order_Option in object Order + |No given instance of type pkg.Order[Option[pkg.Foo]] was found for parameter x of method summon in object Predef. + |I found: + | + | pkg.Order.given_Order_Option[pkg.Foo](/* missing */summon[pkg.Order[pkg.Foo]]) + | + |But no implicit values were found that match type pkg.Order[pkg.Foo]. | |The following import might fix the problem: | diff --git a/tests/neg/i9568.check b/tests/neg/i9568.check index 744023714a69..3f318d0b0111 100644 --- a/tests/neg/i9568.check +++ b/tests/neg/i9568.check @@ -4,10 +4,13 @@ | No given instance of type => Monad[F] was found for parameter ev of method blaMonad in object Test. 
| I found: | - | Test.blaMonad[F², S] + | Test.blaMonad[F², S](Test.blaMonad[F³, S²]) | - | But method blaMonad in object Test does not match type => Monad[F] + | But method blaMonad in object Test does not match type => Monad[F²] | | where: F is a type variable with constraint <: [_] =>> Any | F² is a type variable with constraint <: [_] =>> Any + | F³ is a type variable with constraint <: [_] =>> Any + | S is a type variable + | S² is a type variable | . diff --git a/tests/neg/implicitSearch.check b/tests/neg/implicitSearch.check index 01325c5bf736..e8efc744ac0a 100644 --- a/tests/neg/implicitSearch.check +++ b/tests/neg/implicitSearch.check @@ -1,7 +1,12 @@ -- [E172] Type Error: tests/neg/implicitSearch.scala:13:12 ------------------------------------------------------------- 13 | sort(xs) // error (with a partially constructed implicit argument shown) | ^ - | No given instance of type Test.Ord[T] was found for parameter o of method listOrd in object Test + | No given instance of type Test.Ord[List[List[T]]] was found for parameter o of method sort in object Test. + | I found: + | + | Test.listOrd[List[T]](Test.listOrd[T](/* missing */summon[Test.Ord[T]])) + | + | But no implicit values were found that match type Test.Ord[T]. -- [E172] Type Error: tests/neg/implicitSearch.scala:15:38 ------------------------------------------------------------- 15 | listOrd(listOrd(implicitly[Ord[T]] /*not found*/)) // error | ^ diff --git a/tests/neg/missing-implicit3.check b/tests/neg/missing-implicit3.check index 1b1df3d5a46d..c58b4430f3fe 100644 --- a/tests/neg/missing-implicit3.check +++ b/tests/neg/missing-implicit3.check @@ -1,9 +1,14 @@ -- [E172] Type Error: tests/neg/missing-implicit3.scala:13:36 ---------------------------------------------------------- 13 |val sortedFoos = sort(List(new Foo)) // error | ^ - |No given instance of type ord.Foo => Comparable[? 
>: ord.Foo] was found for parameter x$1 of given instance ordered in object Ord + | No given instance of type ord.Ord[ord.Foo] was found for a context parameter of method sort in package ord. + | I found: | - |The following import might make progress towards fixing the problem: + | ord.Ord.ordered[ord.Foo](/* missing */summon[ord.Foo => Comparable[? >: ord.Foo]]) | - | import scala.math.Ordered.orderingToOrdered + | But no implicit values were found that match type ord.Foo => Comparable[? >: ord.Foo]. + | + | The following import might make progress towards fixing the problem: + | + | import scala.math.Ordered.orderingToOrdered | From 2c70f3dbab44c492beef3dec770aba0c76e7beb6 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Tue, 26 Mar 2024 15:10:11 +0100 Subject: [PATCH 192/465] Add note about type mismatch in automatically inserted apply argument Co-Authored-By: Jan-Pieter van den Heuvel <1197006+jan-pieter@users.noreply.github.com> Co-Authored-By: Lucas Nouguier --- .../dotc/reporting/ExploringReporter.scala | 5 ++- .../dotty/tools/dotc/reporting/Reporter.scala | 3 ++ .../tools/dotc/reporting/StoreReporter.scala | 5 ++- .../dotty/tools/dotc/reporting/messages.scala | 2 +- .../dotty/tools/dotc/typer/Applications.scala | 32 ++++++++++++++++++- tests/neg/19680.check | 24 ++++++++++++++ tests/neg/19680.scala | 9 ++++++ tests/neg/19680b.check | 25 +++++++++++++++ tests/neg/19680b.scala | 2 ++ 9 files changed, 103 insertions(+), 4 deletions(-) create mode 100644 tests/neg/19680.check create mode 100644 tests/neg/19680.scala create mode 100644 tests/neg/19680b.check create mode 100644 tests/neg/19680b.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala index f469c03764c0..99720b8e4d29 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala @@ -18,6 +18,9 @@ class ExploringReporter 
extends StoreReporter(null, fromTyperState = false): override def removeBufferedMessages(using Context): List[Diagnostic] = try infos.toList finally reset() + override def mapBufferedMessages(f: Diagnostic => Diagnostic)(using Context): Unit = + infos.mapInPlace(f) + def reset(): Unit = infos.clear() -end ExploringReporter \ No newline at end of file +end ExploringReporter diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 237a3f166fe8..ddea384f4832 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -269,6 +269,9 @@ abstract class Reporter extends interfaces.ReporterResult { /** If this reporter buffers messages, remove and return all buffered messages. */ def removeBufferedMessages(using Context): List[Diagnostic] = Nil + /** If this reporter buffers messages, apply `f` to all buffered messages. */ + def mapBufferedMessages(f: Diagnostic => Diagnostic)(using Context): Unit = () + /** Issue all messages in this reporter to next outer one, or make sure they are written. */ def flush()(using Context): Unit = val msgs = removeBufferedMessages diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala index aef5f2c5863b..9395788d4cc7 100644 --- a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala @@ -21,7 +21,7 @@ class StoreReporter(outer: Reporter | Null = Reporter.NoReporter, fromTyperState protected var infos: mutable.ListBuffer[Diagnostic] | Null = null - def doReport(dia: Diagnostic)(using Context): Unit = { + override def doReport(dia: Diagnostic)(using Context): Unit = { typr.println(s">>>> StoredError: ${dia.message}") // !!! 
DEBUG if (infos == null) infos = new mutable.ListBuffer infos.uncheckedNN += dia @@ -37,6 +37,9 @@ class StoreReporter(outer: Reporter | Null = Reporter.NoReporter, fromTyperState if (infos != null) try infos.uncheckedNN.toList finally infos = null else Nil + override def mapBufferedMessages(f: Diagnostic => Diagnostic)(using Context): Unit = + if infos != null then infos.uncheckedNN.mapInPlace(f) + override def pendingMessages(using Context): List[Diagnostic] = if (infos != null) infos.uncheckedNN.toList else Nil diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 07ab1f21d6a0..c6431a4af5fd 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -289,7 +289,7 @@ extends NotFoundMsg(MissingIdentID) { } } -class TypeMismatch(val found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) +class TypeMismatch(val found: Type, expected: Type, val inTree: Option[untpd.Tree], addenda: => String*)(using Context) extends TypeMismatchMsg(found, expected)(TypeMismatchID): def msg(using Context) = diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 184b250e94fb..f406ec5f26c0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1084,7 +1084,37 @@ trait Applications extends Compatibility { simpleApply(fun1, proto) } { (failedVal, failedState) => - def fail = { failedState.commit(); failedVal } + def fail = + insertedApplyNote() + failedState.commit() + failedVal + + /** If the applied function is an automatically inserted `apply` + * method and one of its arguments has a type mismatch , append + * a note to the error message that explains where the required + * type comes from. See #19680 and associated test case. 
+ */ + def insertedApplyNote() = + if fun1.symbol.name == nme.apply && fun1.span.isSynthetic then + fun1 match + case Select(qualifier, _) => + failedState.reporter.mapBufferedMessages: + case dia: Diagnostic.Error => + dia.msg match + case msg: TypeMismatch => + msg.inTree match + case Some(arg) if tree.args.exists(_.span == arg.span) => + val Select(qualifier, _) = fun1: @unchecked + val noteText = + i"""The required type comes from a parameter of the automatically + |inserted `apply` method of `${qualifier.tpe}`, + |which is the type of `${qualifier.show}`.""".stripMargin + Diagnostic.Error(msg.appendExplanation("\n\n" + noteText), dia.pos) + case _ => dia + case msg => dia + case dia => dia + case _ => () + // Try once with original prototype and once (if different) with tupled one. // The reason we need to try both is that the decision whether to use tupled // or not was already taken but might have to be revised when an implicit diff --git a/tests/neg/19680.check b/tests/neg/19680.check new file mode 100644 index 000000000000..8372d5129960 --- /dev/null +++ b/tests/neg/19680.check @@ -0,0 +1,24 @@ +-- [E007] Type Mismatch Error: tests/neg/19680.scala:9:67 -------------------------------------------------------------- +9 |def renderWidget(using Config): Unit = renderWebsite("/tmp")(Config()) // error: found Config, required Int + | ^^^^^^^^ + | Found: Config + | Required: Int + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: new Config() + | I tried to show that + | Config + | conforms to + | Int + | but none of the attempts shown below succeeded: + | + | ==> Config <: Int = false + | + | The tests were made under the empty constraint + | + | The required type comes from a parameter of the automatically + | inserted 
`apply` method of `scala.collection.StringOps`, + | which is the type of `augmentString(renderWebsite("/tmp")(x$1))`. + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/19680.scala b/tests/neg/19680.scala new file mode 100644 index 000000000000..57fdd851dc54 --- /dev/null +++ b/tests/neg/19680.scala @@ -0,0 +1,9 @@ +//> using options -explain + +// Tests that the error message indicates that the required type `Int` comes +// from the automatically inserted `apply` method of `String`. This note is +// inserted by `insertedApplyNote` in `Applications`. + +class Config() +def renderWebsite(path: String)(using config: Config): String = ??? +def renderWidget(using Config): Unit = renderWebsite("/tmp")(Config()) // error: found Config, required Int diff --git a/tests/neg/19680b.check b/tests/neg/19680b.check new file mode 100644 index 000000000000..14f2a30c5caa --- /dev/null +++ b/tests/neg/19680b.check @@ -0,0 +1,25 @@ +-- [E007] Type Mismatch Error: tests/neg/19680b.scala:2:21 ------------------------------------------------------------- +2 |def Test = List(1,2)("hello") // error: found String, required Int + | ^^^^^^^ + | Found: ("hello" : String) + | Required: Int + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: "hello" + | I tried to show that + | ("hello" : String) + | conforms to + | Int + | but none of the attempts shown below succeeded: + | + | ==> ("hello" : String) <: Int + | ==> String <: Int = false + | + | The tests were made under the empty constraint + | + | The required type comes from a parameter of the automatically + | inserted `apply` method of `List[Int]`, + | which is the type of `List.apply[Int]([1,2 : Int]*)`. 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/19680b.scala b/tests/neg/19680b.scala new file mode 100644 index 000000000000..a089d23e6a32 --- /dev/null +++ b/tests/neg/19680b.scala @@ -0,0 +1,2 @@ +//> using options -explain +def Test = List(1,2)("hello") // error: found String, required Int From e55a36dd4385bbdb53b75be923d87854ed9e3d15 Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Fri, 26 Apr 2024 11:22:45 +0000 Subject: [PATCH 193/465] Add fromNullable --- library/src/scala/runtime/stdLibPatches/Predef.scala | 4 ++++ project/MiMaFilters.scala | 1 + tests/explicit-nulls/neg/from-nullable.scala | 6 ++++++ .../run-tasty-inspector/stdlibExperimentalDefinitions.scala | 3 +++ 4 files changed, 14 insertions(+) create mode 100644 tests/explicit-nulls/neg/from-nullable.scala diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 2146254a9467..ca9978ea347f 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -62,4 +62,8 @@ object Predef: * `eq` or `ne` methods, only `==` and `!=` inherited from `Any`. 
*/ inline def ne(inline y: AnyRef | Null): Boolean = !(x eq y) + + extension (inline opt: Option.type) + @experimental + inline def fromNullable[T](t: T | Null): Option[T] = Option(t).asInstanceOf end Predef diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index cc0da889620f..5ccb70ad6fdf 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -9,6 +9,7 @@ object MiMaFilters { // Additions that require a new minor version of the library Build.mimaPreviousDottyVersion -> Seq( ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), + ProblemFilters.exclude[FinalClassProblem]("scala.annotation.experimental"), ), // Additions since last LTS diff --git a/tests/explicit-nulls/neg/from-nullable.scala b/tests/explicit-nulls/neg/from-nullable.scala new file mode 100644 index 000000000000..ab4ab7f63e8e --- /dev/null +++ b/tests/explicit-nulls/neg/from-nullable.scala @@ -0,0 +1,6 @@ +import scala.annotation.experimental + +@experimental def testFromNullable = + val s: String | Null = "abc" + val sopt1: Option[String] = Option(s) // error + val sopt2: Option[String] = Option.fromNullable(s) // ok \ No newline at end of file diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 5e6e5700b719..76c08fa24213 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -76,6 +76,9 @@ val experimentalDefinitionInLibrary = Set( "scala.Tuple$.Reverse", // can be stabilized in 3.5 "scala.Tuple$.ReverseOnto", // can be stabilized in 3.5 "scala.runtime.Tuples$.reverse", // can be stabilized in 3.5 + + // New feature: fromNullable for explicit nulls + "scala.Predef$.fromNullable", ) From 7dc3d24fa9607c375d8d3c022fcff2a2da3e0cc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 26 Apr 2024 11:52:24 +0200 Subject: [PATCH 
194/465] Fix scala-js/scala-js#4142: Support exporting nested classes / objects Forward port of the upstream commit https://github.com/scala-js/scala-js/commit/a8d428ec6d20fd09726c549b7c47509d422d9a54 --- .../dotc/transform/sjs/PrepJSExports.scala | 141 +++++++++++------- .../dotc/transform/sjs/PrepJSInterop.scala | 9 +- ...sexport-on-non-toplevel-class-object.scala | 30 ---- 3 files changed, 95 insertions(+), 85 deletions(-) delete mode 100644 tests/neg-scalajs/jsexport-on-non-toplevel-class-object.scala diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index 228d85b86986..f66141bff8ad 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -54,23 +54,19 @@ object PrepJSExports { // Scala classes are never exported: Their constructors are. val isScalaClass = sym.isClass && !sym.isOneOf(Trait | Module) && !isJSAny(sym) + // Filter constructors of module classes: The module classes themselves will be exported. + val isModuleClassCtor = sym.isConstructor && sym.owner.is(ModuleClass) + val exports = - if (isScalaClass) Nil + if (isScalaClass || isModuleClassCtor) Nil else exportsOf(sym) assert(exports.isEmpty || !sym.is(Bridge), s"found exports for bridge symbol $sym. exports: $exports") - if (sym.isClass || sym.isConstructor) { - /* we can generate constructors, classes and modules entirely in the backend, - * since they do not need inheritance and such. - */ - Nil - } else { - // For normal exports, generate exporter methods. - val normalExports = exports.filter(_.destination == ExportDestination.Normal) - normalExports.flatMap(exp => genExportDefs(sym, exp.jsName, exp.pos.span)) - } + // For normal exports, generate exporter methods. 
+ val normalExports = exports.filter(_.destination == ExportDestination.Normal) + normalExports.flatMap(exp => genExportDefs(sym, exp.jsName, exp.pos.span)) } /** Computes the ExportInfos for sym from its annotations. */ @@ -83,6 +79,10 @@ object PrepJSExports { else sym } + val symOwner = + if (sym.isConstructor) sym.owner.owner + else sym.owner + val JSExportAnnot = jsdefn.JSExportAnnot val JSExportTopLevelAnnot = jsdefn.JSExportTopLevelAnnot val JSExportStaticAnnot = jsdefn.JSExportStaticAnnot @@ -92,13 +92,22 @@ object PrepJSExports { val directMemberAnnots = Set[Symbol](JSExportAnnot, JSExportTopLevelAnnot, JSExportStaticAnnot) val directAnnots = trgSym.annotations.filter(annot => directMemberAnnots.contains(annot.symbol)) - // Is this a member export (i.e. not a class or module export)? - val isMember = !sym.isClass && !sym.isConstructor - - // Annotations for this member on the whole unit + /* Annotations for this member on the whole unit + * + * Note that for top-level classes / modules this is always empty, because + * packages cannot have annotations. 
+ */ val unitAnnots = { - if (isMember && sym.isPublic && !sym.is(Synthetic)) - sym.owner.annotations.filter(_.symbol == JSExportAllAnnot) + val useExportAll = { + sym.isPublic && + !sym.is(Synthetic) && + !sym.isConstructor && + !sym.is(Trait) && + (!sym.isClass || sym.is(ModuleClass)) + } + + if (useExportAll) + symOwner.annotations.filter(_.symbol == JSExportAllAnnot) else Nil } @@ -139,7 +148,13 @@ object PrepJSExports { "dummy" } } else { - sym.defaultJSName + val name = (if (sym.isConstructor) sym.owner else sym).defaultJSName + if (name.endsWith(str.SETTER_SUFFIX) && !sym.isJSSetter) { + report.error( + "You must set an explicit name when exporting a non-setter with a name ending in _=", + exportPos) + } + name } } @@ -166,20 +181,12 @@ object PrepJSExports { if (!isTopLevelExport && name.contains("__")) report.error("An exported name may not contain a double underscore (`__`)", exportPos) - val symOwner = - if (sym.isConstructor) sym.owner.owner - else sym.owner - // Destination-specific restrictions destination match { case ExportDestination.Normal => - // Disallow @JSExport at the top-level, as well as on objects and classes + // Disallow @JSExport on top-level definitions. if (symOwner.is(Package) || symOwner.isPackageObject) { report.error("@JSExport is forbidden on top-level definitions. Use @JSExportTopLevel instead.", exportPos) - } else if (!isMember && !sym.is(Trait)) { - report.error( - "@JSExport is forbidden on objects and classes. 
Use @JSExport'ed factory methods instead.", - exportPos) } // Make sure we do not override the default export of toString @@ -243,19 +250,19 @@ object PrepJSExports { exportPos) } - if (isMember) { - if (sym.is(Lazy)) - report.error("You may not export a lazy val as static", exportPos) + if (sym.is(Lazy)) + report.error("You may not export a lazy val as static", exportPos) - // Illegal function application export - if (!hasExplicitName && sym.name == nme.apply) { - report.error( - "A member cannot be exported to function application as " + - "static. Use @JSExportStatic(\"apply\") to export it under " + - "the name 'apply'.", - exportPos) - } - } else { + // Illegal function application export + if (!hasExplicitName && sym.name == nme.apply) { + report.error( + "A member cannot be exported to function application as " + + "static. Use @JSExportStatic(\"apply\") to export it under " + + "the name 'apply'.", + exportPos) + } + + if (sym.isClass || sym.isConstructor) { report.error("Implementation restriction: cannot export a class or object as static", exportPos) } } @@ -375,31 +382,41 @@ object PrepJSExports { } /** Generates an exporter for a DefDef including default parameter methods. 
*/ - private def genExportDefs(defSym: Symbol, jsName: String, span: Span)(using Context): List[Tree] = { - val clsSym = defSym.owner.asClass + private def genExportDefs(sym: Symbol, jsName: String, span: Span)(using Context): List[Tree] = { + val siblingSym = + if (sym.isConstructor) sym.owner + else sym + + val clsSym = siblingSym.owner.asClass + + val isProperty = sym.is(ModuleClass) || isJSAny(sym) || sym.isJSProperty + + val copiedFlags0 = (siblingSym.flags & (Protected | Final)).toTermFlags + val copiedFlags = + if (siblingSym.is(HasDefaultParams)) copiedFlags0 | HasDefaultParams // term flag only + else copiedFlags0 // Create symbol for new method - val name = makeExportName(jsName, !defSym.is(Method) || defSym.isJSProperty) - val flags = (defSym.flags | Method | Synthetic) - &~ (Deferred | Accessor | ParamAccessor | CaseAccessor | Mutable | Lazy | Override) + val scalaName = makeExportName(jsName, !sym.is(Method) || sym.isJSProperty) + val flags = Method | Synthetic | copiedFlags val info = - if (defSym.isConstructor) defSym.info - else if (defSym.is(Method)) finalResultTypeToAny(defSym.info) + if (sym.isConstructor) sym.info + else if (sym.is(Method)) finalResultTypeToAny(sym.info) else ExprType(defn.AnyType) - val expSym = newSymbol(clsSym, name, flags, info, defSym.privateWithin, span).entered + val expSym = newSymbol(clsSym, scalaName, flags, info, sym.privateWithin, span).entered // Construct exporter DefDef tree - val exporter = genProxyDefDef(clsSym, defSym, expSym, span) + val exporter = genProxyDefDef(clsSym, sym, expSym, span) // Construct exporters for default getters - val defaultGetters = if (!defSym.hasDefaultParams) { + val defaultGetters = if (!sym.hasDefaultParams) { Nil } else { for { - (param, i) <- defSym.paramSymss.flatten.zipWithIndex + (param, i) <- sym.paramSymss.flatten.zipWithIndex if param.is(HasDefault) } yield { - genExportDefaultGetter(clsSym, defSym, expSym, i, span) + genExportDefaultGetter(clsSym, sym, expSym, i, span) } } 
@@ -435,7 +452,27 @@ object PrepJSExports { proxySym: TermSymbol, span: Span)(using Context): Tree = { DefDef(proxySym, { argss => - This(clsSym).select(trgSym).appliedToArgss(argss) + if (trgSym.isConstructor) { + val tycon = trgSym.owner.typeRef + New(tycon).select(TermRef(tycon, trgSym)).appliedToArgss(argss) + } else if (trgSym.is(ModuleClass)) { + assert(argss.isEmpty, + s"got a module export with non-empty paramss. target: $trgSym, proxy: $proxySym at $span") + ref(trgSym.sourceModule) + } else if (trgSym.isClass) { + assert(isJSAny(trgSym), s"got a class export for a non-JS class ($trgSym) at $span") + val tpe = argss match { + case Nil => + trgSym.typeRef + case (targs @ (first :: _)) :: Nil if first.isType => + trgSym.typeRef.appliedTo(targs.map(_.tpe)) + case _ => + throw AssertionError(s"got a class export with unexpected paramss. target: $trgSym, proxy: $proxySym at $span") + } + ref(jsdefn.JSPackage_constructorOf).appliedToType(tpe) + } else { + This(clsSym).select(trgSym).appliedToArgss(argss) + } }).withSpan(span) } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index f1c5f44b42c3..1b8fdd268ece 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -161,7 +161,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tree match { case tree: TypeDef if tree.isClassDef => val exports = genExport(sym) - assert(exports.isEmpty, s"got non-empty exports for $sym") + if (exports.nonEmpty) + exporters.getOrElseUpdate(sym.owner, mutable.ListBuffer.empty) ++= exports if (isJSAny(sym)) transformJSClassDef(tree) @@ -174,8 +175,10 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP case tree: ValOrDefDef => // Prepare exports val exports = genExport(sym) - if (exports.nonEmpty) - exporters.getOrElseUpdate(sym.owner, 
mutable.ListBuffer.empty) ++= exports + if (exports.nonEmpty) { + val target = if (sym.isConstructor) sym.owner.owner else sym.owner + exporters.getOrElseUpdate(target, mutable.ListBuffer.empty) ++= exports + } if (sym.isLocalToBlock) super.transform(tree) diff --git a/tests/neg-scalajs/jsexport-on-non-toplevel-class-object.scala b/tests/neg-scalajs/jsexport-on-non-toplevel-class-object.scala deleted file mode 100644 index 7d127a5654ae..000000000000 --- a/tests/neg-scalajs/jsexport-on-non-toplevel-class-object.scala +++ /dev/null @@ -1,30 +0,0 @@ -import scala.scalajs.js -import scala.scalajs.js.annotation.* - -class A { - @JSExport // error - class A1 { - @JSExport // error - def this(x: Int) = this() - } - - @JSExport // error - class A2 extends js.Object - - @JSExport // error - object A3 - - @JSExport // error - object A4 extends js.Object -} - -object B { - @JSExport // error - class B1 { - @JSExport // error - def this(x: Int) = this() - } - - @JSExport // error - class B2 extends js.Object -} From c9f42221c16ba3ff2f7c9fdfdac7804988f57c0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 26 Apr 2024 15:09:57 +0200 Subject: [PATCH 195/465] Fix mixin codegen for an `object Foo extends js.Any` inside a `trait`. The `New` is not enough for an `object class Foo$` that is an inner JS object. It needs the correct wrapping generated by `ExplicitJSClasses`. That wrapping exists in the `lazy def` defined in the trait, but cannot be reproduced in the implementing class. In general, we need full paths to do that, which are long gone. When extending a Scala 3-defined trait, we can actually generate a call to the `lazy def`. Although it was never called before (and still is never called for non-JS objects), it was always generated by the Scala 3 compilers. Scala 2, however, does not emit those `lazy def`s. Since I don't see a way out (yet), we report an "Implementation restriction" error in that case. 
--- compiler/src/dotty/tools/dotc/transform/Mixin.scala | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala index 6df4bebde132..9a19c0dc414f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Mixin.scala +++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala @@ -18,6 +18,8 @@ import NameKinds.* import NameOps.* import ast.Trees.* +import dotty.tools.dotc.transform.sjs.JSSymUtils.isJSType + object Mixin { val name: String = "mixin" val description: String = "expand trait fields and trait initializers" @@ -273,7 +275,15 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => else if (getter.is(Lazy, butNot = Module)) transformFollowing(superRef(getter).appliedToNone) else if (getter.is(Module)) - New(getter.info.resultType, List(This(cls))) + if ctx.settings.scalajs.value && getter.moduleClass.isJSType then + if getter.is(Scala2x) then + report.error( + em"""Implementation restriction: cannot extend the Scala 2 trait $mixin + |containing the object $getter that extends js.Any""", + cls.srcPos) + transformFollowing(superRef(getter).appliedToNone) + else + New(getter.info.resultType, List(This(cls))) else Underscore(getter.info.resultType) // transformFollowing call is needed to make memoize & lazy vals run From 06c7f1a13b04bf08f68bd8f283d914a50d571b0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Thu, 25 Apr 2024 10:42:50 +0200 Subject: [PATCH 196/465] Upgrade to Scala.js 1.13.0. 
Code changes are forward ports of the following refactorings: * Split members of ClassDef by type https://github.com/scala-js/scala-js/commit/ec3f4066119ca04d6370095ce9884a42371d9cca * Unify TreeHash and Versioned into a single Version https://github.com/scala-js/scala-js/commit/9f5071fd0429cb73421ca6900f7a4b7ef5c33bd2 * Fix scala-js/scala-js#4769: Give JSPropertyDef a hash https://github.com/scala-js/scala-js/commit/c2bf430a7a4ba5ecd1fe692a794964934bd4db34 --- .../dotty/tools/backend/sjs/JSCodeGen.scala | 241 +++++++++--------- .../tools/backend/sjs/JSExportsGen.scala | 38 +-- project/Build.scala | 2 + project/plugins.sbt | 2 +- 4 files changed, 142 insertions(+), 141 deletions(-) diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 5b0d37d28a9b..8328afd52573 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -31,6 +31,7 @@ import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} import org.scalajs.ir.OriginalName import org.scalajs.ir.OriginalName.NoOriginalName import org.scalajs.ir.Trees.OptimizerHints +import org.scalajs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSSymUtils.* @@ -354,7 +355,8 @@ class JSCodeGen()(using genCtx: Context) { // Generate members (constructor + methods) - val generatedNonFieldMembers = new mutable.ListBuffer[js.MemberDef] + val methodsBuilder = List.newBuilder[js.MethodDef] + val jsNativeMembersBuilder = List.newBuilder[js.JSNativeMemberDef] val tpl = td.rhs.asInstanceOf[Template] for (tree <- tpl.constr :: tpl.body) { @@ -365,23 +367,25 @@ class JSCodeGen()(using genCtx: Context) { // fields are added via genClassFields(), but we need to generate the JS native members val sym = vd.symbol if (!sym.is(Module) && sym.hasAnnotation(jsdefn.JSNativeAnnot)) - generatedNonFieldMembers += genJSNativeMemberDef(vd) + jsNativeMembersBuilder += 
genJSNativeMemberDef(vd) case dd: DefDef => val sym = dd.symbol if sym.hasAnnotation(jsdefn.JSNativeAnnot) then if !sym.is(Accessor) then - generatedNonFieldMembers += genJSNativeMemberDef(dd) + jsNativeMembersBuilder += genJSNativeMemberDef(dd) else - generatedNonFieldMembers ++= genMethod(dd) + methodsBuilder ++= genMethod(dd) case _ => throw new FatalError("Illegal tree in body of genScalaClass(): " + tree) } } - // Generate fields and add to methods + ctors - val generatedMembers = genClassFields(td) ++ generatedNonFieldMembers.toList + val (fields, staticGetterDefs) = if (!isHijacked) genClassFields(td) else (Nil, Nil) + + val jsNativeMembers = jsNativeMembersBuilder.result() + val generatedMethods = methodsBuilder.result() ::: staticGetterDefs // Generate member exports val memberExports = jsExportsGen.genMemberExports(sym) @@ -422,12 +426,12 @@ class JSCodeGen()(using genCtx: Context) { if (isDynamicImportThunk) List(genDynamicImportForwarder(sym)) else Nil - val allMemberDefsExceptStaticForwarders = - generatedMembers ::: memberExports ::: optStaticInitializer ::: optDynamicImportForwarder + val allMethodsExceptStaticForwarders: List[js.MethodDef] = + generatedMethods ::: optStaticInitializer ::: optDynamicImportForwarder // Add static forwarders - val allMemberDefs = if (!isCandidateForForwarders(sym)) { - allMemberDefsExceptStaticForwarders + val allMethods = if (!isCandidateForForwarders(sym)) { + allMethodsExceptStaticForwarders } else { if (isStaticModule(sym)) { /* If the module class has no linked class, we must create one to @@ -446,23 +450,24 @@ class JSCodeGen()(using genCtx: Context) { Nil, None, None, - forwarders, - Nil + fields = Nil, + methods = forwarders, + jsConstructor = None, + jsMethodProps = Nil, + jsNativeMembers = Nil, + topLevelExportDefs = Nil )(js.OptimizerHints.empty) generatedStaticForwarderClasses += sym -> forwardersClassDef } } - allMemberDefsExceptStaticForwarders + allMethodsExceptStaticForwarders } else { val forwarders = 
genStaticForwardersForClassOrInterface( - allMemberDefsExceptStaticForwarders, sym) - allMemberDefsExceptStaticForwarders ::: forwarders + allMethodsExceptStaticForwarders, sym) + allMethodsExceptStaticForwarders ::: forwarders } } - // Hashed definitions of the class - val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) - // The complete class definition val kind = if (isStaticModule(sym)) ClassKind.ModuleClass @@ -478,11 +483,15 @@ class JSCodeGen()(using genCtx: Context) { genClassInterfaces(sym, forJSClass = false), None, None, - hashedDefs, + fields, + allMethods, + jsConstructor = None, + memberExports, + jsNativeMembers, topLevelExportDefs)( optimizerHints) - classDefinition + ir.Hashers.hashClassDef(classDefinition) } /** Gen the IR ClassDef for a Scala.js-defined JS class. */ @@ -546,22 +555,22 @@ class JSCodeGen()(using genCtx: Context) { } // Static members (exported from the companion object) - val staticMembers = { + val (staticFields, staticExports) = { val module = sym.companionModule if (!module.exists) { - Nil + (Nil, Nil) } else { val companionModuleClass = module.moduleClass - val exports = withScopedVars(currentClassSym := companionModuleClass) { + val (staticFields, staticExports) = withScopedVars(currentClassSym := companionModuleClass) { jsExportsGen.genStaticExports(companionModuleClass) } - if (exports.exists(_.isInstanceOf[js.JSFieldDef])) { - val classInitializer = + + if (staticFields.nonEmpty) { + generatedMethods += genStaticConstructorWithStats(ir.Names.ClassInitializerName, genLoadModule(companionModuleClass)) - exports :+ classInitializer - } else { - exports } + + (staticFields, staticExports) } } @@ -587,17 +596,12 @@ class JSCodeGen()(using genCtx: Context) { (ctor, jsClassCaptures) } - // Generate fields (and add to methods + ctors) - val generatedMembers = { - genClassFields(td) ::: - generatedConstructor :: - jsExportsGen.genJSClassDispatchers(sym, dispatchMethodNames.result().distinct) ::: - generatedMethods.toList ::: 
- staticMembers - } + // Generate fields + val (fields, staticGetterDefs) = genClassFields(td) - // Hashed definitions of the class - val hashedMemberDefs = ir.Hashers.hashMemberDefs(generatedMembers) + val methods = generatedMethods.toList ::: staticGetterDefs + val jsMethodProps = + jsExportsGen.genJSClassDispatchers(sym, dispatchMethodNames.result().distinct) ::: staticExports // The complete class definition val kind = @@ -613,11 +617,15 @@ class JSCodeGen()(using genCtx: Context) { genClassInterfaces(sym, forJSClass = true), jsSuperClass = jsClassCaptures.map(_.head.ref), None, - hashedMemberDefs, + fields ::: staticFields, + methods, + Some(generatedConstructor), + jsMethodProps, + jsNativeMembers = Nil, topLevelExports)( OptimizerHints.empty) - classDefinition + ir.Hashers.hashClassDef(classDefinition) } /** Gen the IR ClassDef for a raw JS class or trait. @@ -647,6 +655,10 @@ class JSCodeGen()(using genCtx: Context) { None, jsNativeLoadSpec, Nil, + Nil, + None, + Nil, + Nil, Nil)( OptimizerHints.empty) } @@ -681,10 +693,7 @@ class JSCodeGen()(using genCtx: Context) { if (!isCandidateForForwarders(sym)) genMethodsList else genMethodsList ::: genStaticForwardersForClassOrInterface(genMethodsList, sym) - // Hashed definitions of the interface - val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) - - js.ClassDef( + val classDef = js.ClassDef( classIdent, originalNameOfClass(sym), ClassKind.Interface, @@ -693,9 +702,15 @@ class JSCodeGen()(using genCtx: Context) { superInterfaces, None, None, - hashedDefs, + Nil, + allMemberDefs, + None, + Nil, + Nil, Nil)( OptimizerHints.empty) + + ir.Hashers.hashClassDef(classDef) } private def genClassInterfaces(sym: ClassSymbol, forJSClass: Boolean)( @@ -763,15 +778,15 @@ class JSCodeGen()(using genCtx: Context) { * Precondition: `isCandidateForForwarders(sym)` is true */ def genStaticForwardersForClassOrInterface( - existingMembers: List[js.MemberDef], sym: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] 
= { + existingMethods: List[js.MethodDef], sym: Symbol)( + implicit pos: SourcePosition): List[js.MethodDef] = { val module = sym.companionModule if (!module.exists) { Nil } else { val moduleClass = module.moduleClass if (!moduleClass.isJSType) - genStaticForwardersFromModuleClass(existingMembers, moduleClass) + genStaticForwardersFromModuleClass(existingMethods, moduleClass) else Nil } @@ -781,13 +796,13 @@ class JSCodeGen()(using genCtx: Context) { * * Precondition: `isCandidateForForwarders(moduleClass)` is true */ - def genStaticForwardersFromModuleClass(existingMembers: List[js.MemberDef], + def genStaticForwardersFromModuleClass(existingMethods: List[js.MethodDef], moduleClass: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { + implicit pos: SourcePosition): List[js.MethodDef] = { assert(moduleClass.is(ModuleClass), moduleClass) - val existingPublicStaticMethodNames = existingMembers.collect { + val existingPublicStaticMethodNames = existingMethods.collect { case js.MethodDef(flags, name, _, _, _, _) if flags.namespace == js.MemberNamespace.PublicStatic => name.name @@ -849,7 +864,7 @@ class JSCodeGen()(using genCtx: Context) { js.MethodDef(flags, methodIdent, originalName, jsParams, resultType, Some { genApplyMethod(genLoadModule(moduleClass), m, jsParams.map(_.ref)) - })(OptimizerHints.empty, None) + })(OptimizerHints.empty, Unversioned) } } @@ -859,20 +874,23 @@ class JSCodeGen()(using genCtx: Context) { // Generate the fields of a class ------------------------------------------ /** Gen definitions for the fields of a class. 
*/ - private def genClassFields(td: TypeDef): List[js.MemberDef] = { + private def genClassFields(td: TypeDef): (List[js.AnyFieldDef], List[js.MethodDef]) = { val classSym = td.symbol.asClass assert(currentClassSym.get == classSym, "genClassFields called with a ClassDef other than the current one") val isJSClass = classSym.isNonNativeJSClass + val fieldDefs = List.newBuilder[js.AnyFieldDef] + val staticGetterDefs = List.newBuilder[js.MethodDef] + // Term members that are neither methods nor modules are fields classSym.info.decls.filter { f => !f.isOneOf(MethodOrModule) && f.isTerm && !f.hasAnnotation(jsdefn.JSNativeAnnot) && !f.hasAnnotation(jsdefn.JSOptionalAnnot) && !f.hasAnnotation(jsdefn.JSExportStaticAnnot) - }.flatMap({ f => + }.foreach { f => implicit val pos = f.span val isTopLevelExport = f.hasAnnotation(jsdefn.JSExportTopLevelAnnot) @@ -897,28 +915,27 @@ class JSCodeGen()(using genCtx: Context) { else irTpe0 if (isJSClass && f.isJSExposed) - js.JSFieldDef(flags, genExpr(f.jsName)(f.sourcePos), irTpe) :: Nil + fieldDefs += js.JSFieldDef(flags, genExpr(f.jsName)(f.sourcePos), irTpe) else val fieldIdent = encodeFieldSym(f) val originalName = originalNameOfField(f) - val fieldDef = js.FieldDef(flags, fieldIdent, originalName, irTpe) - val optionalStaticFieldGetter = - if isJavaStatic then - // Here we are generating a public static getter for the static field, - // this is its API for other units. This is necessary for singleton - // enum values, which are backed by static fields. 
- val className = encodeClassName(classSym) - val body = js.Block( - js.LoadModule(className), - js.SelectStatic(className, fieldIdent)(irTpe)) - js.MethodDef(js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), - encodeStaticMemberSym(f), originalName, Nil, irTpe, - Some(body))( - OptimizerHints.empty, None) :: Nil - else - Nil - fieldDef :: optionalStaticFieldGetter - }).toList + fieldDefs += js.FieldDef(flags, fieldIdent, originalName, irTpe) + if isJavaStatic then + // Here we are generating a public static getter for the static field, + // this is its API for other units. This is necessary for singleton + // enum values, which are backed by static fields. + val className = encodeClassName(classSym) + val body = js.Block( + js.LoadModule(className), + js.SelectStatic(className, fieldIdent)(irTpe)) + staticGetterDefs += js.MethodDef( + js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), + encodeStaticMemberSym(f), originalName, Nil, irTpe, + Some(body))( + OptimizerHints.empty, Unversioned) + } + + (fieldDefs.result(), staticGetterDefs.result()) } def genExposedFieldIRType(f: Symbol): jstpe.Type = { @@ -956,7 +973,7 @@ class JSCodeGen()(using genCtx: Context) { Nil, jstpe.NoType, Some(stats))( - OptimizerHints.empty, None) + OptimizerHints.empty, Unversioned) } private def genRegisterReflectiveInstantiation(sym: Symbol)( @@ -1122,7 +1139,7 @@ class JSCodeGen()(using genCtx: Context) { val constructorDef = js.JSConstructorDef( js.MemberFlags.empty.withNamespace(js.MemberNamespace.Constructor), - formalArgs, restParam, constructorBody)(OptimizerHints.empty, None) + formalArgs, restParam, constructorBody)(OptimizerHints.empty, Unversioned) (jsClassCaptures, constructorDef) } @@ -1504,7 +1521,7 @@ class JSCodeGen()(using genCtx: Context) { } else if (sym.is(Deferred)) { Some(js.MethodDef(js.MemberFlags.empty, methodName, originalName, jsParams, toIRType(patchedResultType(sym)), None)( - OptimizerHints.empty, None)) + 
OptimizerHints.empty, Unversioned)) } else if (isIgnorableDefaultParam) { // #11592 None @@ -1545,7 +1562,7 @@ class JSCodeGen()(using genCtx: Context) { val namespace = js.MemberNamespace.Constructor js.MethodDef(js.MemberFlags.empty.withNamespace(namespace), methodName, originalName, jsParams, jstpe.NoType, Some(genStat(rhs)))( - optimizerHints, None) + optimizerHints, Unversioned) } else { val namespace = if (isMethodStaticInIR(sym)) { if (sym.isPrivate) js.MemberNamespace.PrivateStatic @@ -1590,7 +1607,7 @@ class JSCodeGen()(using genCtx: Context) { if (namespace.isStatic || !currentClassSym.isNonNativeJSClass) { val flags = js.MemberFlags.empty.withNamespace(namespace) js.MethodDef(flags, methodName, originalName, jsParams, resultIRType, Some(genBody()))( - optimizerHints, None) + optimizerHints, Unversioned) } else { val thisLocalIdent = freshLocalIdent("this") withScopedVars( @@ -1606,7 +1623,7 @@ class JSCodeGen()(using genCtx: Context) { js.MethodDef(flags, methodName, originalName, thisParamDef :: jsParams, resultIRType, Some(genBody()))( - optimizerHints, None) + optimizerHints, Unversioned) } } } @@ -2323,37 +2340,19 @@ class JSCodeGen()(using genCtx: Context) { // Partition class members. 
val privateFieldDefs = mutable.ListBuffer.empty[js.FieldDef] - val classDefMembers = mutable.ListBuffer.empty[js.MemberDef] - val instanceMembers = mutable.ListBuffer.empty[js.MemberDef] - var constructor: Option[js.JSConstructorDef] = None + val jsFieldDefs = mutable.ListBuffer.empty[js.JSFieldDef] - originalClassDef.memberDefs.foreach { + originalClassDef.fields.foreach { case fdef: js.FieldDef => privateFieldDefs += fdef case fdef: js.JSFieldDef => - instanceMembers += fdef - - case mdef: js.MethodDef => - assert(mdef.flags.namespace.isStatic, - "Non-static, unexported method in non-native JS class") - classDefMembers += mdef - - case cdef: js.JSConstructorDef => - assert(constructor.isEmpty, "two ctors in class") - constructor = Some(cdef) - - case mdef: js.JSMethodDef => - assert(!mdef.flags.namespace.isStatic, "Exported static method") - instanceMembers += mdef - - case property: js.JSPropertyDef => - instanceMembers += property - - case nativeMemberDef: js.JSNativeMemberDef => - throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) + jsFieldDefs += fdef } + assert(originalClassDef.jsNativeMembers.isEmpty, + "Found JS native members in anonymous JS class at " + pos) + assert(originalClassDef.topLevelExportDefs.isEmpty, "Found top-level exports in anonymous JS class at " + pos) @@ -2363,8 +2362,9 @@ class JSCodeGen()(using genCtx: Context) { val parent = js.ClassIdent(jsNames.ObjectClass) js.ClassDef(originalClassDef.name, originalClassDef.originalName, ClassKind.AbstractJSType, None, Some(parent), interfaces = Nil, - jsSuperClass = None, jsNativeLoadSpec = None, - classDefMembers.toList, Nil)( + jsSuperClass = None, jsNativeLoadSpec = None, fields = Nil, + methods = originalClassDef.methods, jsConstructor = None, + jsMethodProps = Nil, jsNativeMembers = Nil, topLevelExportDefs = Nil)( originalClassDef.optimizerHints) } @@ -2375,7 +2375,7 @@ class JSCodeGen()(using genCtx: Context) { val jsClassCaptures = 
originalClassDef.jsClassCaptures.getOrElse { throw new AssertionError(s"no class captures for anonymous JS class at $pos") } - val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { + val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = originalClassDef.jsConstructor.getOrElse { throw new AssertionError("No ctor found") } assert(ctorParams.isEmpty && ctorRestParam.isEmpty, @@ -2399,20 +2399,12 @@ class JSCodeGen()(using genCtx: Context) { def memberLambda(params: List[js.ParamDef], restParam: Option[js.ParamDef], body: js.Tree)(implicit pos: ir.Position): js.Closure = js.Closure(arrow = false, captureParams = Nil, params, restParam, body, captureValues = Nil) - val memberDefinitions0 = instanceMembers.toList.map { - case fdef: js.FieldDef => - throw new AssertionError("unexpected FieldDef") - - case fdef: js.JSFieldDef => - implicit val pos = fdef.pos - js.Assign(js.JSSelect(selfRef, fdef.name), jstpe.zeroOf(fdef.ftpe)) - - case mdef: js.MethodDef => - throw new AssertionError("unexpected MethodDef") - - case cdef: js.JSConstructorDef => - throw new AssertionError("unexpected JSConstructorDef") + val fieldDefinitions = jsFieldDefs.toList.map { fdef => + implicit val pos = fdef.pos + js.Assign(js.JSSelect(selfRef, fdef.name), jstpe.zeroOf(fdef.ftpe)) + } + val memberDefinitions0 = originalClassDef.jsMethodProps.toList.map { case mdef: js.JSMethodDef => implicit val pos = mdef.pos val impl = memberLambda(mdef.args, mdef.restParam, mdef.body) @@ -2434,13 +2426,12 @@ class JSCodeGen()(using genCtx: Context) { js.JSMethodApply(js.JSGlobalRef("Object"), js.StringLiteral("defineProperty"), List(selfRef, pdef.name, descriptor)) - - case nativeMemberDef: js.JSNativeMemberDef => - throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) } + val memberDefinitions1 = fieldDefinitions ::: memberDefinitions0 + val memberDefinitions = if (privateFieldDefs.isEmpty) { - memberDefinitions0 + 
memberDefinitions1 } else { /* Private fields, declared in FieldDefs, are stored in a separate * object, itself stored as a non-enumerable field of the `selfRef`. @@ -2481,7 +2472,7 @@ class JSCodeGen()(using genCtx: Context) { ) ) } - definePrivateFieldsObj :: memberDefinitions0 + definePrivateFieldsObj :: memberDefinitions1 } // Transform the constructor body. @@ -3581,7 +3572,7 @@ class JSCodeGen()(using genCtx: Context) { NoOriginalName, paramDefs, jstpe.AnyType, - Some(body))(OptimizerHints.empty, None) + Some(body))(OptimizerHints.empty, Unversioned) } } diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index 8c72f03e7cc4..b5f9446758a9 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -27,6 +27,7 @@ import org.scalajs.ir.Names.DefaultModuleID import org.scalajs.ir.OriginalName.NoOriginalName import org.scalajs.ir.Position.NoPosition import org.scalajs.ir.Trees.OptimizerHints +import org.scalajs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSExportUtils.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* @@ -185,7 +186,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { }).toList } - def genStaticExports(classSym: Symbol): List[js.MemberDef] = { + def genStaticExports(classSym: Symbol): (List[js.JSFieldDef], List[js.JSMethodPropDef]) = { val exports = for { sym <- classSym.info.decls.toList info <- staticExportsOf(sym) @@ -193,10 +194,13 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { (info, sym) } - (for { + val fields = List.newBuilder[js.JSFieldDef] + val methodProps = List.newBuilder[js.JSMethodPropDef] + + for { (info, tups) <- exports.groupBy(_._1) kind <- checkSameKind(tups) - } yield { + } { def alts = tups.map(_._2) implicit val pos = info.pos @@ -205,10 +209,12 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { kind match { 
case Method => - genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = false, alts, static = true) + methodProps += + genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = false, alts, static = true) case Property => - genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = true, alts, static = true) + methodProps += + genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = true, alts, static = true) case Field => val sym = checkSingleField(tups) @@ -219,19 +225,21 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { .withMutable(true) val name = js.StringLiteral(info.jsName) val irTpe = genExposedFieldIRType(sym) - js.JSFieldDef(flags, name, irTpe) + fields += js.JSFieldDef(flags, name, irTpe) case kind => throw new AssertionError(s"unexpected static export kind: $kind") } - }).toList + } + + (fields.result(), methodProps.result()) } /** Generates exported methods and properties for a class. * * @param classSym symbol of the class we export for */ - def genMemberExports(classSym: ClassSymbol): List[js.MemberDef] = { + def genMemberExports(classSym: ClassSymbol): List[js.JSMethodPropDef] = { val classInfo = classSym.info val allExports = classInfo.memberDenots(takeAllFilter, { (name, buf) => if (isExportName(name)) @@ -251,7 +259,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { newlyDeclaredExportNames.map(genMemberExport(classSym, _)) } - private def genMemberExport(classSym: ClassSymbol, name: TermName): js.MemberDef = { + private def genMemberExport(classSym: ClassSymbol, name: TermName): js.JSMethodPropDef = { /* This used to be `.member(name)`, but it caused #3538, since we were * sometimes selecting mixin forwarders, whose type history does not go * far enough back in time to see varargs. 
We now explicitly exclude @@ -284,11 +292,11 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { genMemberExportOrDispatcher(JSName.Literal(jsName), isProp, alts.map(_.symbol), static = false) } - def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.MemberDef] = { + def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.JSMethodPropDef] = { dispatchMethodsNames.map(genJSClassDispatcher(classSym, _)) } - private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.MemberDef = { + private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.JSMethodPropDef = { val alts = classSym.info.membersBasedOnFlags(required = Method, excluded = Bridge) .map(_.symbol) .filter { sym => @@ -311,14 +319,14 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { em"Conflicting properties and methods for ${classSym.fullName}::$name.", firstAlt.srcPos) implicit val pos = firstAlt.span - js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None) + js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None)(Unversioned) } else { genMemberExportOrDispatcher(name, isProp, alts, static = false) } } private def genMemberExportOrDispatcher(jsName: JSName, isProp: Boolean, - alts: List[Symbol], static: Boolean): js.MemberDef = { + alts: List[Symbol], static: Boolean): js.JSMethodPropDef = { withNewLocalNameScope { if (isProp) genExportProperty(alts, jsName, static) @@ -362,7 +370,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { } } - js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody) + js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody)(Unversioned) } private def genExportMethod(alts0: List[Symbol], jsName: JSName, static: Boolean)(using Context): js.JSMethodDef = { @@ -389,7 +397,7 @@ final class JSExportsGen(jsCodeGen: 
JSCodeGen)(using Context) { genOverloadDispatch(jsName, overloads, jstpe.AnyType) js.JSMethodDef(flags, genExpr(jsName), formalArgs, restParam, body)( - OptimizerHints.empty, None) + OptimizerHints.empty, Unversioned) } def genOverloadDispatch(jsName: JSName, alts: List[Exported], tpe: jstpe.Type)( diff --git a/project/Build.scala b/project/Build.scala index 8142e5e0744d..ed2460820d07 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1513,6 +1513,7 @@ object Build { "compliantArrayIndexOutOfBounds" -> (sems.arrayIndexOutOfBounds == CheckedBehavior.Compliant), "compliantArrayStores" -> (sems.arrayStores == CheckedBehavior.Compliant), "compliantNegativeArraySizes" -> (sems.negativeArraySizes == CheckedBehavior.Compliant), + "compliantNullPointers" -> (sems.nullPointers == CheckedBehavior.Compliant), "compliantStringIndexOutOfBounds" -> (sems.stringIndexOutOfBounds == CheckedBehavior.Compliant), "compliantModuleInit" -> (sems.moduleInit == CheckedBehavior.Compliant), "strictFloats" -> sems.strictFloats, @@ -1579,6 +1580,7 @@ object Build { (dir / "shared/src/test/scala" ** (("*.scala": FileFilter) -- "ReflectiveCallTest.scala" // uses many forms of structural calls that are not allowed in Scala 3 anymore -- "UTF16Test.scala" // refutable pattern match + -- "CharsetTest.scala" // bogus @tailrec that Scala 2 ignores but Scala 3 flags as an error )).get ++ (dir / "shared/src/test/require-sam" ** "*.scala").get diff --git a/project/plugins.sbt b/project/plugins.sbt index c94d4d5afe8d..d378848561b8 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.12.0") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.13.0") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") From 72894aa9697f7084ec01f95b3880ffbab4741138 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 11 Apr 2024 16:44:31 +0200 
Subject: [PATCH 197/465] If there is an import selection for an indent, attach the original name --- compiler/src/dotty/tools/dotc/transform/CheckUnused.scala | 5 ++++- compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 7cff6fa5f1f0..040ab3fdec58 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -96,7 +96,8 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke ctx override def prepareForSelect(tree: tpd.Select)(using Context): Context = - unusedDataApply(_.registerUsed(tree.symbol, Some(tree.name))) + val name = tree.getAttachment(OriginalName).orElse(Some(tree.name)) + unusedDataApply(_.registerUsed(tree.symbol, name)) override def prepareForBlock(tree: tpd.Block)(using Context): Context = pushInBlockTemplatePackageDef(tree) @@ -327,6 +328,8 @@ object CheckUnused: */ private val _key = Property.StickyKey[UnusedData] + val OriginalName = Property.StickyKey[Name] + class PostTyper extends CheckUnused(PhaseMode.Aggregate, "PostTyper", _key) class PostInlining extends CheckUnused(PhaseMode.Report, "PostInlining", _key) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 9150ad6be392..2c7539495852 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -51,6 +51,7 @@ import NullOpsDecorator.* import cc.{CheckCaptures, isRetainsLike} import config.Config import config.MigrationVersion +import transform.CheckUnused.OriginalName import scala.annotation.constructorOnly @@ -629,7 +630,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val checkedType = checkNotShadowed(ownType) val tree1 = checkedType match case checkedType: 
NamedType if !prefixIsElidable(checkedType) => - ref(checkedType).withSpan(tree.span) + ref(checkedType).withSpan(tree.span).withAttachment(OriginalName, name) case _ => def isScalaModuleRef = checkedType match case moduleRef: TypeRef if moduleRef.symbol.is(ModuleClass, butNot = JavaDefined) => true From a5470faa77c8c2f5e4e59176190534d4368e5715 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 11 Apr 2024 17:19:01 +0200 Subject: [PATCH 198/465] Add test --- tests/warn/i20146.scala | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 tests/warn/i20146.scala diff --git a/tests/warn/i20146.scala b/tests/warn/i20146.scala new file mode 100644 index 000000000000..bc952104df5d --- /dev/null +++ b/tests/warn/i20146.scala @@ -0,0 +1,7 @@ +//> using options -Wunused:imports + +def test(list: List[Int]): Int = + import list.{head => first} + import list.{length => len} // warn + import list.{addString => add} // warn + first + list.length \ No newline at end of file From e6b00c305cc0017c82bea36faa0537619543a767 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 17 Apr 2024 17:42:38 +0200 Subject: [PATCH 199/465] Refactor isInImport --- .../tools/dotc/transform/CheckUnused.scala | 41 ++++++++++--------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 040ab3fdec58..c7cf7ea68804 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -680,36 +680,39 @@ object CheckUnused: } /** Given an import and accessibility, return selector that matches import<->symbol */ - private def isInImport(imp: tpd.Import, isAccessible: Boolean, symName: Option[Name], isDerived: Boolean)(using Context): Option[ImportSelector] = + private def isInImport(imp: tpd.Import, isAccessible: Boolean, altName: Option[Name], isDerived: Boolean)(using Context): Option[ImportSelector] 
= val tpd.Import(qual, sels) = imp - val dealiasedSym = dealias(sym) - val simpleSelections = qual.tpe.member(sym.name).alternatives - val typeSelections = sels.flatMap(n => qual.tpe.member(n.name.toTypeName).alternatives) - val termSelections = sels.flatMap(n => qual.tpe.member(n.name.toTermName).alternatives) - val sameTermPath = qual.isTerm && sym.exists && sym.owner.isType && qual.tpe.typeSymbol == sym.owner.asType - val selectionsToDealias = typeSelections ::: termSelections - val renamedSelection = if sameTermPath then sels.find(sel => sel.imported.name == sym.name) else None - val qualHasSymbol = simpleSelections.map(_.symbol).contains(sym) || (simpleSelections ::: selectionsToDealias).map(_.symbol).map(dealias).contains(dealiasedSym) || renamedSelection.isDefined - def selector = sels.find(sel => (sel.name.toTermName == sym.name || sel.name.toTypeName == sym.name) && symName.map(n => n.toTermName == sel.rename).getOrElse(true)) - def dealiasedSelector = if(isDerived) sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collect { - case (sel, sym) if dealias(sym) == dealiasedSym => sel - }.headOption else None + val qualTpe = qual.tpe + val dealiasedSym = sym.dealias + val simpleSelections = qualTpe.member(sym.name).alternatives + val selectionsToDealias = sels.flatMap(sel => + qualTpe.member(sel.name.toTypeName).alternatives + ::: qualTpe.member(sel.name.toTermName).alternatives) + def qualHasSymbol = simpleSelections.map(_.symbol).contains(sym) || (simpleSelections ::: selectionsToDealias).map(_.symbol).map(_.dealias).contains(dealiasedSym) + def selector = sels.find(sel => (sel.name.toTermName == sym.name || sel.name.toTypeName == sym.name) && altName.map(n => n.toTermName == sel.rename).getOrElse(true)) + def dealiasedSelector = + if isDerived then + sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collect { + case (sel, sym) if sym.dealias == dealiasedSym => sel + }.headOption + else None def givenSelector = if 
sym.is(Given) || sym.is(Implicit) then sels.filter(sel => sel.isGiven && !sel.bound.isEmpty).find(sel => sel.boundTpe =:= sym.info) else None def wildcard = sels.find(sel => sel.isWildcard && ((sym.is(Given) == sel.isGiven && sel.bound.isEmpty) || sym.is(Implicit))) - if qualHasSymbol && (!isAccessible || sym.isRenamedSymbol(symName)) && sym.exists then - selector.orElse(dealiasedSelector).orElse(givenSelector).orElse(wildcard).orElse(renamedSelection) // selector with name or wildcard (or given) + if sym.exists && qualHasSymbol && (!isAccessible || sym.isRenamedSymbol(altName)) then + selector.orElse(dealiasedSelector).orElse(givenSelector).orElse(wildcard) // selector with name or wildcard (or given) else None private def isRenamedSymbol(symNameInScope: Option[Name])(using Context) = sym.name != nme.NO_NAME && symNameInScope.exists(_.toSimpleName != sym.name.toSimpleName) - private def dealias(symbol: Symbol)(using Context): Symbol = - if(symbol.isType && symbol.asType.denot.isAliasType) then - symbol.asType.typeRef.dealias.typeSymbol - else symbol + private def dealias(using Context): Symbol = + if sym.isType && sym.asType.denot.isAliasType then + sym.asType.typeRef.dealias.typeSymbol + else sym + /** Annotated with @unused */ private def isUnusedAnnot(using Context): Boolean = sym.annotations.exists(a => a.symbol == ctx.definitions.UnusedAnnot) From d192e48ca61cccad99ba00c774e9c812e8233cdd Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 25 Apr 2024 14:26:05 +0200 Subject: [PATCH 200/465] Apply suggestions --- compiler/src/dotty/tools/dotc/transform/CheckUnused.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Typer.scala | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index c7cf7ea68804..263a3191c21f 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -96,7 +96,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke ctx override def prepareForSelect(tree: tpd.Select)(using Context): Context = - val name = tree.getAttachment(OriginalName).orElse(Some(tree.name)) + val name = tree.removeAttachment(OriginalName).orElse(Some(tree.name)) unusedDataApply(_.registerUsed(tree.symbol, name)) override def prepareForBlock(tree: tpd.Block)(using Context): Context = diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2c7539495852..63692931b6dd 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -630,7 +630,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val checkedType = checkNotShadowed(ownType) val tree1 = checkedType match case checkedType: NamedType if !prefixIsElidable(checkedType) => - ref(checkedType).withSpan(tree.span).withAttachment(OriginalName, name) + ref(checkedType).withSpan(tree.span) case _ => def isScalaModuleRef = checkedType match case moduleRef: TypeRef if moduleRef.symbol.is(ModuleClass, butNot = JavaDefined) => true @@ -663,7 +663,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val selection = untpd.cpy.Select(tree)(qualifier, name) typed(selection, pt) else if rawType.exists then - setType(ensureAccessible(rawType, superAccess = false, tree.srcPos)) + val ref = setType(ensureAccessible(rawType, superAccess = false, tree.srcPos)) + if ref.symbol.name != name then + ref.withAttachment(OriginalName, name) + else ref else if name == nme._scope then // gross hack to support current xml literals. 
// awaiting a better implicits based solution for library-supported xml From 8825b070b1575b90d4c40678aade36ae0feb8ea9 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Fri, 26 Apr 2024 19:06:51 +0200 Subject: [PATCH 201/465] Add regression test for #16463 (#20269) Closes #16463 --- tests/neg/16463.scala | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 tests/neg/16463.scala diff --git a/tests/neg/16463.scala b/tests/neg/16463.scala new file mode 100644 index 000000000000..80a84cf02bc8 --- /dev/null +++ b/tests/neg/16463.scala @@ -0,0 +1,43 @@ +//> using scala "3.2.1" + +import scala.compiletime.ops.int._ + +object TupleOps { + import Tuple._ + + type Reduce[T <: NonEmptyTuple, F[_, _]] = + Fold[Tuple.Tail[T], Tuple.Head[T], F] + + type Maximum[T <: NonEmptyTuple] = Reduce[ + T, + [A, B] =>> (A, B) match { + case (Int, Int) => A `Max` B + } + ] + + type IndexOfRec[T <: Tuple, Elem, I <: Int] = Tuple.Elem[T, I] match { + case Elem => I + case _ => IndexOfRec[T, Elem, I + 1] + } + + type IndexOf[T <: Tuple, Elem] = IndexOfRec[T, Elem, 0] + + type DropLargest[T <: NonEmptyTuple] = + T `IndexOf` Maximum[T] match { + case Int => + ( + (T `Take` (T `IndexOf` Maximum[T])) `Concat` + (T `Drop` ((T `IndexOf` Maximum[T]) + 1)) + ) *: EmptyTuple + } + + type BubbleSort[T <: Tuple] = T match { + case EmptyTuple => EmptyTuple + case NonEmptyTuple => + BubbleSort[DropLargest[T]] `Concat` (Maximum[T] *: EmptyTuple) + } +} + +object demo extends App { + println(compiletime.constValue[TupleOps.BubbleSort[(1, 2)]]) // error: Recursion limit exceeded +} From e2c456f08a35d2cae5727c381c63a9e16936b583 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Mon, 29 Apr 2024 10:09:06 +0200 Subject: [PATCH 202/465] Delias type members in hover (#20173) Fixes https://github.com/scalameta/metals/issues/6230 --------- Co-authored-by: Katarzyna Marek --- 
.../src/main/dotty/tools/pc/AutoImports.scala | 2 +- .../dotty/tools/pc/AutoImportsProvider.scala | 2 +- .../tools/pc/CompletionItemResolver.scala | 4 +-- .../pc/ConvertToNamedArgumentsProvider.scala | 2 +- .../tools/pc/ExtractMethodProvider.scala | 2 +- .../main/dotty/tools/pc/HoverProvider.scala | 10 +++---- .../main/dotty/tools/pc/IndexedContext.scala | 4 +-- .../dotty/tools/pc/InferredTypeProvider.scala | 4 +-- .../dotty/tools/pc/MetalsInteractive.scala | 2 +- .../src/main/dotty/tools/pc/PcCollector.scala | 2 +- .../dotty/tools/pc/PcDefinitionProvider.scala | 2 +- .../pc/PcDocumentHighlightProvider.scala | 2 +- .../dotty/tools/pc/PcInlayHintsProvider.scala | 4 +-- .../tools/pc/PcInlineValueProviderImpl.scala | 2 +- .../dotty/tools/pc/PcRenameProvider.scala | 2 +- .../tools/pc/PcSemanticTokensProvider.scala | 2 +- .../tools/pc/SelectionRangeProvider.scala | 2 +- .../dotty/tools/pc/SemanticdbSymbols.scala | 2 +- .../tools/pc/SignatureHelpProvider.scala | 2 +- .../tools/pc/SymbolInformationProvider.scala | 6 ++-- .../pc/WorksheetSemanticdbProvider.scala | 2 +- .../completions/AmmoniteFileCompletions.scala | 2 +- .../completions/AmmoniteIvyCompletions.scala | 2 +- .../tools/pc/completions/CompletionPos.scala | 2 +- .../pc/completions/CompletionProvider.scala | 2 +- .../pc/completions/CompletionValue.scala | 2 +- .../tools/pc/completions/Completions.scala | 4 ++- .../pc/completions/FilenameCompletions.scala | 2 +- .../completions/InterpolatorCompletions.scala | 2 +- .../pc/completions/MatchCaseCompletions.scala | 14 ++++----- .../pc/completions/NamedArgCompletions.scala | 2 +- .../pc/completions/OverrideCompletions.scala | 2 +- .../pc/completions/ScalaCliCompletions.scala | 2 +- .../pc/printer/ShortenedTypePrinter.scala | 2 +- ...nts.scala => InteractiveEnrichments.scala} | 18 ++++++----- .../tools/pc/base/BaseCompletionSuite.scala | 2 +- .../dotty/tools/pc/base/BaseHoverSuite.scala | 2 +- .../tools/pc/base/BasePcDefinitionSuite.scala | 2 +- 
.../tools/pc/tests/hover/HoverDefnSuite.scala | 9 ++---- .../tools/pc/tests/hover/HoverTermSuite.scala | 30 +++++++++++++++++++ .../dotty/tools/pc/utils/PcAssertions.scala | 2 +- .../dotty/tools/pc/utils/TestInlayHints.scala | 2 +- 42 files changed, 100 insertions(+), 69 deletions(-) rename presentation-compiler/src/main/dotty/tools/pc/utils/{MtagsEnrichments.scala => InteractiveEnrichments.scala} (96%) diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala index e86732c3453d..bf814ef682e0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala index 8546bbf62384..ded7845ffa4e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala @@ -15,7 +15,7 @@ import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.pc.AutoImports.* import dotty.tools.pc.completions.CompletionPos -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala b/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala index c962617ac7fb..291ffe1fec30 100644 --- 
a/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala @@ -9,7 +9,7 @@ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types.TermRef -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.CompletionItem @@ -62,7 +62,7 @@ object CompletionItemResolver extends ItemResolver: if companion == NoSymbol || gsym.is(JavaDefined) then if gsymDoc.isEmpty() then if gsym.isAliasType then - fullDocstring(gsym.info.metalsDealias.typeSymbol, search) + fullDocstring(gsym.info.deepDealias.typeSymbol, search) else if gsym.is(Method) then gsym.info.finalResultType match case tr @ TermRef(_, sym) => diff --git a/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala index 00bfe17cb21b..d4f945760cc0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.core.Types.MethodType import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala index 55c4e4d9e4b6..4416d0c0d000 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala @@ -23,7 +23,7 @@ import 
dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.TextEdit import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala index 6f39b4871a06..c0f2d49d341e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala @@ -23,7 +23,7 @@ import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* object HoverProvider: @@ -106,7 +106,7 @@ object HoverProvider: if symbol.name == nme.selectDynamic || symbol.name == nme.applyDynamic => fallbackToDynamics(path, printer) case symbolTpes @ ((symbol, tpe) :: _) => - val exprTpw = tpe.widenTermRefExpr.metalsDealias + val exprTpw = tpe.widenTermRefExpr.deepDealias val hoverString = tpw match // https://github.com/scala/scala3/issues/8891 @@ -121,7 +121,7 @@ object HoverProvider: if tpe != NoType then tpe else tpw - printer.hoverSymbol(sym, finalTpe) + printer.hoverSymbol(sym, finalTpe.deepDealias) end match end hoverString @@ -185,9 +185,9 @@ object HoverProvider: findRefinement(parent) case _ => None - val refTpe = sel.typeOpt.widen.metalsDealias match + val refTpe = sel.typeOpt.widen.deepDealias match case r: RefinedType => Some(r) - case t: (TermRef | TypeProxy) => Some(t.termSymbol.info.metalsDealias) + case t: (TermRef | TypeProxy) => Some(t.termSymbol.info.deepDealias) case _ => None 
refTpe.flatMap(findRefinement).asJava diff --git a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala index ad6fe9420a81..6b74e3aa2ec1 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.typer.ImportInfo import dotty.tools.pc.IndexedContext.Result -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* sealed trait IndexedContext: given ctx: Context @@ -75,7 +75,7 @@ sealed trait IndexedContext: ) private def isTypeAliasOf(alias: Symbol, queriedSym: Symbol): Boolean = - alias.isAliasType && alias.info.metalsDealias.typeSymbol == queriedSym + alias.isAliasType && alias.info.deepDealias.typeSymbol == queriedSym final def isEmpty: Boolean = this match case IndexedContext.Empty => true diff --git a/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala index b37b1b6dff6c..d8cdbcd8fe69 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala @@ -24,7 +24,7 @@ import dotty.tools.dotc.util.Spans import dotty.tools.dotc.util.Spans.Span import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.TextEdit import org.eclipse.lsp4j as l @@ -101,7 +101,7 @@ final class InferredTypeProvider( case _ => true if isInScope(tpe) then tpe - else tpe.metalsDealias + else tpe.deepDealias val printer = ShortenedTypePrinter( symbolSearch, diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala b/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala index 648c59725742..9a541ef69942 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala @@ -240,7 +240,7 @@ object MetalsInteractive: end match end enclosingSymbolsWithExpressionType - import dotty.tools.pc.utils.MtagsEnrichments.* + import dotty.tools.pc.utils.InteractiveEnrichments.* private def recoverError( tree: Tree, diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala index 60def237badb..c447123c8725 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala @@ -26,7 +26,7 @@ import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans.Span -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* abstract class PcCollector[T]( driver: InteractiveDriver, diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala index 536ddae4203c..fc97dd1f1176 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala @@ -19,7 +19,7 @@ import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.Location diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala index cd4f4919b1ef..d9b94ebb82a3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala @@ -6,7 +6,7 @@ import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.DocumentHighlight import org.eclipse.lsp4j.DocumentHighlightKind diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala index 6d634f56363c..70aaa82eae05 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala @@ -4,7 +4,7 @@ package dotty.tools.pc import java.nio.file.Paths import scala.meta.internal.metals.ReportContext -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.printer.ShortenedTypePrinter import scala.meta.internal.pc.InlayHints import scala.meta.internal.pc.LabelPart @@ -139,7 +139,7 @@ class PcInlayHintsProvider( isInScope(tycon) && args.forall(isInScope) case _ => true if isInScope(tpe) then tpe - else tpe.metalsDealias(using indexedCtx.ctx) + else tpe.deepDealias(using indexedCtx.ctx) val dealiased = optDealias(tpe) val tpeStr = printer.tpe(dealiased) diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala index 39365475a075..38b5e8d0069b 100644 --- 
a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala @@ -17,7 +17,7 @@ import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala index 8a441e1e385a..94482767f917 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala @@ -8,7 +8,7 @@ import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala index f1a0b6a65aa7..a5332f1e4ff6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.core.Symbols.NoSymbol import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.SemanticTokenModifiers import org.eclipse.lsp4j.SemanticTokenTypes diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala index 9dce37028bea..7973f4103ff6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j import org.eclipse.lsp4j.SelectionRange diff --git a/presentation-compiler/src/main/dotty/tools/pc/SemanticdbSymbols.scala b/presentation-compiler/src/main/dotty/tools/pc/SemanticdbSymbols.scala index d298a88fc655..9ef19587948e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SemanticdbSymbols.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SemanticdbSymbols.scala @@ -7,7 +7,7 @@ import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.semanticdb.* -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* object SemanticdbSymbols: diff --git a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala index 80317185458b..edfd9c95fa84 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala @@ -10,7 +10,7 @@ import dotty.tools.dotc.util.Signatures import dotty.tools.dotc.util.SourceFile import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* 
+import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l import scala.jdk.CollectionConverters.* diff --git a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala index aa1508f89313..0743361f255d 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala @@ -12,9 +12,9 @@ import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* -import dotty.tools.pc.utils.MtagsEnrichments.metalsDealias +import dotty.tools.pc.utils.InteractiveEnrichments.deepDealias import dotty.tools.pc.SemanticdbSymbols -import dotty.tools.pc.utils.MtagsEnrichments.allSymbols +import dotty.tools.pc.utils.InteractiveEnrichments.allSymbols class SymbolInformationProvider(using Context): private def toSymbols( @@ -77,7 +77,7 @@ class SymbolInformationProvider(using Context): then classSym.asClass.parentSyms.map(SemanticdbSymbols.symbolName) else Nil val dealisedSymbol = - if sym.isAliasType then sym.info.metalsDealias.typeSymbol else sym + if sym.isAliasType then sym.info.deepDealias.typeSymbol else sym val classOwner = sym.ownersIterator.drop(1).find(s => s.isClass || s.is(Flags.Module)) val overridden = sym.denot.allOverriddenSymbols.toList diff --git a/presentation-compiler/src/main/dotty/tools/pc/WorksheetSemanticdbProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/WorksheetSemanticdbProvider.scala index 4c8c92759a3b..bd9efb49d148 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/WorksheetSemanticdbProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/WorksheetSemanticdbProvider.scala @@ -2,7 +2,7 @@ package dotty.tools.pc import java.nio.file.Path -import dotty.tools.pc.utils.MtagsEnrichments.* +import 
dotty.tools.pc.utils.InteractiveEnrichments.* trait WorksheetSemanticdbProvider: diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala index 31bf7c348119..81337c7d8dcb 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.ast.tpd.Tree import dotty.tools.dotc.ast.untpd.ImportSelector import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.StdNames.* -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala index ec4a1813a437..718b57cd4828 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala @@ -4,7 +4,7 @@ import scala.meta.internal.mtags.CoursierComplete import dotty.tools.dotc.ast.untpd.ImportSelector import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* object AmmoniteIvyCompletions: def contribute( diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala index 6e828f8f2058..ad571ff843c3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala @@ -10,7 +10,7 @@ import dotty.tools.dotc.core.Contexts.* import 
dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans.* import dotty.tools.dotc.interactive.Completion -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 7e02c23229e8..9cd98de33141 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -22,7 +22,7 @@ import dotty.tools.pc.AutoImports.AutoImportEdits import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.Command import org.eclipse.lsp4j.CompletionItem diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala index e1877a1a9c88..9071b2cd2a23 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala @@ -10,7 +10,7 @@ import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.Types.Type import dotty.tools.pc.printer.ShortenedTypePrinter -import dotty.tools.pc.utils.MtagsEnrichments.decoded +import dotty.tools.pc.utils.InteractiveEnrichments.decoded import org.eclipse.lsp4j.CompletionItemKind import org.eclipse.lsp4j.CompletionItemTag diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala 
b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index 3f2d89a15b72..052287193540 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -33,7 +33,9 @@ import dotty.tools.dotc.util.SrcPos import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.buildinfo.BuildInfo import dotty.tools.pc.completions.OverrideCompletions.OverrideExtractor -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* +import dotty.tools.dotc.core.Denotations.SingleDenotation +import dotty.tools.dotc.interactive.Interactive class Completions( text: String, diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/FilenameCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/FilenameCompletions.scala index a9dfa55f89bd..8d2e97856e82 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/FilenameCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/FilenameCompletions.scala @@ -4,7 +4,7 @@ package completions import dotty.tools.dotc.ast.tpd.TypeDef import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* object FilenameCompletions: diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala index 9c973e6e63e0..2e39c17b24b3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala @@ -15,7 +15,7 @@ import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.Types.Type import 
dotty.tools.pc.CompilerSearchVisitor import dotty.tools.pc.IndexedContext -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala index 908865124f58..3ca57fc39cdf 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala @@ -32,7 +32,7 @@ import dotty.tools.pc.AutoImports.SymbolImport import dotty.tools.pc.MetalsInteractive.* import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l @@ -86,12 +86,12 @@ object CaseKeywordCompletion: ) => val args = head.argTypes.init if args.length > 1 then - Some(definitions.tupleType(args).widen.metalsDealias) - else args.headOption.map(_.widen.metalsDealias) + Some(definitions.tupleType(args).widen.deepDealias) + else args.headOption.map(_.widen.deepDealias) case _ => None case _ => None case sel => - Some(sel.tpe.widen.metalsDealias) + Some(sel.tpe.widen.deepDealias) selTpe .map { selTpe => @@ -156,7 +156,7 @@ object CaseKeywordCompletion: indexedContext.scopeSymbols .foreach(s => - val ts = s.info.metalsDealias.typeSymbol + val ts = s.info.deepDealias.typeSymbol if isValid(ts) then visit(autoImportsGen.inferSymbolImport(ts)) ) // Step 2: walk through known subclasses of sealed types. 
@@ -259,8 +259,8 @@ object CaseKeywordCompletion: clientSupportsSnippets ) - val tpeStr = printer.tpe(selector.tpe.widen.metalsDealias.bounds.hi) - val tpe = selector.typeOpt.widen.metalsDealias.bounds.hi match + val tpeStr = printer.tpe(selector.tpe.widen.deepDealias.bounds.hi) + val tpe = selector.typeOpt.widen.deepDealias.bounds.hi match case tr @ TypeRef(_, _) => tr.underlying case t => t diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala index 8ac5ef64af10..647b151a635b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala @@ -29,7 +29,7 @@ import dotty.tools.dotc.core.Types.TypeBounds import dotty.tools.dotc.core.Types.WildcardType import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.IndexedContext -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import scala.annotation.tailrec object NamedArgCompletions: diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala index 8d96396999da..df0bb70b596c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala @@ -29,7 +29,7 @@ import dotty.tools.pc.AutoImports.AutoImport import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala index fce35ab69ce3..e2a0a033ee6b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala @@ -4,7 +4,7 @@ import scala.meta.internal.mtags.CoursierComplete import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* class ScalaCliCompletions( coursierComplete: CoursierComplete, diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala index a7cf1a703a1f..559e199f3449 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala @@ -29,7 +29,7 @@ import dotty.tools.pc.AutoImports.ImportSel.Rename import dotty.tools.pc.IndexedContext import dotty.tools.pc.IndexedContext.Result import dotty.tools.pc.Params -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.TextEdit diff --git a/presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala similarity index 96% rename from presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala rename to presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala index e4385392973f..fb41e8859801 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala @@ -20,8 +20,7 
@@ import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.SymDenotations.NoDenotation import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.core.Types.AppliedType -import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition @@ -31,7 +30,7 @@ import dotty.tools.pc.SemanticdbSymbols import org.eclipse.lsp4j as l -object MtagsEnrichments extends CommonMtagsEnrichments: +object InteractiveEnrichments extends CommonMtagsEnrichments: extension (driver: InteractiveDriver) @@ -399,11 +398,16 @@ object MtagsEnrichments extends CommonMtagsEnrichments: end extension extension (tpe: Type) - def metalsDealias(using Context): Type = + def deepDealias(using Context): Type = tpe.dealias match case app @ AppliedType(tycon, params) => - // we dealias applied type params by hand, because `dealias` doesn't do it - AppliedType(tycon, params.map(_.metalsDealias)) + AppliedType(tycon, params.map(_.deepDealias)) + case aliasingBounds: AliasingBounds => + aliasingBounds.derivedAlias(aliasingBounds.alias.dealias) + case TypeBounds(lo, hi) => + TypeBounds(lo.dealias, hi.dealias) + case RefinedType(parent, name, refinedInfo) => + RefinedType(parent.dealias, name, refinedInfo.deepDealias) case dealised => dealised -end MtagsEnrichments +end InteractiveEnrichments diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala index 776aab8bc2f7..3e5269b947a0 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala @@ -9,7 +9,7 @@ import scala.meta.pc.CancelToken import scala.language.unsafeNulls import dotty.tools.pc.completions.CompletionSource -import 
dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.utils.{TestCompletions, TextEdits} import org.eclipse.lsp4j.{CompletionItem, CompletionList} diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala index 0b8d663f8b33..0f385631d9dc 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala @@ -5,7 +5,7 @@ import java.nio.file.Paths import scala.meta.internal.metals.{CompilerOffsetParams, CompilerRangeParams} import scala.language.unsafeNulls -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.utils.{RangeReplace, TestHovers} abstract class BaseHoverSuite diff --git a/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala index 8269d4ce1c44..58c2bcdb171c 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala @@ -8,7 +8,7 @@ import scala.language.unsafeNulls import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.{SourceFile, SourcePosition} -import dotty.tools.pc.utils.MtagsEnrichments.toLsp +import dotty.tools.pc.utils.InteractiveEnrichments.toLsp import dotty.tools.pc.utils.TextEdits import org.eclipse.lsp4j.TextEdit diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverDefnSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverDefnSuite.scala index 7a647fa40f5f..f4ce4473e60a 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverDefnSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverDefnSuite.scala @@ -223,13 +223,8 @@ class HoverDefnSuite 
extends BaseHoverSuite: | <> |} |""".stripMargin, - """|**Expression type**: - |```scala - |Option[Int] - |``` - |**Symbol signature**: - |```scala - |val x: Option[T] + """|```scala + |val x: Option[Int] |``` |""".stripMargin.hover ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala index d1e90241e639..b51974b00fb0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala @@ -653,3 +653,33 @@ class HoverTermSuite extends BaseHoverSuite: |``` |""".stripMargin ) + + @Test def `dealias-type-members-in-structural-types1`: Unit = + check( + """object Obj { + | trait A extends Sup { self => + | type T + | def member : T + | } + | val x: A { type T = Int} = ??? + | + | <> + | + |}""".stripMargin, + """def member: Int""".stripMargin.hover + ) + + @Test def `dealias-type-members-in-structural-types2`: Unit = + check( + """object Obj: + | trait A extends Sup { self => + | type T + | def fun(body: A { type T = self.T} => Unit) = () + | } + | val x: A { type T = Int} = ??? 
+ | + | x.fun: <> => + | () + |""".stripMargin, + """yy: A{type T = Int}""".stripMargin.hover + ) diff --git a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala index 6dfc8acec66c..ef15121c6702 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala @@ -4,7 +4,7 @@ import scala.language.unsafeNulls import dotty.tools.pc.completions.CompletionSource import dotty.tools.dotc.util.DiffUtil -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.hamcrest import org.hamcrest.* diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala index 98ebb0852735..a923b76b955c 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala @@ -3,7 +3,7 @@ package dotty.tools.pc.utils import scala.collection.mutable.ListBuffer import scala.meta.internal.jdk.CollectionConverters._ -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.utils.TextEdits import org.eclipse.lsp4j.InlayHint From dd527fc120aa6e7819b40869f7537a5c88aa657c Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Sun, 28 Apr 2024 18:10:23 +0200 Subject: [PATCH 203/465] Remove left-over line --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index f406ec5f26c0..b19afb5c24af 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1104,7 +1104,6 @@ trait Applications extends Compatibility { 
case msg: TypeMismatch => msg.inTree match case Some(arg) if tree.args.exists(_.span == arg.span) => - val Select(qualifier, _) = fun1: @unchecked val noteText = i"""The required type comes from a parameter of the automatically |inserted `apply` method of `${qualifier.tpe}`, From 1f06af15885e7fabb5a6dc801a7c946622b4cab8 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 29 Apr 2024 11:21:00 +0200 Subject: [PATCH 204/465] Move maybeAddInsertedApplyNote to outer scope, add explicit types --- .../dotty/tools/dotc/typer/Applications.scala | 55 ++++++++++--------- 1 file changed, 29 insertions(+), 26 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index b19afb5c24af..05175900e04e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1024,6 +1024,34 @@ trait Applications extends Compatibility { } } + /** If the applied function is an automatically inserted `apply` + * method and one of its arguments has a type mismatch , append + * a note to the error message that explains where the required + * type comes from. See #19680 and associated test case. 
+ */ + def maybeAddInsertedApplyNote(failedState: TyperState, fun1: Tree)(using Context): Unit = + if fun1.symbol.name == nme.apply && fun1.span.isSynthetic then + fun1 match + case Select(qualifier, _) => + def mapMessage(dia: Diagnostic): Diagnostic = + dia match + case dia: Diagnostic.Error => + dia.msg match + case msg: TypeMismatch => + msg.inTree match + case Some(arg) if tree.args.exists(_.span == arg.span) => + val noteText = + i"""The required type comes from a parameter of the automatically + |inserted `apply` method of `${qualifier.tpe}`, + |which is the type of `${qualifier.show}`.""".stripMargin + Diagnostic.Error(msg.appendExplanation("\n\n" + noteText), dia.pos) + case _ => dia + case msg => dia + case dia => dia + failedState.reporter.mapBufferedMessages(mapMessage) + case _ => () + else () + fun1.tpe match { case err: ErrorType => cpy.Apply(tree)(fun1, proto.typedArgs()).withType(err) case TryDynamicCallType => @@ -1085,35 +1113,10 @@ trait Applications extends Compatibility { } { (failedVal, failedState) => def fail = - insertedApplyNote() + maybeAddInsertedApplyNote(failedState, fun1) failedState.commit() failedVal - /** If the applied function is an automatically inserted `apply` - * method and one of its arguments has a type mismatch , append - * a note to the error message that explains where the required - * type comes from. See #19680 and associated test case. 
- */ - def insertedApplyNote() = - if fun1.symbol.name == nme.apply && fun1.span.isSynthetic then - fun1 match - case Select(qualifier, _) => - failedState.reporter.mapBufferedMessages: - case dia: Diagnostic.Error => - dia.msg match - case msg: TypeMismatch => - msg.inTree match - case Some(arg) if tree.args.exists(_.span == arg.span) => - val noteText = - i"""The required type comes from a parameter of the automatically - |inserted `apply` method of `${qualifier.tpe}`, - |which is the type of `${qualifier.show}`.""".stripMargin - Diagnostic.Error(msg.appendExplanation("\n\n" + noteText), dia.pos) - case _ => dia - case msg => dia - case dia => dia - case _ => () - // Try once with original prototype and once (if different) with tupled one. // The reason we need to try both is that the decision whether to use tupled // or not was already taken but might have to be revised when an implicit From 4694b3bfeb4bca6d23578c94b4efa222f5d9da4b Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sun, 14 Apr 2024 18:17:39 +0200 Subject: [PATCH 205/465] Add support for companion in MacroAnnotations --- .../dotty/tools/dotc/CompilationUnit.scala | 4 +- .../dotc/ast/TreeMapWithTrackedStats.scala | 77 ++++++++ .../dotty/tools/dotc/transform/Inlining.scala | 91 ++++++--- .../dotc/transform/MacroAnnotations.scala | 176 ++++++++++-------- .../tools/dotc/transform/PostTyper.scala | 3 +- .../scala/annotation/MacroAnnotation.scala | 35 ++-- .../annot-accessIndirect/Macro_1.scala | 4 +- .../annot-accessIndirect/Macro_2.scala | 6 +- tests/neg-macros/annot-crash/Macro_1.scala | 2 +- tests/neg-macros/annot-empty-result.check | 6 +- .../annot-empty-result/Macro_1.scala | 2 +- .../annot-error-annot/Macro_1.scala | 6 +- .../neg-macros/annot-ill-abort/Macro_1.scala | 2 +- .../Macro_1.scala | 8 +- .../annot-mod-class-add-top-val/Macro_1.scala | 8 +- .../Macro_1.scala | 4 +- .../Macro_1.scala | 4 +- tests/neg-macros/annot-on-type/Macro_1.scala | 4 +- .../annot-result-owner/Macro_1.scala | 6 +- 
tests/neg-macros/annot-suspend-cycle.check | 12 +- .../annot-suspend-cycle/Macro.scala | 10 +- tests/neg-macros/i18677-a/Macro_1.scala | 24 ++- tests/neg-macros/i18677-b/Macro_1.scala | 24 ++- tests/neg-macros/i18825/Macro_1.scala | 10 +- tests/neg-macros/i19676.check | 5 + tests/neg-macros/i19676/Macro_1.scala | 19 ++ tests/neg-macros/i19676/Test_2.scala | 4 + tests/neg-macros/wrong-owner.check | 2 +- tests/neg-macros/wrong-owner/Macro_1.scala | 10 +- .../Macro_1.scala | 11 ++ .../Test_2.scala | 3 + .../pos-macros/annot-in-object/Macro_1.scala | 6 +- tests/pos-macros/annot-suspend/Macro_1.scala | 4 +- .../annot-then-inline/Macro_1.scala | 8 +- tests/pos-macros/i19537/Macro_1.scala | 3 +- tests/pos-macros/i19539/Macro_1.scala | 3 +- .../macro-annot-with-companion/Macro_1.scala | 12 ++ .../macro-annot-with-companion/Test_2.scala | 15 ++ .../annot-add-global-class/Macro_1.scala | 8 +- .../annot-add-global-object/Macro_1.scala | 8 +- .../annot-add-local-class/Macro_1.scala | 8 +- .../annot-add-local-object/Macro_1.scala | 8 +- .../annot-add-nested-class/Macro_1.scala | 8 +- .../annot-add-nested-object/Macro_1.scala | 8 +- .../annot-annot-order/Macro_1.scala | 10 +- tests/run-macros/annot-bind/Macro_1.scala | 8 +- .../run-macros/annot-changeVal/Macro_1.scala | 6 +- .../annot-concrete-class/Macro_1.scala | 2 +- tests/run-macros/annot-export/Macro_1.scala | 6 +- tests/run-macros/annot-gen2/Macro_1.scala | 8 +- tests/run-macros/annot-gen2/Macro_2.scala | 8 +- tests/run-macros/annot-generate/Macro_1.scala | 4 +- tests/run-macros/annot-generate/Macro_2.scala | 8 +- .../run-macros/annot-macro-main/Macro_1.scala | 10 +- tests/run-macros/annot-memo/Macro_1.scala | 10 +- .../annot-mod-class-add-def/Macro_1.scala | 10 +- .../Macro_1.scala | 10 +- .../Macro_1.scala | 10 +- .../Macro_1.scala | 10 +- .../annot-mod-class-add-val/Macro_1.scala | 10 +- .../annot-mod-class-add-var/Macro_1.scala | 10 +- .../annot-mod-class-data/Macro_1.scala | 10 +- 
.../annot-mod-class-equals/Macro_1.scala | 10 +- .../annot-mod-class-mod-def/Macro_1.scala | 10 +- .../annot-mod-class-mod-val/Macro_1.scala | 10 +- .../Macro_1.scala | 10 +- .../Macro_1.scala | 10 +- .../Macro_1.scala | 10 +- .../annot-result-order/Macro_1.scala | 4 +- .../run-macros/annot-simple-fib/Macro_1.scala | 8 +- .../run-macros/annot-unrollLast/Macro_1.scala | 2 +- tests/run-macros/i18806/Macro_1.scala | 10 +- tests/run-macros/i19676/Macro_1.scala | 28 +++ tests/run-macros/i19676/Test_2.scala | 36 ++++ tests/run/quotes-add-erased/Macro_1.scala | 2 +- 75 files changed, 631 insertions(+), 350 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala create mode 100644 tests/neg-macros/i19676.check create mode 100644 tests/neg-macros/i19676/Macro_1.scala create mode 100644 tests/neg-macros/i19676/Test_2.scala create mode 100644 tests/pos-macros/annot-dependency-between-modules/Macro_1.scala create mode 100644 tests/pos-macros/annot-dependency-between-modules/Test_2.scala create mode 100644 tests/pos-macros/macro-annot-with-companion/Macro_1.scala create mode 100644 tests/pos-macros/macro-annot-with-companion/Test_2.scala create mode 100644 tests/run-macros/i19676/Macro_1.scala create mode 100644 tests/run-macros/i19676/Test_2.scala diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index adced57d5801..326107a410a9 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -17,7 +17,7 @@ import config.{SourceVersion, Feature} import StdNames.nme import scala.annotation.internal.sharable import scala.util.control.NoStackTrace -import transform.MacroAnnotations +import transform.MacroAnnotations.isMacroAnnotation class CompilationUnit protected (val source: SourceFile, val info: CompilationUnitInfo | Null) { @@ -193,7 +193,7 @@ object CompilationUnit { case _ => case _ => for annot <- 
tree.symbol.annotations do - if MacroAnnotations.isMacroAnnotation(annot) then + if annot.isMacroAnnotation then ctx.compilationUnit.hasMacroAnnotations = true traverseChildren(tree) } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala new file mode 100644 index 000000000000..b302a2463a4e --- /dev/null +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala @@ -0,0 +1,77 @@ +package dotty.tools.dotc +package ast + +import tpd.* +import core.Contexts.* +import core.Symbols.* +import util.Property + +import scala.collection.mutable + +/** + * It is safe to assume that the companion of a tree is in the same scope. + * Therefore, when expanding MacroAnnotations, we will only keep track of + * the trees in the same scope as the current transformed tree + */ +abstract class TreeMapWithTrackedStats extends TreeMapWithImplicits: + + import TreeMapWithTrackedStats.* + + /** Fetch the corresponding tracked tree for a given symbol */ + protected final def getTracked(sym: Symbol)(using Context): Option[MemberDef] = + for trees <- ctx.property(TrackedTrees) + tree <- trees.get(sym) + yield tree + + /** Update the tracked trees */ + protected final def updateTracked(tree: Tree)(using Context): Tree = + tree match + case tree: MemberDef => + trackedTrees.update(tree.symbol, tree) + tree + case _ => tree + end updateTracked + + /** Process a list of trees and give the priority to tracked trees */ + private final def withUpdatedTrackedTrees(stats: List[Tree])(using Context) = + val trackedTrees = TreeMapWithTrackedStats.trackedTrees + stats.mapConserve: + case tree: MemberDef if trackedTrees.contains(tree.symbol) => + trackedTrees(tree.symbol) + case stat => stat + + override def transform(tree: Tree)(using Context): Tree = + tree match + case PackageDef(_, stats) => + inContext(trackedDefinitionsCtx(stats)): // Step I: Collect and memoize all the definition trees + // Step II: &#13;
Transform the tree + val pkg@PackageDef(pid, stats) = super.transform(tree): @unchecked + // Step III: Reconcile between the symbols in syms and the tree + cpy.PackageDef(pkg)(pid = pid, stats = withUpdatedTrackedTrees(stats)) + case block: Block => + inContext(trackedDefinitionsCtx(block.stats)): // Step I: Collect all the member definitions in the block + // Step II: Transform the tree + val b@Block(stats, expr) = super.transform(tree): @unchecked + // Step III: Reconcile between the symbols in syms and the tree + cpy.Block(b)(expr = expr, stats = withUpdatedTrackedTrees(stats)) + case TypeDef(_, impl: Template) => + inContext(trackedDefinitionsCtx(impl.body)): // Step I: Collect and memoize all the stats + // Step II: Transform the tree + val newTree@TypeDef(name, impl: Template) = super.transform(tree): @unchecked + // Step III: Reconcile between the symbols in syms and the tree + cpy.TypeDef(newTree)(rhs = cpy.Template(impl)(body = withUpdatedTrackedTrees(impl.body))) + case _ => super.transform(tree) + +end TreeMapWithTrackedStats + +object TreeMapWithTrackedStats: + private val TrackedTrees = new Property.Key[mutable.Map[Symbol, tpd.MemberDef]] + + /** Fetch the tracked trees in the current context */ + private def trackedTrees(using Context): mutable.Map[Symbol, MemberDef] = + ctx.property(TrackedTrees).get + + /** Build a context and track the provided MemberDef trees */ + private def trackedDefinitionsCtx(stats: List[Tree])(using Context): Context = + val treesToTrack = stats.collect { case m: MemberDef => (m.symbol, m) } + ctx.fresh.setProperty(TrackedTrees, mutable.Map(treesToTrack*)) diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 94df114290e4..335d5a38931a 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -1,20 +1,24 @@ package dotty.tools.dotc package transform +import ast.tpd +import &#13;
ast.Trees.* +import ast.TreeMapWithTrackedStats import core.* import Flags.* +import Decorators.* import Contexts.* import Symbols.* +import Decorators.* +import config.Printers.inlining +import DenotTransformers.IdentityDenotTransformer +import MacroAnnotations.hasMacroAnnotation +import inlines.Inlines +import quoted.* +import staging.StagingLevel +import util.Property -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.ast.Trees.* -import dotty.tools.dotc.quoted.* -import dotty.tools.dotc.inlines.Inlines -import dotty.tools.dotc.ast.TreeMapWithImplicits -import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer -import dotty.tools.dotc.staging.StagingLevel - -import scala.collection.mutable.ListBuffer +import scala.collection.mutable /** Inlines all calls to inline methods that are not in an inline method or a quote */ class Inlining extends MacroTransform, IdentityDenotTransformer { @@ -56,38 +60,21 @@ class Inlining extends MacroTransform, IdentityDenotTransformer { def newTransformer(using Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = - new InliningTreeMap().transform(tree) + InliningTreeMap().transform(tree) } - private class InliningTreeMap extends TreeMapWithImplicits { + private class InliningTreeMap extends TreeMapWithTrackedStats { /** List of top level classes added by macro annotation in a package object. * These are added to the PackageDef that owns this particular package object. 
*/ - private val newTopClasses = MutableSymbolMap[ListBuffer[Tree]]() + private val newTopClasses = MutableSymbolMap[mutable.ListBuffer[Tree]]() override def transform(tree: Tree)(using Context): Tree = { tree match case tree: MemberDef => - if tree.symbol.is(Inline) then tree - else if tree.symbol.is(Param) then super.transform(tree) - else if - !tree.symbol.isPrimaryConstructor - && StagingLevel.level == 0 - && MacroAnnotations.hasMacroAnnotation(tree.symbol) - then - val trees = (new MacroAnnotations(self)).expandAnnotations(tree) - val trees1 = trees.map(super.transform) - - // Find classes added to the top level from a package object - val (topClasses, trees2) = - if ctx.owner.isPackageObject then trees1.partition(_.symbol.owner == ctx.owner.owner) - else (Nil, trees1) - if topClasses.nonEmpty then - newTopClasses.getOrElseUpdate(ctx.owner.owner, new ListBuffer) ++= topClasses - - flatTree(trees2) - else super.transform(tree) + // Fetch the latest tracked tree (It might have already been transformed by its companion) + transformMemberDef(getTracked(tree.symbol).getOrElse(tree)) case _: Typed | _: Block => super.transform(tree) case _: PackageDef => @@ -113,7 +100,49 @@ class Inlining extends MacroTransform, IdentityDenotTransformer { else Inlines.inlineCall(tree1) else super.transform(tree) } + + private def transformMemberDef(tree: MemberDef)(using Context) : Tree = + if tree.symbol.is(Inline) then tree + else if tree.symbol.is(Param) then + super.transform(tree) + else if + !tree.symbol.isPrimaryConstructor + && StagingLevel.level == 0 + && tree.symbol.hasMacroAnnotation + then + // Fetch the companion's tree + val companionSym = + if tree.symbol.is(ModuleClass) then tree.symbol.companionClass + else if tree.symbol.is(ModuleVal) then NoSymbol + else tree.symbol.companionModule.moduleClass + + // Expand and process MacroAnnotations + val companion = getTracked(companionSym) + val (trees, newCompanion) = MacroAnnotations.expandAnnotations(tree, companion) + + 
// Enter the new symbols & Update the tracked trees + (newCompanion.toList ::: trees).foreach: tree => + MacroAnnotations.enterMissingSymbols(tree, self) + + // Perform inlining on the expansion of the annotations + val trees1 = trees.map(super.transform) + trees1.foreach(updateTracked) + if newCompanion ne companion then + newCompanion.map(super.transform).foreach(updateTracked) + + // Find classes added to the top level from a package object + val (topClasses, trees2) = + if ctx.owner.isPackageObject then trees1.partition(_.symbol.owner == ctx.owner.owner) + else (Nil, trees1) + if topClasses.nonEmpty then + newTopClasses.getOrElseUpdate(ctx.owner.owner, new mutable.ListBuffer) ++= topClasses + flatTree(trees2) + else + updateTracked(super.transform(tree)) + end transformMemberDef + } + } object Inlining: diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala index c83e4d7b7819..6625190661e3 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -3,19 +3,19 @@ package transform import scala.language.unsafeNulls -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.ast.Trees.* -import dotty.tools.dotc.config.Printers.{macroAnnot => debug} -import dotty.tools.dotc.core.Annotations.* -import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.core.Decorators.* -import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer -import dotty.tools.dotc.core.Flags.* -import dotty.tools.dotc.core.MacroClassLoader -import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.core.Types.* -import dotty.tools.dotc.quoted.* -import dotty.tools.dotc.util.SrcPos +import ast.tpd +import ast.Trees.* +import config.Printers.macroAnnot as debug +import core.Annotations.* +import core.Contexts.* +import core.Decorators.* +import core.DenotTransformers.DenotTransformer +import 
core.Flags.* +import core.MacroClassLoader +import core.Symbols.* +import core.Types.* +import quoted.* +import util.SrcPos import scala.quoted.runtime.impl.{QuotesImpl, SpliceScope} import scala.quoted.Quotes @@ -23,94 +23,118 @@ import scala.util.control.NonFatal import java.lang.reflect.InvocationTargetException -class MacroAnnotations(phase: IdentityDenotTransformer): +object MacroAnnotations: import tpd.* - import MacroAnnotations.* + + extension (annot: Annotation) + /** Is this an annotation that implements `scala.annation.MacroAnnotation` */ + def isMacroAnnotation(using Context): Boolean = + annot.tree.symbol.maybeOwner.derivesFrom(defn.MacroAnnotationClass) + end extension + + extension (sym: Symbol) + /** Is this symbol annotated with an annotation that implements `scala.annation.MacroAnnotation` */ + def hasMacroAnnotation(using Context): Boolean = + sym.getAnnotation(defn.MacroAnnotationClass).isDefined + end extension /** Expands every macro annotation that is on this tree. * Returns a list with transformed definition and any added definitions. 
*/ - def expandAnnotations(tree: MemberDef)(using Context): List[DefTree] = - if !hasMacroAnnotation(tree.symbol) then - List(tree) - else if tree.symbol.is(Module) && !tree.symbol.isClass then - // only class is transformed - List(tree) + def expandAnnotations(tree: MemberDef, companion: Option[MemberDef])(using Context): (List[MemberDef], Option[MemberDef]) = + if !tree.symbol.hasMacroAnnotation then + (List(tree), companion) + else if tree.symbol.is(ModuleVal) then + // only module classes are transformed + (List(tree), companion) else if tree.symbol.isType && !tree.symbol.isClass then report.error("macro annotations are not supported on type", tree) - List(tree) + (List(tree), companion) else debug.println(i"Expanding macro annotations of:\n$tree") - val macroInterpreter = new Interpreter(tree.srcPos, MacroClassLoader.fromContext) - val allTrees = List.newBuilder[DefTree] - var insertedAfter: List[List[DefTree]] = Nil + val prefixedTrees = List.newBuilder[MemberDef] // Apply all macro annotation to `tree` and collect new definitions in order - val transformedTree: DefTree = tree.symbol.annotations.foldLeft(tree) { (tree, annot) => - if isMacroAnnotation(annot) then - debug.println(i"Expanding macro annotation: ${annot}") - - // Interpret call to `new myAnnot(..).transform(using )()` - val transformedTrees = callMacro(macroInterpreter, tree, annot) - transformedTrees.span(_.symbol != tree.symbol) match - case (prefixed, newTree :: suffixed) => - allTrees ++= prefixed - insertedAfter = suffixed :: insertedAfter - for prefixedTree <- prefixed do - checkMacroDef(prefixedTree, tree, annot) - for suffixedTree <- suffixed do - checkMacroDef(suffixedTree, tree, annot) - TreeChecker.checkMacroGeneratedTree(tree, newTree) - newTree - case (Nil, Nil) => - report.error(i"Unexpected `Nil` returned by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) - tree - case (_, Nil) => - report.error(i"Transformed tree for ${tree} was not return by 
`(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) - tree - else - tree - } - - allTrees += transformedTree - insertedAfter.foreach(allTrees.++=) - - val result = allTrees.result() - for tree <- result do enterMissingSymbols(tree) + val unprocessed = (tree, companion, List.empty[MemberDef]) + val (transformedTree, transformedCompanion, suffixed) = + tree.symbol.annotations.foldLeft(unprocessed): (lastResult, annot) => + if annot.isMacroAnnotation then + val (tree, companion, suffixed) = lastResult + debug.println(i"Expanding macro annotation: ${annot}") + // Interpret call to `new myAnnot(..).transform(using )(, )` + val (transformedTrees, transformedCompanion) = callMacro(macroInterpreter, tree, companion, annot) + // Establish the trees order and check the integrity of the trees + transformedTrees.span(_.symbol != tree.symbol) match + case (newPrefixed, newTree :: newSuffixed) => + // Check the integrity of the generated trees + for prefixedTree <- newPrefixed do checkMacroDef(prefixedTree, tree, annot) + for suffixedTree <- newSuffixed do checkMacroDef(suffixedTree, tree, annot) + for tcompanion <- transformedCompanion do TreeChecker.checkMacroGeneratedTree(companion.get, tcompanion) + TreeChecker.checkMacroGeneratedTree(tree, newTree) + prefixedTrees ++= newPrefixed + (newTree, transformedCompanion, newSuffixed ::: suffixed) + case (_, Nil) => + report.error(i"Transformed tree for ${tree.symbol} was not return by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) + lastResult + else + lastResult + end val + + // Complete the list of transformed/generated definitions + val result = prefixedTrees.result() ::: transformedTree :: suffixed debug.println(result.map(_.show).mkString("expanded to:\n", "\n", "")) - result + (result, transformedCompanion) + end expandAnnotations - /** Interpret the code `new annot(..).transform(using )()` */ - private def callMacro(interpreter: Interpreter, tree: MemberDef, annot: 
Annotation)(using Context): List[MemberDef] = - // TODO: Remove when scala.annaotaion.MacroAnnotation is no longer experimental + /** Interpret the code `new annot(..).transform(using )(, )` */ + private def callMacro(interpreter: Interpreter, tree: MemberDef, companion: Option[MemberDef], annot: Annotation) + (using Context): (List[MemberDef], Option[MemberDef]) = + // TODO: Remove when scala.annotation.MacroAnnotation is no longer experimental import scala.reflect.Selectable.reflectiveSelectable type MacroAnnotation = { - def transform(using Quotes)(tree: Object/*Erased type of quotes.refelct.Definition*/): List[MemberDef /*quotes.refelct.Definition known to be MemberDef in QuotesImpl*/] + def transform(using Quotes)( + tree: Object/*Erased type of quotes.reflect.Definition*/, + companion: Option[Object/*Erased type of quotes.reflect.Definition*/] + ): List[MemberDef /*quotes.refelct.Definition known to be MemberDef in QuotesImpl*/] } // Interpret macro annotation instantiation `new myAnnot(..)` + // TODO: Make this error handling stronger (no error handling at the moment) val annotInstance = interpreter.interpret[MacroAnnotation](annot.tree).get + // TODO: Remove when scala.annaotaion.MacroAnnotation is no longer experimental assert(annotInstance.getClass.getClassLoader.loadClass("scala.annotation.MacroAnnotation").isInstance(annotInstance)) val quotes = QuotesImpl()(using SpliceScope.contextWithNewSpliceScope(tree.symbol.sourcePos)(using MacroExpansion.context(tree)).withOwner(tree.symbol.owner)) - try annotInstance.transform(using quotes)(tree.asInstanceOf[quotes.reflect.Definition]) + try + val result = annotInstance.transform(using quotes)(tree, companion) + // Process the result based on if the companion was present or not + // The idea is that we try to find a transformation of the companion if we do provide one + companion.map(_.symbol) match + case None => (result, companion) + case Some(companionSym) => result.partition(_.symbol == companionSym) match + 
case (Nil, result) => (result, companion) // companion didn't change + case (newCompanion :: Nil, result) => (result, Some(newCompanion)) + case (_, result) => + report.error(i"Transformed companion for ${tree.symbol} was returned more than once by `(${annot.tree}).transform(..)` during macro expansion", annot.tree) + (result, companion) + catch - // TODO: Replace this case when scala.annaotaion.MacroAnnotation is no longer experimental and reflectiveSelectable is not used + // TODO: Replace this case when scala.annotation.MacroAnnotation is no longer experimental and reflectiveSelectable is not used // Replace this case with the nested cases. case ex0: InvocationTargetException => ex0.getCause match case ex: scala.quoted.runtime.StopMacroExpansion => if !ctx.reporter.hasErrors then report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", annot.tree) - List(tree) + (List(tree), companion) case Interpreter.MissingClassValidInCurrentRun(sym, origin) => Interpreter.suspendOnMissing(sym, origin, annot.tree) case NonFatal(ex) => - val stack0 = ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.MacroAnnotations") + val stack0 = ex.getStackTrace.takeWhile(_.getClassName != this.getClass().getName()) val stack = stack0.take(1 + stack0.lastIndexWhere(_.getMethodName == "transform")) val msg = em"""Failed to evaluate macro. 
@@ -118,9 +142,10 @@ class MacroAnnotations(phase: IdentityDenotTransformer): | ${stack.mkString("\n ")} |""" report.error(msg, annot.tree) - List(tree) + (List(tree), companion) case _ => throw ex0 + end callMacro /** Check that this tree can be added by the macro annotation */ private def checkMacroDef(newTree: DefTree, annotatedTree: Tree, annot: Annotation)(using Context) = @@ -133,14 +158,13 @@ class MacroAnnotations(phase: IdentityDenotTransformer): report.error(i"macro annotation $annot added $sym with an inconsistent owner. Expected it to be owned by ${annotated.owner} but was owned by ${sym.owner}.", annot.tree) else if annotated.isClass && annotated.owner.is(Package) /*&& !sym.isClass*/ then report.error(i"macro annotation can not add top-level ${sym.showKind}. $annot tried to add $sym.", annot.tree) + end checkMacroDef - /** - * Enter the symbols generated by MacroAnnotations - */ - private def enterMissingSymbols(tree: DefTree)(using Context) = new TreeTraverser { + /** Enter the symbols generated by MacroAnnotations */ + def enterMissingSymbols(tree: MemberDef, phase: DenotTransformer)(using Context) = new TreeTraverser { def traverse(tree: tpd.Tree)(using Context): Unit = tree match case tdef @ TypeDef(_, template: Template) => - val isSymbolInDecls = tdef.symbol.asClass.info.decls.toList.toSet + val isSymbolInDecls = atNextPhase(tdef.symbol.asClass.info.decls.toList.toSet) for tree <- template.body if tree.isDef do if tree.symbol.owner != tdef.symbol then report.error(em"Macro added a definition with the wrong owner - ${tree.symbol.owner} - ${tdef.symbol} in ${tree.source}", tree.srcPos) @@ -150,12 +174,4 @@ class MacroAnnotations(phase: IdentityDenotTransformer): case _ => traverseChildren(tree) }.traverse(tree) -object MacroAnnotations: - - /** Is this an annotation that implements `scala.annation.MacroAnnotation` */ - def isMacroAnnotation(annot: Annotation)(using Context): Boolean = - 
annot.tree.symbol.maybeOwner.derivesFrom(defn.MacroAnnotationClass) - - /** Is this symbol annotated with an annotation that implements `scala.annation.MacroAnnotation` */ - def hasMacroAnnotation(sym: Symbol)(using Context): Boolean = - sym.getAnnotation(defn.MacroAnnotationClass).isDefined +end MacroAnnotations diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 30c994a1777c..e809c0fad463 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -20,6 +20,7 @@ import util.SrcPos import reporting.* import NameKinds.WildcardParamName import cc.* +import dotty.tools.dotc.transform.MacroAnnotations.hasMacroAnnotation object PostTyper { val name: String = "posttyper" @@ -560,7 +561,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => /** Check if the definition has macro annotation and sets `compilationUnit.hasMacroAnnotations` if needed. 
*/ private def registerIfHasMacroAnnotations(tree: DefTree)(using Context) = - if !Inlines.inInlineMethod && MacroAnnotations.hasMacroAnnotation(tree.symbol) then + if !Inlines.inInlineMethod && tree.symbol.hasMacroAnnotation then ctx.compilationUnit.hasMacroAnnotations = true /** Check macro annotations implementations */ diff --git a/library/src/scala/annotation/MacroAnnotation.scala b/library/src/scala/annotation/MacroAnnotation.scala index 02e9470f06fd..ffaa0cf88464 100644 --- a/library/src/scala/annotation/MacroAnnotation.scala +++ b/library/src/scala/annotation/MacroAnnotation.scala @@ -45,9 +45,12 @@ trait MacroAnnotation extends StaticAnnotation: * import scala.collection.concurrent * * class memoize extends MacroAnnotation: - * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + * def transform(using Quotes)( + * definition: quotes.reflect.Definition, + * companion: Option[quotes.reflect.Definition] + * ): List[quotes.reflect.Definition] = * import quotes.reflect.* - * tree match + * definition match * case DefDef(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(rhsTree)) => * (param.tpt.tpe.asType, tpt.tpe.asType) match * case ('[t], '[u]) => @@ -58,16 +61,17 @@ trait MacroAnnotation extends StaticAnnotation: * '{ concurrent.TrieMap.empty[t, u] }.asTerm * val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) * val newRhs = - * given Quotes = tree.symbol.asQuotes + * given Quotes = definition.symbol.asQuotes * val cacheRefExpr = Ref(cacheSymbol).asExprOf[concurrent.Map[t, u]] * val paramRefExpr = Ref(param.symbol).asExprOf[t] * val rhsExpr = rhsTree.asExprOf[u] * '{ $cacheRefExpr.getOrElseUpdate($paramRefExpr, $rhsExpr) }.asTerm - * val newTree = DefDef.copy(tree)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) + * val newTree = DefDef.copy(definition)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) * List(cacheVal, newTree) * case _ => * report.error("Annotation only supported 
on `def` with a single argument are supported") - * List(tree) + * List(definition) + * end transform * ``` * with this macro annotation a user can write * ```scala @@ -102,11 +106,14 @@ trait MacroAnnotation extends StaticAnnotation: * * @experimental * class equals extends MacroAnnotation: - * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + * def transform(using Quotes)( + * definition: quotes.reflect.Definition, + * companion: Option[quotes.reflect.Definition] + * ): List[quotes.reflect.Definition] = * import quotes.reflect.* - * tree match + * definition match * case ClassDef(className, ctr, parents, self, body) => - * val cls = tree.symbol + * val cls = definition.symbol * * val constructorParameters = ctr.paramss.collect { case clause: TermParamClause => clause } * if constructorParameters.size != 1 || constructorParameters.head.params.isEmpty then @@ -139,10 +146,11 @@ trait MacroAnnotation extends StaticAnnotation: * val hashCodeOverrideDef = DefDef(hashCodeOverrideSym, _ => Some(Ref(hashSym))) * * val newBody = equalsOverrideDef :: hashVal :: hashCodeOverrideDef :: body - * List(ClassDef.copy(tree)(className, ctr, parents, self, newBody)) + * List(ClassDef.copy(definition)(className, ctr, parents, self, newBody)) * case _ => * report.error("Annotation only supports `class`") - * List(tree) + * List(definition) + * end transform * * private def equalsExpr[T: Type](that: Expr[Any], thisFields: List[Expr[Any]])(using Quotes): Expr[Boolean] = * '{ @@ -204,9 +212,10 @@ trait MacroAnnotation extends StaticAnnotation: * override def hashCode(): Int = hash$macro$1 * ``` * - * @param Quotes Implicit instance of Quotes used for tree reflection - * @param tree Tree that will be transformed + * @param Quotes Implicit instance of Quotes used for tree reflection + * @param definition Tree that will be transformed + * @param companion Tree for the companion class or module if the definition is respectively a module or a 
class * * @syntax markdown */ - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] diff --git a/tests/neg-macros/annot-accessIndirect/Macro_1.scala b/tests/neg-macros/annot-accessIndirect/Macro_1.scala index 8679edcfc0c3..7f4136e10652 100644 --- a/tests/neg-macros/annot-accessIndirect/Macro_1.scala +++ b/tests/neg-macros/annot-accessIndirect/Macro_1.scala @@ -3,9 +3,9 @@ import scala.quoted._ @experimental class hello extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val helloSymbol = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("hello"), TypeRepr.of[String], Flags.EmptyFlags, Symbol.noSymbol) val helloVal = ValDef(helloSymbol, Some(Literal(StringConstant("Hello, World!")))) - List(helloVal, tree) + List(helloVal, definition) } diff --git a/tests/neg-macros/annot-accessIndirect/Macro_2.scala b/tests/neg-macros/annot-accessIndirect/Macro_2.scala index d069175ce166..5fa2ba1aa2bf 100644 --- a/tests/neg-macros/annot-accessIndirect/Macro_2.scala +++ b/tests/neg-macros/annot-accessIndirect/Macro_2.scala @@ -3,16 +3,16 @@ import scala.quoted._ @experimental class foo extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val s = '{@hello def foo1(x: Int): Int = x + 1;()}.asTerm val fooDef = s.asInstanceOf[Inlined].body.asInstanceOf[Block].statements.head.asInstanceOf[DefDef] val hello = 
Ref(Symbol.spliceOwner.declaredFields("hello").head).asExprOf[String] // error - tree match + definition match case DefDef(name, params, tpt, Some(t)) => val rhs = '{ ${t.asExprOf[String]} + $hello }.asTerm - val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newDef = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(fooDef, newDef) } diff --git a/tests/neg-macros/annot-crash/Macro_1.scala b/tests/neg-macros/annot-crash/Macro_1.scala index f3d5b3f602f8..06fb08062181 100644 --- a/tests/neg-macros/annot-crash/Macro_1.scala +++ b/tests/neg-macros/annot-crash/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted._ @experimental class crash extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = ??? } diff --git a/tests/neg-macros/annot-empty-result.check b/tests/neg-macros/annot-empty-result.check index 6d43c19664cb..5a0e637837e4 100644 --- a/tests/neg-macros/annot-empty-result.check +++ b/tests/neg-macros/annot-empty-result.check @@ -2,12 +2,12 @@ -- Error: tests/neg-macros/annot-empty-result/Test_2.scala:5:2 --------------------------------------------------------- 5 | @nilAnnot // error | ^^^^^^^^^ - | Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion + | Transformed tree for method f2 was not return by `(new nilAnnot()).transform(..)` during macro expansion -- Error: tests/neg-macros/annot-empty-result/Test_2.scala:9:4 --------------------------------------------------------- 9 | @nilAnnot // error | ^^^^^^^^^ - | Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion + | Transformed tree for method f3 was not return by `(new nilAnnot()).transform(..)` during macro expansion -- Error: tests/neg-macros/annot-empty-result/Test_2.scala:1:0 
--------------------------------------------------------- 1 |@nilAnnot // error |^^^^^^^^^ - |Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion + |Transformed tree for method f1 was not return by `(new nilAnnot()).transform(..)` during macro expansion diff --git a/tests/neg-macros/annot-empty-result/Macro_1.scala b/tests/neg-macros/annot-empty-result/Macro_1.scala index ff3be61c05d2..9b22027ce929 100644 --- a/tests/neg-macros/annot-empty-result/Macro_1.scala +++ b/tests/neg-macros/annot-empty-result/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted._ @experimental class nilAnnot extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = Nil } diff --git a/tests/neg-macros/annot-error-annot/Macro_1.scala b/tests/neg-macros/annot-error-annot/Macro_1.scala index d54b69903e02..6597a11be7d2 100644 --- a/tests/neg-macros/annot-error-annot/Macro_1.scala +++ b/tests/neg-macros/annot-error-annot/Macro_1.scala @@ -3,7 +3,7 @@ import scala.quoted._ @experimental class error extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - quotes.reflect.report.error("MACRO ERROR", tree.pos) - List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + quotes.reflect.report.error("MACRO ERROR", definition.pos) + List(definition) } diff --git a/tests/neg-macros/annot-ill-abort/Macro_1.scala b/tests/neg-macros/annot-ill-abort/Macro_1.scala index 446ce0a5331b..4689ffbe2f2c 100644 --- a/tests/neg-macros/annot-ill-abort/Macro_1.scala +++ b/tests/neg-macros/annot-ill-abort/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted._ @experimental class crash extends MacroAnnotation { - 
def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = throw new scala.quoted.runtime.StopMacroExpansion } diff --git a/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala b/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala index b5c49695ad2a..7dc8cb2c4479 100644 --- a/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala +++ b/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala @@ -4,14 +4,14 @@ import scala.collection.mutable @experimental class addTopLevelMethod extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => val methType = MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Int]) val methSym = Symbol.newMethod(Symbol.spliceOwner, Symbol.freshName("toLevelMethod"), methType, Flags.EmptyFlags, Symbol.noSymbol) val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) - List(methDef, tree) + List(methDef, definition) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala b/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala index c6f21e181879..12443d329108 100644 --- a/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala +++ b/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala @@ -4,13 +4,13 @@ import scala.collection.mutable @experimental class addTopLevelVal extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def 
transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => val valSym = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("toLevelVal"), TypeRepr.of[Int], Flags.EmptyFlags, Symbol.noSymbol) val valDef = ValDef(valSym, Some(Literal(IntConstant(1)))) - List(valDef, tree) + List(valDef, definition) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala b/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala index 45679b65c03b..8609af376ad7 100644 --- a/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala +++ b/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala @@ -5,9 +5,9 @@ import scala.collection.mutable @experimental // Assumes annotation is on top level def or val class addTopLevelMethodOutsidePackageObject extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val methType = MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Int]) val methSym = Symbol.newMethod(Symbol.spliceOwner.owner, Symbol.freshName("toLevelMethod"), methType, Flags.EmptyFlags, Symbol.noSymbol) val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) - List(methDef, tree) + List(methDef, definition) diff --git a/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala b/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala index c6c4c32afcb8..a9ae0efd76b4 100644 --- a/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala +++ 
b/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala @@ -5,8 +5,8 @@ import scala.collection.mutable @experimental // Assumes annotation is on top level def or val class addTopLevelValOutsidePackageObject extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val valSym = Symbol.newVal(Symbol.spliceOwner.owner, Symbol.freshName("toLevelVal"), TypeRepr.of[Int], Flags.EmptyFlags, Symbol.noSymbol) val valDef = ValDef(valSym, Some(Literal(IntConstant(1)))) - List(valDef, tree) + List(valDef, definition) diff --git a/tests/neg-macros/annot-on-type/Macro_1.scala b/tests/neg-macros/annot-on-type/Macro_1.scala index 7468c5a200a6..631a5bcc201d 100644 --- a/tests/neg-macros/annot-on-type/Macro_1.scala +++ b/tests/neg-macros/annot-on-type/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted._ @experimental class voidAnnot extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) } diff --git a/tests/neg-macros/annot-result-owner/Macro_1.scala b/tests/neg-macros/annot-result-owner/Macro_1.scala index 34f7541f726b..af3d8dbb6656 100644 --- a/tests/neg-macros/annot-result-owner/Macro_1.scala +++ b/tests/neg-macros/annot-result-owner/Macro_1.scala @@ -3,9 +3,9 @@ import scala.quoted._ @experimental class insertVal extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import 
quotes.reflect._ // Use of wrong owner - val valSym = Symbol.newVal(tree.symbol, Symbol.freshName("definitionWithWrongOwner"), TypeRepr.of[Unit], Flags.Private, Symbol.noSymbol) + val valSym = Symbol.newVal(definition.symbol, Symbol.freshName("definitionWithWrongOwner"), TypeRepr.of[Unit], Flags.Private, Symbol.noSymbol) val valDef = ValDef(valSym, Some('{}.asTerm)) - List(valDef, tree) + List(valDef, definition) diff --git a/tests/neg-macros/annot-suspend-cycle.check b/tests/neg-macros/annot-suspend-cycle.check index 437398f1d668..7b87943be2af 100644 --- a/tests/neg-macros/annot-suspend-cycle.check +++ b/tests/neg-macros/annot-suspend-cycle.check @@ -1,9 +1,9 @@ --- [E129] Potential Issue Warning: tests/neg-macros/annot-suspend-cycle/Macro.scala:7:4 -------------------------------- -7 | new Foo - | ^^^^^^^ - | A pure expression does nothing in statement position - | - | longer explanation available when compiling with `-explain` +-- [E129] Potential Issue Warning: tests/neg-macros/annot-suspend-cycle/Macro.scala:10:6 ------------------------------- +10 | new Foo + | ^^^^^^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` Cyclic macro dependencies in tests/neg-macros/annot-suspend-cycle/Test.scala. Compilation stopped since no further progress can be made. 
diff --git a/tests/neg-macros/annot-suspend-cycle/Macro.scala b/tests/neg-macros/annot-suspend-cycle/Macro.scala index 4143e2c32062..9106c3e2a37e 100644 --- a/tests/neg-macros/annot-suspend-cycle/Macro.scala +++ b/tests/neg-macros/annot-suspend-cycle/Macro.scala @@ -3,7 +3,11 @@ import scala.quoted._ @experimental class cycle extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - new Foo - List(tree) + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ): List[quotes.reflect.Definition] = + new Foo + List(definition) + end transform } diff --git a/tests/neg-macros/i18677-a/Macro_1.scala b/tests/neg-macros/i18677-a/Macro_1.scala index c3df616ed4e6..ecd90869e081 100644 --- a/tests/neg-macros/i18677-a/Macro_1.scala +++ b/tests/neg-macros/i18677-a/Macro_1.scala @@ -6,13 +6,17 @@ import quoted.* trait Foo class extendFoo extends MacroAnnotation : - override def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - import quotes.reflect.* - tree match - case ClassDef(name, ctr, p, self, body) => - val parents = List(TypeTree.of[Foo]) - val newTree = ClassDef.copy(tree)(name, ctr, parents, self, body) - newTree :: Nil - case _ => - report.error("@extendFoo can only annotate class definitions") - tree :: Nil \ No newline at end of file + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ): List[quotes.reflect.Definition] = + import quotes.reflect.* + definition match + case ClassDef(name, ctr, p, self, body) => + val parents = List(TypeTree.of[Foo]) + val newTree = ClassDef.copy(definition)(name, ctr, parents, self, body) + newTree :: Nil + case _ => + report.error("@extendFoo can only annotate class definitions") + definition :: Nil + end transform diff --git a/tests/neg-macros/i18677-b/Macro_1.scala 
b/tests/neg-macros/i18677-b/Macro_1.scala index 9e1b9be5e696..9895b15942b9 100644 --- a/tests/neg-macros/i18677-b/Macro_1.scala +++ b/tests/neg-macros/i18677-b/Macro_1.scala @@ -6,13 +6,17 @@ import quoted.* class Foo class extendFoo extends MacroAnnotation : - override def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - import quotes.reflect.* - tree match - case ClassDef(name, ctr, p, self, body) => - val parents = List(TypeTree.of[Foo]) - val newTree = ClassDef.copy(tree)(name, ctr, parents, self, body) - newTree :: Nil - case _ => - report.error("@extendFoo can only annotate class definitions") - tree :: Nil \ No newline at end of file + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ): List[quotes.reflect.Definition] = + import quotes.reflect.* + definition match + case ClassDef(name, ctr, p, self, body) => + val parents = List(TypeTree.of[Foo]) + val newTree = ClassDef.copy(definition)(name, ctr, parents, self, body) + newTree :: Nil + case _ => + report.error("@extendFoo can only annotate class definitions") + definition :: Nil + end transform diff --git a/tests/neg-macros/i18825/Macro_1.scala b/tests/neg-macros/i18825/Macro_1.scala index c099954f3858..adeb320c1403 100644 --- a/tests/neg-macros/i18825/Macro_1.scala +++ b/tests/neg-macros/i18825/Macro_1.scala @@ -4,16 +4,16 @@ import scala.quoted.* @experimental class toString extends MacroAnnotation : - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") val toStringOverrideSym 
= Symbol.newMethod(cls, "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant("Hello from macro")))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, toStringDef :: body) List(newClassDef) case _ => report.error("@toString can only be annotated on class definitions") - tree :: Nil + definition :: Nil diff --git a/tests/neg-macros/i19676.check b/tests/neg-macros/i19676.check new file mode 100644 index 000000000000..ca6b89a55087 --- /dev/null +++ b/tests/neg-macros/i19676.check @@ -0,0 +1,5 @@ + +-- Error: tests/neg-macros/i19676/Test_2.scala:3:0 --------------------------------------------------------------------- +3 |@buggy // error + |^^^^^^ + |Transformed companion for class Foo was returned more than once by `(new buggy()).transform(..)` during macro expansion diff --git a/tests/neg-macros/i19676/Macro_1.scala b/tests/neg-macros/i19676/Macro_1.scala new file mode 100644 index 000000000000..a5598c448a1c --- /dev/null +++ b/tests/neg-macros/i19676/Macro_1.scala @@ -0,0 +1,19 @@ +//> using options -experimental -Yno-experimental + +import scala.annotation.MacroAnnotation +import scala.quoted.* + +class buggy extends MacroAnnotation: + + def transform(using Quotes) + (definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + + import quotes.reflect.* + companion match + case Some(companion) => + List(definition, companion, companion) + case None => + report.error("The goal of this test is to return the companion more than once to trigger a compilation error") + List(definition) + end transform \ No newline at end of file diff --git a/tests/neg-macros/i19676/Test_2.scala b/tests/neg-macros/i19676/Test_2.scala new file mode 100644 index 000000000000..0142d263f103 --- /dev/null +++ 
b/tests/neg-macros/i19676/Test_2.scala @@ -0,0 +1,4 @@ +//> using options -experimental -Yno-experimental + +@buggy // error +case class Foo() \ No newline at end of file diff --git a/tests/neg-macros/wrong-owner.check b/tests/neg-macros/wrong-owner.check index ca8751d0fe1c..26316d3fc687 100644 --- a/tests/neg-macros/wrong-owner.check +++ b/tests/neg-macros/wrong-owner.check @@ -17,6 +17,6 @@ | |Error: |assertion failed: bad owner; method toString has owner class String, expected was class Foo - |owner chain = method toString, class String, package java.lang, package java, package , ctxOwners = class Foo, class Foo, package , package , package , package , package , package , package , package , package , , , , , + |owner chain = method toString, class String, package java.lang, package java, package , ctxOwners = class Foo, class Foo, package , package , package , package , package , package , package , package , package , package , , , , , | |stacktrace available when compiling with `-Ydebug` diff --git a/tests/neg-macros/wrong-owner/Macro_1.scala b/tests/neg-macros/wrong-owner/Macro_1.scala index 85127b701f81..2e101f849802 100644 --- a/tests/neg-macros/wrong-owner/Macro_1.scala +++ b/tests/neg-macros/wrong-owner/Macro_1.scala @@ -4,16 +4,16 @@ import scala.quoted.* @experimental class wrongOwner extends MacroAnnotation : - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") val toStringOverrideSym = Symbol.newMethod(Symbol.classSymbol("java.lang.String"), "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) val toStringDef = 
DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant("Hello from macro")))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, toStringDef :: body) List(newClassDef) case _ => report.error("@toString can only be annotated on class definitions") - tree :: Nil + definition :: Nil diff --git a/tests/pos-macros/annot-dependency-between-modules/Macro_1.scala b/tests/pos-macros/annot-dependency-between-modules/Macro_1.scala new file mode 100644 index 000000000000..94a5d44c0777 --- /dev/null +++ b/tests/pos-macros/annot-dependency-between-modules/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.annotation.* +import scala.quoted.* + +@experimental +class void extends MacroAnnotation: + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ) : List[quotes.reflect.Definition] = + definition +: companion.toList + end transform diff --git a/tests/pos-macros/annot-dependency-between-modules/Test_2.scala b/tests/pos-macros/annot-dependency-between-modules/Test_2.scala new file mode 100644 index 000000000000..f809330af52d --- /dev/null +++ b/tests/pos-macros/annot-dependency-between-modules/Test_2.scala @@ -0,0 +1,3 @@ + +@void @void +class Foo \ No newline at end of file diff --git a/tests/pos-macros/annot-in-object/Macro_1.scala b/tests/pos-macros/annot-in-object/Macro_1.scala index 143bd46b8ecc..2086f9ec4cb1 100644 --- a/tests/pos-macros/annot-in-object/Macro_1.scala +++ b/tests/pos-macros/annot-in-object/Macro_1.scala @@ -6,9 +6,11 @@ import scala.quoted._ object Foo: @experimental class void extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) object Bar: 
@experimental class void extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) diff --git a/tests/pos-macros/annot-suspend/Macro_1.scala b/tests/pos-macros/annot-suspend/Macro_1.scala index 3c391a1a041f..4ae96186b777 100644 --- a/tests/pos-macros/annot-suspend/Macro_1.scala +++ b/tests/pos-macros/annot-suspend/Macro_1.scala @@ -5,5 +5,5 @@ import scala.quoted._ @experimental class void extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) diff --git a/tests/pos-macros/annot-then-inline/Macro_1.scala b/tests/pos-macros/annot-then-inline/Macro_1.scala index 99fece18299a..f3226e24d4da 100644 --- a/tests/pos-macros/annot-then-inline/Macro_1.scala +++ b/tests/pos-macros/annot-then-inline/Macro_1.scala @@ -5,14 +5,14 @@ import scala.quoted._ @experimental class useInlinedIdentity extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case DefDef(name, params, tpt, Some(rhs)) => val newRhs = - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes '{ inlinedIdentity(${rhs.asExpr}) }.asTerm - List(DefDef.copy(tree)(name, params, tpt, Some(newRhs))) + List(DefDef.copy(definition)(name, params, tpt, Some(newRhs))) } inline def inlinedIdentity(x: Any): x.type = x diff --git 
a/tests/pos-macros/i19537/Macro_1.scala b/tests/pos-macros/i19537/Macro_1.scala index 932994657d24..a44c212599d3 100644 --- a/tests/pos-macros/i19537/Macro_1.scala +++ b/tests/pos-macros/i19537/Macro_1.scala @@ -3,4 +3,5 @@ import scala.quoted.* @experimental class annotation extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition) = List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) diff --git a/tests/pos-macros/i19539/Macro_1.scala b/tests/pos-macros/i19539/Macro_1.scala index 932994657d24..a44c212599d3 100644 --- a/tests/pos-macros/i19539/Macro_1.scala +++ b/tests/pos-macros/i19539/Macro_1.scala @@ -3,4 +3,5 @@ import scala.quoted.* @experimental class annotation extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition) = List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) diff --git a/tests/pos-macros/macro-annot-with-companion/Macro_1.scala b/tests/pos-macros/macro-annot-with-companion/Macro_1.scala new file mode 100644 index 000000000000..386f5fbd09d5 --- /dev/null +++ b/tests/pos-macros/macro-annot-with-companion/Macro_1.scala @@ -0,0 +1,12 @@ +import scala.annotation.MacroAnnotation + +import scala.quoted.* + +class transform extends MacroAnnotation: + override def transform(using Quotes)( + tree: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ) : List[quotes.reflect.Definition] = { + import quotes.reflect.* + List(tree) + } diff --git a/tests/pos-macros/macro-annot-with-companion/Test_2.scala b/tests/pos-macros/macro-annot-with-companion/Test_2.scala new file mode 100644 index 000000000000..ddf73334389b --- /dev/null +++ b/tests/pos-macros/macro-annot-with-companion/Test_2.scala @@ -0,0 +1,15 @@ + +@transform +class 
Foo + +@transform +class Bar + +@transform +object Foo + +@transform +class A + +@transform +object B \ No newline at end of file diff --git a/tests/run-macros/annot-add-global-class/Macro_1.scala b/tests/run-macros/annot-add-global-class/Macro_1.scala index 6ac77913e3ab..a6ebf1c64d9d 100644 --- a/tests/run-macros/annot-add-global-class/Macro_1.scala +++ b/tests/run-macros/annot-add-global-class/Macro_1.scala @@ -8,9 +8,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -25,8 +25,8 @@ class addClass extends MacroAnnotation: val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) List(clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-global-object/Macro_1.scala b/tests/run-macros/annot-add-global-object/Macro_1.scala index f7c901a49aa5..c2fe5bbfe9e5 100644 --- a/tests/run-macros/annot-add-global-object/Macro_1.scala +++ b/tests/run-macros/annot-add-global-object/Macro_1.scala @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: 
quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -23,8 +23,8 @@ class addClass extends MacroAnnotation: val modDef = ClassDef.module(mod, parents, body = List(runDef)) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) modDef.toList ::: newDef :: Nil case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-local-class/Macro_1.scala b/tests/run-macros/annot-add-local-class/Macro_1.scala index 57a2d543ffbc..07f6d98e7c99 100644 --- a/tests/run-macros/annot-add-local-class/Macro_1.scala +++ b/tests/run-macros/annot-add-local-class/Macro_1.scala @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -22,8 +22,8 @@ class addClass extends MacroAnnotation: val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), 
Nil))) List(clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-local-object/Macro_1.scala b/tests/run-macros/annot-add-local-object/Macro_1.scala index 6f6e11e7361c..18534730e16b 100644 --- a/tests/run-macros/annot-add-local-object/Macro_1.scala +++ b/tests/run-macros/annot-add-local-object/Macro_1.scala @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -23,8 +23,8 @@ class addClass extends MacroAnnotation: val (modVal, clsDef) = ClassDef.module(mod, parents, body = List(runDef)) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) List(modVal, clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-nested-class/Macro_1.scala b/tests/run-macros/annot-add-nested-class/Macro_1.scala index e13e3841501a..631e3044a39c 100644 --- a/tests/run-macros/annot-add-nested-class/Macro_1.scala +++ b/tests/run-macros/annot-add-nested-class/Macro_1.scala @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: 
quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -23,8 +23,8 @@ class addClass extends MacroAnnotation: val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) List(clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-nested-object/Macro_1.scala b/tests/run-macros/annot-add-nested-object/Macro_1.scala index f8cde8de5bf0..0069a1010a78 100644 --- a/tests/run-macros/annot-add-nested-object/Macro_1.scala +++ b/tests/run-macros/annot-add-nested-object/Macro_1.scala @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -23,8 +23,8 @@ class addClass extends MacroAnnotation: val (modVal, clsDef) = ClassDef.module(mod, parents, body = List(runDef)) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), 
runSym), Nil))) List(modVal, clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-annot-order/Macro_1.scala b/tests/run-macros/annot-annot-order/Macro_1.scala index 9d3e8e40c01a..a177225d635f 100644 --- a/tests/run-macros/annot-annot-order/Macro_1.scala +++ b/tests/run-macros/annot-annot-order/Macro_1.scala @@ -5,15 +5,15 @@ import scala.quoted._ @experimental class print(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(rhsTree)) => - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes rhsTree.asExpr match case '{ $rhsExpr: t } => val newRhs = '{ println(${Expr(msg)}); $rhsExpr }.asTerm - List(DefDef.copy(tree)(name, params, tpt, Some(newRhs))) + List(DefDef.copy(definition)(name, params, tpt, Some(newRhs))) case _ => report.error("Annotation only supported on `def`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-bind/Macro_1.scala b/tests/run-macros/annot-bind/Macro_1.scala index 0997f35ccf4a..fd0aaf1343a0 100644 --- a/tests/run-macros/annot-bind/Macro_1.scala +++ b/tests/run-macros/annot-bind/Macro_1.scala @@ -5,15 +5,15 @@ import scala.quoted._ @experimental class bind(str: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ValDef(name, tpt, Some(rhsTree)) => val valSym = 
Symbol.newVal(Symbol.spliceOwner, Symbol.freshName(str), tpt.tpe, Flags.Private, Symbol.noSymbol) val valDef = ValDef(valSym, Some(rhsTree)) val newRhs = Ref(valSym) - val newTree = ValDef.copy(tree)(name, tpt, Some(newRhs)) + val newTree = ValDef.copy(definition)(name, tpt, Some(newRhs)) List(valDef, newTree) case _ => report.error("Annotation only supported on `val` with a single argument are supported") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-changeVal/Macro_1.scala b/tests/run-macros/annot-changeVal/Macro_1.scala index d55282f8c390..d32cbe85ef35 100644 --- a/tests/run-macros/annot-changeVal/Macro_1.scala +++ b/tests/run-macros/annot-changeVal/Macro_1.scala @@ -7,8 +7,8 @@ import scala.annotation.MacroAnnotation object ChangeVal: @experimental class change(i: Int) extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match - case ValDef(n, t, _) => List(ValDef.copy(tree)(n, t, Some(Literal(IntConstant(i))))) + definition match + case ValDef(n, t, _) => List(ValDef.copy(definition)(n, t, Some(Literal(IntConstant(i))))) } diff --git a/tests/run-macros/annot-concrete-class/Macro_1.scala b/tests/run-macros/annot-concrete-class/Macro_1.scala index e91f9c1ccafe..326b450088e6 100644 --- a/tests/run-macros/annot-concrete-class/Macro_1.scala +++ b/tests/run-macros/annot-concrete-class/Macro_1.scala @@ -5,7 +5,7 @@ import scala.quoted.* class implementAFoo extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(tree: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* tree match case ClassDef(name, cstr, parents, self, body) 
=> diff --git a/tests/run-macros/annot-export/Macro_1.scala b/tests/run-macros/annot-export/Macro_1.scala index fbe97684079b..b135245790ba 100644 --- a/tests/run-macros/annot-export/Macro_1.scala +++ b/tests/run-macros/annot-export/Macro_1.scala @@ -6,10 +6,10 @@ import scala.collection.mutable.Map @experimental class returnClassName extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, _) => val rhs = Literal(StringConstant(Symbol.spliceOwner.name.stripSuffix("$"))) - List(DefDef.copy(tree)(name, params, tpt, Some(rhs))) + List(DefDef.copy(definition)(name, params, tpt, Some(rhs))) } diff --git a/tests/run-macros/annot-gen2/Macro_1.scala b/tests/run-macros/annot-gen2/Macro_1.scala index 05428aac7375..503d6e192cdd 100644 --- a/tests/run-macros/annot-gen2/Macro_1.scala +++ b/tests/run-macros/annot-gen2/Macro_1.scala @@ -5,14 +5,14 @@ import scala.quoted._ @experimental class hello extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(t)) => - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val rhs = '{ ${t.asExprOf[String]} + "hello" }.asTerm - val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newDef = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(newDef) } diff --git a/tests/run-macros/annot-gen2/Macro_2.scala b/tests/run-macros/annot-gen2/Macro_2.scala index 3e2e228abb3e..9cb734dfdb00 100644 --- 
a/tests/run-macros/annot-gen2/Macro_2.scala +++ b/tests/run-macros/annot-gen2/Macro_2.scala @@ -5,11 +5,11 @@ import scala.quoted._ @experimental class foo extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(t)) => - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val s = Ref(params.head.params.head.symbol).asExprOf[String] val rhs = '{ @hello def foo1(s: String): String = ${ @@ -18,6 +18,6 @@ class foo extends MacroAnnotation { } foo1($s) }.asTerm - val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newDef = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(newDef) } diff --git a/tests/run-macros/annot-generate/Macro_1.scala b/tests/run-macros/annot-generate/Macro_1.scala index b88cc62afb06..b64d888aab80 100644 --- a/tests/run-macros/annot-generate/Macro_1.scala +++ b/tests/run-macros/annot-generate/Macro_1.scala @@ -5,9 +5,9 @@ import scala.quoted._ @experimental class hello extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val helloSymbol = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("hello"), TypeRepr.of[String], Flags.EmptyFlags, Symbol.noSymbol) val helloVal = ValDef(helloSymbol, Some(Literal(StringConstant("Hello, World!")))) - List(helloVal, tree) + List(helloVal, definition) } diff --git a/tests/run-macros/annot-generate/Macro_2.scala b/tests/run-macros/annot-generate/Macro_2.scala index 911625eac645..37c88459e3e1 100644 --- 
a/tests/run-macros/annot-generate/Macro_2.scala +++ b/tests/run-macros/annot-generate/Macro_2.scala @@ -5,15 +5,15 @@ import scala.quoted._ @experimental class foo extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(t)) => - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val rhs = '{ @hello def foo(x: Int): Int = x + 1 ${t.asExprOf[Int]} }.asTerm - val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newDef = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(newDef) } diff --git a/tests/run-macros/annot-macro-main/Macro_1.scala b/tests/run-macros/annot-macro-main/Macro_1.scala index 2a585bee2bc1..4470febe1244 100644 --- a/tests/run-macros/annot-macro-main/Macro_1.scala +++ b/tests/run-macros/annot-macro-main/Macro_1.scala @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class mainMacro extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), _, _) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -17,10 +17,10 @@ class mainMacro extends MacroAnnotation: val cls = Symbol.newClass(Symbol.spliceOwner.owner, name, parents = parents.map(_.tpe), decls, selfType = None) val mainSym = cls.declaredMethod("main").head - val mainDef = DefDef(mainSym, _ => Some(Apply(Ref(tree.symbol), Nil))) + val mainDef = DefDef(mainSym, _ => 
Some(Apply(Ref(definition.symbol), Nil))) val clsDef = ClassDef(cls, parents, body = List(mainDef)) - List(clsDef, tree) + List(clsDef, definition) case _ => report.error("Annotation only supports `def` without arguments") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-memo/Macro_1.scala b/tests/run-macros/annot-memo/Macro_1.scala index cd990e1d6cce..492f1e337dfb 100644 --- a/tests/run-macros/annot-memo/Macro_1.scala +++ b/tests/run-macros/annot-memo/Macro_1.scala @@ -6,9 +6,9 @@ import scala.collection.concurrent @experimental class memoize extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(rhsTree)) => (param.tpt.tpe.asType, tpt.tpe.asType) match case ('[t], '[u]) => @@ -19,13 +19,13 @@ class memoize extends MacroAnnotation: '{ concurrent.TrieMap.empty[t, u] }.asTerm val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) val newRhs = - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val cacheRefExpr = Ref(cacheSymbol).asExprOf[concurrent.Map[t, u]] val paramRefExpr = Ref(param.symbol).asExprOf[t] val rhsExpr = rhsTree.asExprOf[u] '{ $cacheRefExpr.getOrElseUpdate($paramRefExpr, $rhsExpr) }.asTerm - val newTree = DefDef.copy(tree)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) + val newTree = DefDef.copy(definition)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) List(cacheVal, newTree) case _ => report.error("Annotation only supported on `def` with a single argument are supported") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-def/Macro_1.scala b/tests/run-macros/annot-mod-class-add-def/Macro_1.scala index 
855dce06f279..acd878f7d556 100644 --- a/tests/run-macros/annot-mod-class-add-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-def/Macro_1.scala @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addIndirectToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val stringMethType = ByNameType.apply(TypeRepr.of[String]) val stringSym = Symbol.newMethod(cls, Symbol.freshName("string"), stringMethType, Flags.Private, Symbol.noSymbol) val stringDef = DefDef(stringSym, _ => Some(Literal(StringConstant(msg)))) @@ -20,9 +20,9 @@ class addIndirectToString(msg: String) extends MacroAnnotation: val toStringDef = DefDef(toStringOverrideSym, _ => Some(Ref(stringSym))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, stringDef :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, stringDef :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala b/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala index 395bfd7a28db..4294e44dd45b 100644 --- a/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addInnerClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: 
quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol def showClassDecls(showCls: Symbol): List[Symbol] = List(Symbol.newMethod(showCls, "showMe", MethodType(List("x"))(_ => List(cls.typeRef), _ => TypeRepr.of[String]))) @@ -27,9 +27,9 @@ class addInnerClass extends MacroAnnotation: val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringMethType, Flags.Override, Symbol.noSymbol) val toStringDef = DefDef(toStringOverrideSym, _ => Some(newShowCallShowMe)) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, showClass :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, showClass :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala b/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala index f72f28b610d6..c772f6cf43a6 100644 --- a/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addMemoToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val stringLazyValSym = Symbol.newVal(cls, Symbol.freshName("string"), TypeRepr.of[String], Flags.Lazy | Flags.Private, Symbol.noSymbol) val toStringMethType = 
Symbol.requiredMethod("java.lang.Object.toString").info @@ -19,9 +19,9 @@ class addMemoToString(msg: String) extends MacroAnnotation: val stringLazyValDef = ValDef(stringLazyValSym, Some(Literal(StringConstant(msg)))) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Ref(stringLazyValSym))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, stringLazyValDef :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, stringLazyValDef :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala b/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala index 0156812adeb1..50df9e86446d 100644 --- a/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addInnerClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringMethType, Flags.Override, Symbol.noSymbol) @@ -28,9 +28,9 @@ class addInnerClass extends MacroAnnotation: val showClass = ClassDef(showClassSym, parents, body = List(showMeDef)) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Block(List(showClass), newShowCallShowMe))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + val newClassDef = 
ClassDef.copy(definition)(name, ctr, parents, self, toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-val/Macro_1.scala b/tests/run-macros/annot-mod-class-add-val/Macro_1.scala index fc0294dcb051..cf69949dd8ad 100644 --- a/tests/run-macros/annot-mod-class-add-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-val/Macro_1.scala @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addMemoToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val stringValSym = Symbol.newVal(cls, Symbol.freshName("string"), TypeRepr.of[String], Flags.Private, Symbol.noSymbol) val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info @@ -19,9 +19,9 @@ class addMemoToString(msg: String) extends MacroAnnotation: val stringValDef = ValDef(stringValSym, Some(Literal(StringConstant(msg)))) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Ref(stringValSym))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, stringValDef :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, stringValDef :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-var/Macro_1.scala b/tests/run-macros/annot-mod-class-add-var/Macro_1.scala index be38689613e7..0eb8592accbf 100644 --- a/tests/run-macros/annot-mod-class-add-var/Macro_1.scala +++ 
b/tests/run-macros/annot-mod-class-add-var/Macro_1.scala @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addCountToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val countVarSym = Symbol.newVal(cls, Symbol.freshName("count"), TypeRepr.of[Int], Flags.Mutable | Flags.Private, Symbol.noSymbol) val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info @@ -26,9 +26,9 @@ class addCountToString(msg: String) extends MacroAnnotation: ) )) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, countVarDef :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, countVarDef :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-data/Macro_1.scala b/tests/run-macros/annot-mod-class-data/Macro_1.scala index a175eb274268..938b4d7d4bd6 100644 --- a/tests/run-macros/annot-mod-class-data/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-data/Macro_1.scala @@ -5,11 +5,11 @@ import scala.quoted.* @experimental class data extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case ClassDef(className, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val 
constructorParameters = ctr.paramss.collect { case clause: TermParamClause => clause } if constructorParameters.size != 1 || constructorParameters.head.params.isEmpty then @@ -51,10 +51,10 @@ class data extends MacroAnnotation: val equalsOverrideDef = DefDef(equalsOverrideSym, equalsOverrideDefBody) val newBody = toStringDef :: hashCodeOverrideDef :: equalsOverrideDef :: body - List(ClassDef.copy(tree)(className, ctr, parents, self, newBody)) + List(ClassDef.copy(definition)(className, ctr, parents, self, newBody)) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) private def toStringExpr(className: String, thisFields: List[Expr[Any]])(using Quotes): Expr[String] = val fieldsSeq = Expr.ofSeq(thisFields) diff --git a/tests/run-macros/annot-mod-class-equals/Macro_1.scala b/tests/run-macros/annot-mod-class-equals/Macro_1.scala index 10184eada1e2..625598d3911e 100644 --- a/tests/run-macros/annot-mod-class-equals/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-equals/Macro_1.scala @@ -5,11 +5,11 @@ import scala.quoted.* @experimental class equals extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case ClassDef(className, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val constructorParameters = ctr.paramss.collect { case clause: TermParamClause => clause } if constructorParameters.size != 1 || constructorParameters.head.params.isEmpty then @@ -42,10 +42,10 @@ class equals extends MacroAnnotation: val hashCodeOverrideDef = DefDef(hashCodeOverrideSym, _ => Some(Ref(hashSym))) val newBody = equalsOverrideDef :: hashVal :: hashCodeOverrideDef :: body - List(ClassDef.copy(tree)(className, ctr, parents, self, newBody)) + 
List(ClassDef.copy(definition)(className, ctr, parents, self, newBody)) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) private def equalsExpr[T: Type](that: Expr[Any], thisFields: List[Expr[Any]])(using Quotes): Expr[Boolean] = '{ diff --git a/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala b/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala index 7a73b0a773e9..9c28d7427f9c 100644 --- a/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class modToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringSym = cls.methodMember("toString").head val newBody = body.span(_.symbol != toStringSym) match @@ -21,7 +21,7 @@ class modToString(msg: String) extends MacroAnnotation: report.error("toString was not defined") body - List(ClassDef.copy(tree)(name, ctr, parents, self, newBody)) + List(ClassDef.copy(definition)(name, ctr, parents, self, newBody)) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala b/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala index fda7b5f037d8..0d07a6bd2006 100644 --- a/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class setValue(field: String, value: String) extends MacroAnnotation: - def transform(using Quotes)(tree: 
quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val valSym = cls.fieldMember(field) val newBody = body.span(_.symbol != valSym) match @@ -21,7 +21,7 @@ class setValue(field: String, value: String) extends MacroAnnotation: report.error(s"`val $field` was not defined") body - List(ClassDef.copy(tree)(name, ctr, parents, self, newBody)) + List(ClassDef.copy(definition)(name, ctr, parents, self, newBody)) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-override-def/Macro_1.scala b/tests/run-macros/annot-mod-class-override-def/Macro_1.scala index e6d7bba79d54..2e9b9356d845 100644 --- a/tests/run-macros/annot-mod-class-override-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-override-def/Macro_1.scala @@ -6,19 +6,19 @@ import scala.collection.mutable @experimental class genToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant(msg)))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef 
:: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-override-val/Macro_1.scala b/tests/run-macros/annot-mod-class-override-val/Macro_1.scala index d7409a649427..6ec02e349051 100644 --- a/tests/run-macros/annot-mod-class-override-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-override-val/Macro_1.scala @@ -6,18 +6,18 @@ import scala.collection.mutable @experimental class overrideField(field: String, value: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val overrideSym = Symbol.newVal(cls, field, TypeRepr.of[String], Flags.Override, Symbol.noSymbol) val valDef = ValDef(overrideSym, Some(Literal(StringConstant(value)))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, valDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, valDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala b/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala index fbcb9049d947..6760714aaa5e 100644 --- a/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala @@ -6,15 +6,15 @@ import scala.collection.mutable @experimental class newUnusedSymbol extends MacroAnnotation: - def transform(using Quotes)(tree: 
quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringMethType, Flags.Override, Symbol.noSymbol) // Test that toStringOverrideSym is not accidentally entered in the class - List(tree) + List(definition) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-result-order/Macro_1.scala b/tests/run-macros/annot-result-order/Macro_1.scala index c81641037b67..cb9121eb8d01 100644 --- a/tests/run-macros/annot-result-order/Macro_1.scala +++ b/tests/run-macros/annot-result-order/Macro_1.scala @@ -5,7 +5,7 @@ import scala.quoted._ @experimental class print(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ def printMsg(msg: String) = val valSym = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("print"), TypeRepr.of[Unit], Flags.Private, Symbol.noSymbol) @@ -13,4 +13,4 @@ class print(msg: String) extends MacroAnnotation: given Quotes = valSym.asQuotes '{ println(${Expr(msg)}) }.asTerm ValDef(valSym, Some(valRhs)) - List(printMsg(s"before: $msg"), tree, printMsg(s"after: $msg")) + List(printMsg(s"before: $msg"), definition, printMsg(s"after: $msg")) diff --git a/tests/run-macros/annot-simple-fib/Macro_1.scala b/tests/run-macros/annot-simple-fib/Macro_1.scala index e5852d5ce73c..f8a74663f775 
100644 --- a/tests/run-macros/annot-simple-fib/Macro_1.scala +++ b/tests/run-macros/annot-simple-fib/Macro_1.scala @@ -6,9 +6,9 @@ import scala.collection.mutable.Map @experimental class memoize extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(fibTree)) => val cacheName = Symbol.freshName(name + "Cache") val cacheSymbol = Symbol.newVal(Symbol.spliceOwner, cacheName, TypeRepr.of[Map[Int, Int]], Flags.EmptyFlags, Symbol.noSymbol) @@ -17,7 +17,7 @@ class memoize extends MacroAnnotation { '{Map.empty[Int, Int]}.asTerm val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) val rhs = - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val fibCache = Ref(cacheSymbol).asExprOf[Map[Int, Int]] val n = Ref(params.head.params.head.symbol).asExprOf[Int] '{ @@ -28,6 +28,6 @@ class memoize extends MacroAnnotation { $fibCache($n) = res res }.asTerm - val newFib = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newFib = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(cacheVal, newFib) } diff --git a/tests/run-macros/annot-unrollLast/Macro_1.scala b/tests/run-macros/annot-unrollLast/Macro_1.scala index e220811433e3..bcb2a94d5ac3 100644 --- a/tests/run-macros/annot-unrollLast/Macro_1.scala +++ b/tests/run-macros/annot-unrollLast/Macro_1.scala @@ -12,7 +12,7 @@ class unrollLast extends StaticAnnotation @experimental class unrollHelper extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(tree: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ tree match 
case tree: DefDef => transformDefDef(tree) diff --git a/tests/run-macros/i18806/Macro_1.scala b/tests/run-macros/i18806/Macro_1.scala index 461080b67b95..06ab612ce416 100644 --- a/tests/run-macros/i18806/Macro_1.scala +++ b/tests/run-macros/i18806/Macro_1.scala @@ -3,11 +3,11 @@ import scala.quoted._ @experimental class gen1 extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol // val meth = cls.methodMember("foo").head // val fooTpe = cls.typeRef.memberType(meth) @@ -17,8 +17,8 @@ class gen1 extends MacroAnnotation: val fooDef = DefDef(fooOverrideSym, _ => Some(Literal(StringConstant("hi")))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, fooDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, fooDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/i19676/Macro_1.scala b/tests/run-macros/i19676/Macro_1.scala new file mode 100644 index 000000000000..4c9007d84bbb --- /dev/null +++ b/tests/run-macros/i19676/Macro_1.scala @@ -0,0 +1,28 @@ +//> using options -experimental -Yno-experimental + +import scala.annotation.MacroAnnotation +import scala.quoted.* + +class companionToString(str: String) extends MacroAnnotation: + + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ): List[quotes.reflect.Definition] = + + import quotes.reflect.* + companion match + case Some(cls@ClassDef(name, ctr, parents, self, body)) => + val symbol = cls.symbol + val toStringSym = 
Symbol.requiredMethod("java.lang.Object.toString") + val toStringOverrideSym = Symbol.newMethod(symbol, "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) + val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant(s"$name: $str")))) + val newCompanion = ClassDef.copy(cls)(name, ctr, parents, self, toStringDef :: body) + List(definition, newCompanion) + case Some(unexpected) => + report.error(s"Unexpected companion: ${unexpected.show}") + List(definition) + case None => + report.error("Companion is not available to transform") + List(definition) + end transform \ No newline at end of file diff --git a/tests/run-macros/i19676/Test_2.scala b/tests/run-macros/i19676/Test_2.scala new file mode 100644 index 000000000000..4110d3c7f482 --- /dev/null +++ b/tests/run-macros/i19676/Test_2.scala @@ -0,0 +1,36 @@ +//> using options -experimental -Yno-experimental + +@companionToString("transformed by class") +class InPackage + +@companionToString("transformed by object") +object InPackage + +val (cls: Any, obj: Any) = { + + @companionToString("transformed by class") + class InBlock + + @companionToString("transformed by object") + object InBlock + + (new InBlock, InBlock) +} + +object Wrapper { + + @companionToString("transformed by class") + class InInnerClass + + @companionToString("transformed by object") + object InInnerClass + +} + +@main def Test = + assert((new InPackage).toString() == "InPackage: transformed by object") + assert(InPackage.toString() == "InPackage$: transformed by class") + assert(cls.toString() == "InBlock: transformed by object") + assert(obj.toString() == "InBlock$: transformed by class") + assert((new Wrapper.InInnerClass).toString() == "InInnerClass: transformed by object") + assert(Wrapper.InInnerClass.toString() == "InInnerClass$: transformed by class") diff --git a/tests/run/quotes-add-erased/Macro_1.scala b/tests/run/quotes-add-erased/Macro_1.scala index 5b95051a3744..66ec1c3642d8 100644 --- 
a/tests/run/quotes-add-erased/Macro_1.scala +++ b/tests/run/quotes-add-erased/Macro_1.scala @@ -7,7 +7,7 @@ import scala.quoted._ class NewAnnotation extends scala.annotation.Annotation class erasedParamsMethod extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(tree: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ tree match case ClassDef(name, ctr, parents, self, body) => From c714ae5127f7900973128f7c43e5bab08ab5c24b Mon Sep 17 00:00:00 2001 From: willerf Date: Mon, 29 Apr 2024 06:17:01 -0700 Subject: [PATCH 206/465] special case for next field of colon colon in global init checker --- compiler/src/dotty/tools/dotc/transform/init/Objects.scala | 3 ++- tests/init-global/pos/list-colon-colon-next.scala | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 tests/init-global/pos/list-colon-colon-next.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 793d4b41b174..7b5932fe78a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -812,7 +812,8 @@ class Objects(using Context @constructorOnly): else Bottom else if target.exists then - if target.isOneOf(Flags.Mutable) then + def isNextFieldOfColonColon: Boolean = ref.klass == defn.ConsClass && target.name.toString == "next" + if target.isOneOf(Flags.Mutable) && !isNextFieldOfColonColon then if ref.hasVar(target) then val addr = ref.varAddr(target) if addr.owner == State.currentObject then diff --git a/tests/init-global/pos/list-colon-colon-next.scala b/tests/init-global/pos/list-colon-colon-next.scala new file mode 100644 index 000000000000..5a87cb311bea --- /dev/null +++ b/tests/init-global/pos/list-colon-colon-next.scala @@ -0,0 +1,5 @@ 
+object A: + val a: List[Int] = List(1, 2, 3) + +object B: + val b = A.a.size From 017e7457256d63a49d27ae9155f0491bcbe23330 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 13:58:34 +0000 Subject: [PATCH 207/465] Bump VirtusLab/scala-cli-setup from 1.2.2 to 1.3.0 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.2.2 to 1.3.0. - [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.2.2...v1.3.0) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 641e24835c31..9c3405235b31 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.2.2 + - uses: VirtusLab/scala-cli-setup@v1.3.0 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From 87b2ddb5327f1b51faeab206439ee76683971739 Mon Sep 17 00:00:00 2001 From: EnzeXing Date: Mon, 29 Apr 2024 22:19:18 +0800 Subject: [PATCH 208/465] Extend whitelist and add tests --- .../tools/dotc/transform/init/Objects.scala | 23 +++++++++++++++---- tests/init-global/pos/EmptyMap.scala | 6 +++++ tests/init-global/pos/EmptyMap2.scala | 4 ++++ tests/init-global/pos/EmptySet.scala | 6 +++++ tests/init-global/pos/EmptySet2.scala | 4 ++++ .../init-global/pos/EmptyVectorIterator.scala | 7 ++++++ tests/init-global/pos/LazyList.scala | 4 ++++ 7 files changed, 49 insertions(+), 5 deletions(-) create mode 
100644 tests/init-global/pos/EmptyMap.scala create mode 100644 tests/init-global/pos/EmptyMap2.scala create mode 100644 tests/init-global/pos/EmptySet.scala create mode 100644 tests/init-global/pos/EmptySet2.scala create mode 100644 tests/init-global/pos/EmptyVectorIterator.scala create mode 100644 tests/init-global/pos/LazyList.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 793d4b41b174..1f06023ae892 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -29,6 +29,7 @@ import scala.collection.mutable import scala.annotation.tailrec import scala.annotation.constructorOnly import dotty.tools.dotc.core.Flags.AbstractOrTrait +import Decorators.* /** Check initialization safety of static objects * @@ -68,11 +69,21 @@ import dotty.tools.dotc.core.Flags.AbstractOrTrait * */ class Objects(using Context @constructorOnly): - val immutableHashSetBuider: Symbol = requiredClass("scala.collection.immutable.HashSetBuilder") + val immutableHashSetNode: Symbol = requiredClass("scala.collection.immutable.SetNode") // TODO: this should really be an annotation on the rhs of the field initializer rather than the field itself. 
- val HashSetBuilder_rootNode: Symbol = immutableHashSetBuider.requiredValue("rootNode") - - val whiteList = Set(HashSetBuilder_rootNode) + val SetNode_EmptySetNode: Symbol = Denotations.staticRef("scala.collection.immutable.SetNode.EmptySetNode".toTermName).symbol + val immutableHashSet: Symbol = requiredModule("scala.collection.immutable.HashSet") + val HashSet_EmptySet: Symbol = Denotations.staticRef("scala.collection.immutable.HashSet.EmptySet".toTermName).symbol + val immutableVector: Symbol = requiredModule("scala.collection.immutable.Vector") + val Vector_EmptyIterator: Symbol = immutableVector.requiredValue("emptyIterator") + val immutableMapNode: Symbol = requiredModule("scala.collection.immutable.MapNode") + val MapNode_EmptyMapNode: Symbol = immutableMapNode.requiredValue("EmptyMapNode") + val immutableHashMap: Symbol = requiredModule("scala.collection.immutable.HashMap") + val HashMap_EmptyMap: Symbol = immutableHashMap.requiredValue("EmptyMap") + val immutableLazyList: Symbol = requiredModule("scala.collection.immutable.LazyList") + val LazyList_empty: Symbol = immutableLazyList.requiredValue("_empty") + + val whiteList: Set[Symbol] = Set() // ----------------------------- abstract domain ----------------------------- @@ -162,7 +173,7 @@ class Objects(using Context @constructorOnly): extends Ref(valsMap = mutable.Map.empty, varsMap = mutable.Map.empty, outersMap = mutable.Map.empty): val owner = klass - def show(using Context) = "ObjectRef(" + klass.show + ")" + def show(using Context) = "ObjectRef(" + klass.show + ")" + "valMap = " + vals + "varMap = " + vars /** * Represents values that are instances of the specified class. @@ -821,6 +832,7 @@ class Objects(using Context @constructorOnly): errorReadOtherStaticObject(State.currentObject, addr) Bottom else if ref.isObjectRef && ref.klass.hasSource then + println(s"Uninitialized field Position 2, ref = $ref, target = $target") report.warning("Access uninitialized field " + field.show + ". 
" + Trace.show, Trace.position) Bottom else @@ -829,6 +841,7 @@ class Objects(using Context @constructorOnly): else if ref.hasVal(target) then ref.valValue(target) else if ref.isObjectRef && ref.klass.hasSource then + println(s"Uninitialized field Position 2, ref = $ref, target = $target") report.warning("Access uninitialized field " + field.show + ". " + Trace.show, Trace.position) Bottom else diff --git a/tests/init-global/pos/EmptyMap.scala b/tests/init-global/pos/EmptyMap.scala new file mode 100644 index 000000000000..776fa81763d6 --- /dev/null +++ b/tests/init-global/pos/EmptyMap.scala @@ -0,0 +1,6 @@ +import scala.collection.immutable.HashMap + +object O { + val emptyMap: HashMap[Int, Int] = HashMap.empty + val key = emptyMap.get(0) +} diff --git a/tests/init-global/pos/EmptyMap2.scala b/tests/init-global/pos/EmptyMap2.scala new file mode 100644 index 000000000000..b66c92f449da --- /dev/null +++ b/tests/init-global/pos/EmptyMap2.scala @@ -0,0 +1,4 @@ +import scala.collection.immutable.HashMap + +object A: + val a = HashMap.empty[Int, Int].updated(1, 2) diff --git a/tests/init-global/pos/EmptySet.scala b/tests/init-global/pos/EmptySet.scala new file mode 100644 index 000000000000..7966d01a0aac --- /dev/null +++ b/tests/init-global/pos/EmptySet.scala @@ -0,0 +1,6 @@ +import scala.collection.immutable.HashSet + +object O { + val emptySet = HashSet.empty + val emptySetSize = emptySet.size +} diff --git a/tests/init-global/pos/EmptySet2.scala b/tests/init-global/pos/EmptySet2.scala new file mode 100644 index 000000000000..f2945c050eba --- /dev/null +++ b/tests/init-global/pos/EmptySet2.scala @@ -0,0 +1,4 @@ +import scala.collection.immutable.HashSet + +object A: + val a = HashSet.empty[Int] + 1 diff --git a/tests/init-global/pos/EmptyVectorIterator.scala b/tests/init-global/pos/EmptyVectorIterator.scala new file mode 100644 index 000000000000..40fcce8d7d3e --- /dev/null +++ b/tests/init-global/pos/EmptyVectorIterator.scala @@ -0,0 +1,7 @@ +import 
scala.collection.immutable.Vector + +object O { + val emptyVector = Vector.empty + val emptyVectorIterator = emptyVector.iterator + val hasNext = emptyVectorIterator.hasNext +} diff --git a/tests/init-global/pos/LazyList.scala b/tests/init-global/pos/LazyList.scala new file mode 100644 index 000000000000..fea70b5a8a54 --- /dev/null +++ b/tests/init-global/pos/LazyList.scala @@ -0,0 +1,4 @@ +import scala.collection.immutable.LazyList + +object A: + val a = LazyList.empty[Int] :+ 1 \ No newline at end of file From 6e7ea68ffb66a314750e54b46f3d42fb4c52ffd3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 29 Apr 2024 19:47:45 +0200 Subject: [PATCH 209/465] Fix scala-js/scala-js#4801: Rebase the super JS type as seen from the this type in JS super call. When doing a super call to a method of a path-dependent JS super class, the `superClass.typeRef` is only valid as seen from the super class' thisType. We need to rebase it with `asSeenFrom` to be in the context of the current class' thisType. 
Forward port of the upstream commit https://github.com/scala-js/scala-js/commit/3cef9d095172b2b5b8189684991d55fa16878875 --- .../dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala index 853fead6f799..5c7119860ae4 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala @@ -637,7 +637,11 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => private def maybeWrapSuperCallWithContextualJSClassValue(tree: Tree)(using Context): Tree = { methPart(tree) match { case Select(sup: Super, _) if isInnerOrLocalJSClass(sup.symbol.asClass.superClass) => - wrapWithContextualJSClassValue(sup.symbol.asClass.superClass.typeRef)(tree) + val superClass = sup.symbol.asClass.superClass + val jsClassTypeInSuperClass = superClass.typeRef + // scala-js#4801 Rebase the super class type on the current class' this type + val jsClassTypeAsSeenFromThis = jsClassTypeInSuperClass.asSeenFrom(currentClass.thisType, superClass) + wrapWithContextualJSClassValue(jsClassTypeAsSeenFromThis)(tree) case _ => tree } From 34742325408fc155ee01000c76c26580b5b9bd2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 26 Apr 2024 15:51:28 +0200 Subject: [PATCH 210/465] Upgrade to Scala.js 1.14.0. 
--- project/Build.scala | 1 + project/plugins.sbt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index dcf6ec13760d..a9095e924c1b 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1581,6 +1581,7 @@ object Build { -- "ReflectiveCallTest.scala" // uses many forms of structural calls that are not allowed in Scala 3 anymore -- "UTF16Test.scala" // refutable pattern match -- "CharsetTest.scala" // bogus @tailrec that Scala 2 ignores but Scala 3 flags as an error + -- "ClassDiffersOnlyInCaseTest.scala" // looks like the Scala 3 compiler itself does not deal with that )).get ++ (dir / "shared/src/test/require-sam" ** "*.scala").get diff --git a/project/plugins.sbt b/project/plugins.sbt index d378848561b8..63ace3e44e26 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.13.0") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.14.0") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") From 710aad8e35b59251b0114a12c7269055b0b9700a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 29 Apr 2024 19:55:36 +0200 Subject: [PATCH 211/465] Upgrade to Scala.js 1.15.0. 
--- compiler/test/dotty/Properties.scala | 3 +++ compiler/test/dotty/tools/vulpix/TestConfiguration.scala | 1 + project/Build.scala | 1 + project/plugins.sbt | 2 +- 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala index e085b0de4875..86e0788a3b8f 100644 --- a/compiler/test/dotty/Properties.scala +++ b/compiler/test/dotty/Properties.scala @@ -103,6 +103,9 @@ object Properties { /** scalajs-javalib jar */ def scalaJSJavalib: String = sys.props("dotty.tests.classes.scalaJSJavalib") + /** scalajs-scalalib jar */ + def scalaJSScalalib: String = sys.props("dotty.tests.classes.scalaJSScalalib") + /** scalajs-library jar */ def scalaJSLibrary: String = sys.props("dotty.tests.classes.scalaJSLibrary") } diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index 086d590fbfc7..e97ef47e6fef 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -52,6 +52,7 @@ object TestConfiguration { lazy val scalaJSClasspath = mkClasspath(List( Properties.scalaJSJavalib, + Properties.scalaJSScalalib, Properties.scalaJSLibrary, Properties.dottyLibraryJS )) diff --git a/project/Build.scala b/project/Build.scala index a9095e924c1b..f265fe1ff359 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1650,6 +1650,7 @@ object Build { Seq( "-Ddotty.tests.classes.dottyLibraryJS=" + dottyLibraryJSJar, "-Ddotty.tests.classes.scalaJSJavalib=" + findArtifactPath(externalJSDeps, "scalajs-javalib"), + "-Ddotty.tests.classes.scalaJSScalalib=" + findArtifactPath(externalJSDeps, "scalajs-scalalib_2.13"), "-Ddotty.tests.classes.scalaJSLibrary=" + findArtifactPath(externalJSDeps, "scalajs-library_2.13"), ) }, diff --git a/project/plugins.sbt b/project/plugins.sbt index 63ace3e44e26..91591e8b104f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 
+6,7 @@ libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.14.0") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.15.0") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") From 78f3eeb15848e950619b91eeaf8e918f9a7d49aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 29 Apr 2024 22:09:42 +0200 Subject: [PATCH 212/465] Fix scala-js/scala-js#4929: Fix logic for moving early assignements in JS ctors. Previously, we moved all statements in the constructors after the super constructor call. However, it turns out that there are statements that must be kept before, notably local `val`s generated for default arguments to the super constructor. We now keep statements where they are by default. We only move statements of the form `C.this.field = ident;`, which are the only ones that require access to `this`. Forward port of the upstream commit https://github.com/scala-js/scala-js/commit/2e4594f0739cc58b74f1de7d5c4cc51b72a1371a --- .../dotty/tools/backend/sjs/JSCodeGen.scala | 68 +++++++++++++------ 1 file changed, 49 insertions(+), 19 deletions(-) diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 8328afd52573..ca933c44f70e 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -1146,42 +1146,72 @@ class JSCodeGen()(using genCtx: Context) { private def genPrimaryJSClassCtor(dd: DefDef): PrimaryJSCtor = { val sym = dd.symbol - val Block(stats, _) = dd.rhs: @unchecked assert(sym.isPrimaryConstructor, s"called with non-primary ctor: $sym") + var preSuperStats = List.newBuilder[js.Tree] var jsSuperCall: Option[js.JSSuperConstructorCall] = None - val jsStats = List.newBuilder[js.Tree] + val postSuperStats = List.newBuilder[js.Tree] - /* Move all statements after the super constructor call since JS - * 
cannot access `this` before the super constructor call. + /* Move param accessor initializers after the super constructor call since + * JS cannot access `this` before the super constructor call. * * dotc inserts statements before the super constructor call for param * accessor initializers (including val's and var's declared in the - * params). We move those after the super constructor call, and are - * therefore executed later than for a Scala class. + * params). Those statements are assignments whose rhs'es are always simple + * Idents (the constructor params). + * + * There can also be local `val`s before the super constructor call for + * default arguments to the super constructor. These must remain before. + * + * Our strategy is therefore to move only the field assignments after the + * super constructor call. They are therefore executed later than for a + * Scala class (as specified for non-native JS classes semantics). + * However, side effects and evaluation order of all the other + * computations remains unchanged. 
*/ withPerMethodBodyState(sym) { - stats.foreach { - case tree @ Apply(fun @ Select(Super(This(_), _), _), args) - if fun.symbol.isClassConstructor => - assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") - implicit val pos: Position = tree.span - jsSuperCall = Some(js.JSSuperConstructorCall(genActualJSArgs(fun.symbol, args))) + def isThisField(tree: Tree): Boolean = tree match { + case Select(ths: This, _) => ths.symbol == currentClassSym.get + case tree: Ident => desugarIdent(tree).exists(isThisField(_)) + case _ => false + } - case stat => - val jsStat = genStat(stat) - assert(jsSuperCall.isDefined || !jsStat.isInstanceOf[js.VarDef], - "Trying to move a local VarDef after the super constructor call of a non-native JS class at " + - dd.sourcePos) - jsStats += jsStat + def rec(tree: Tree): Unit = { + tree match { + case Block(stats, expr) => + stats.foreach(rec(_)) + rec(expr) + + case tree @ Apply(fun @ Select(Super(This(_), _), _), args) + if fun.symbol.isClassConstructor => + assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") + implicit val pos: Position = tree.span + jsSuperCall = Some(js.JSSuperConstructorCall(genActualJSArgs(fun.symbol, args))) + + case tree if jsSuperCall.isDefined => + // Once we're past the super constructor call, everything goes after. + postSuperStats += genStat(tree) + + case Assign(lhs, Ident(_)) if isThisField(lhs) => + /* If that shape appears before the jsSuperCall, it is a param + * accessor initializer. We move it. + */ + postSuperStats += genStat(tree) + + case stat => + // Other statements are left before. 
+ preSuperStats += genStat(stat) + } } + + rec(dd.rhs) } assert(jsSuperCall.isDefined, s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), - js.JSConstructorBody(Nil, jsSuperCall.get, jsStats.result())(dd.span)) + js.JSConstructorBody(preSuperStats.result(), jsSuperCall.get, postSuperStats.result())(dd.span)) } private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { From 8ebef0f33bf4d8bd749c7e3d95b9aedea0cfe4ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 29 Apr 2024 20:01:31 +0200 Subject: [PATCH 213/465] Upgrade to Scala.js 1.16.0. Code changes are forward ports of the following refactorings: * Remove the parameters to StoreModule IR nodes. https://github.com/scala-js/scala-js/commit/659d51808b94e46a13efd7599d6119d23ea07dfc * Refactor: Make FieldName a composite of ClassName and SimpleFieldName. https://github.com/scala-js/scala-js/commit/723663b76a4dc2775dbab12f3c268e33990b6b55 --- .../src/dotty/tools/backend/sjs/JSCodeGen.scala | 16 ++++++---------- .../src/dotty/tools/backend/sjs/JSEncoding.scala | 4 ++-- project/Build.scala | 3 ++- project/plugins.sbt | 2 +- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index ca933c44f70e..6e2449b5c299 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -927,7 +927,7 @@ class JSCodeGen()(using genCtx: Context) { val className = encodeClassName(classSym) val body = js.Block( js.LoadModule(className), - js.SelectStatic(className, fieldIdent)(irTpe)) + js.SelectStatic(fieldIdent)(irTpe)) staticGetterDefs += js.MethodDef( js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), encodeStaticMemberSym(f), originalName, Nil, irTpe, @@ -2243,10 +2243,7 @@ class JSCodeGen()(using genCtx: Context) 
{ if (isStaticModule(currentClassSym) && !isModuleInitialized.get.value && currentMethodSym.get.isClassConstructor) { isModuleInitialized.get.value = true - val className = encodeClassName(currentClassSym) - val thisType = jstpe.ClassType(className) - val initModule = js.StoreModule(className, js.This()(thisType)) - js.Block(superCall, initModule) + js.Block(superCall, js.StoreModule()) } else { superCall } @@ -4463,13 +4460,12 @@ class JSCodeGen()(using genCtx: Context) { js.JSSelect(qual, genPrivateFieldsSymbol()), encodeFieldSymAsStringLiteral(sym)) } else { - js.JSPrivateSelect(qual, encodeClassName(sym.owner), - encodeFieldSym(sym)) + js.JSPrivateSelect(qual, encodeFieldSym(sym)) } (f, true) } else if (sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot)) { - val f = js.SelectStatic(encodeClassName(sym.owner), encodeFieldSym(sym))(jstpe.AnyType) + val f = js.SelectStatic(encodeFieldSym(sym))(jstpe.AnyType) (f, true) } else if (sym.hasAnnotation(jsdefn.JSExportStaticAnnot)) { val jsName = sym.getAnnotation(jsdefn.JSExportStaticAnnot).get.argumentConstantString(0).getOrElse { @@ -4495,9 +4491,9 @@ class JSCodeGen()(using genCtx: Context) { val f = if sym.is(JavaStatic) then - js.SelectStatic(className, fieldIdent)(irType) + js.SelectStatic(fieldIdent)(irType) else - js.Select(qual, className, fieldIdent)(irType) + js.Select(qual, fieldIdent)(irType) (f, boxed) } diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala index f2b90d5b1161..098f592daa30 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala @@ -17,7 +17,7 @@ import dotty.tools.dotc.transform.sjs.JSSymUtils.* import org.scalajs.ir import org.scalajs.ir.{Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{LocalName, LabelName, FieldName, SimpleMethodName, MethodName, ClassName} +import org.scalajs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, 
SimpleMethodName, MethodName, ClassName} import org.scalajs.ir.OriginalName import org.scalajs.ir.OriginalName.NoOriginalName import org.scalajs.ir.UTF8String @@ -173,7 +173,7 @@ object JSEncoding { } def encodeFieldSym(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.FieldIdent = - js.FieldIdent(FieldName(encodeFieldSymAsString(sym))) + js.FieldIdent(FieldName(encodeClassName(sym.owner), SimpleFieldName(encodeFieldSymAsString(sym)))) def encodeFieldSymAsStringLiteral(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.StringLiteral = js.StringLiteral(encodeFieldSymAsString(sym)) diff --git a/project/Build.scala b/project/Build.scala index f265fe1ff359..ebe46a2e85d1 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1508,7 +1508,8 @@ object Build { "isNoModule" -> (moduleKind == ModuleKind.NoModule), "isESModule" -> (moduleKind == ModuleKind.ESModule), "isCommonJSModule" -> (moduleKind == ModuleKind.CommonJSModule), - "isFullOpt" -> (stage == FullOptStage), + "usesClosureCompiler" -> linkerConfig.closureCompiler, + "hasMinifiedNames" -> (linkerConfig.closureCompiler || linkerConfig.minify), "compliantAsInstanceOfs" -> (sems.asInstanceOfs == CheckedBehavior.Compliant), "compliantArrayIndexOutOfBounds" -> (sems.arrayIndexOutOfBounds == CheckedBehavior.Compliant), "compliantArrayStores" -> (sems.arrayStores == CheckedBehavior.Compliant), diff --git a/project/plugins.sbt b/project/plugins.sbt index 91591e8b104f..59e58007a4a0 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.15.0") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.16.0") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") From 34280669af67b12c67de84ea34304622f922d4f6 Mon Sep 17 00:00:00 2001 From: EnzeXing Date: Tue, 30 Apr 2024 10:25:50 +0800 Subject: [PATCH 214/465] Correction --- 
compiler/src/dotty/tools/dotc/transform/init/Objects.scala | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 1f06023ae892..8c0a34092330 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -83,7 +83,7 @@ class Objects(using Context @constructorOnly): val immutableLazyList: Symbol = requiredModule("scala.collection.immutable.LazyList") val LazyList_empty: Symbol = immutableLazyList.requiredValue("_empty") - val whiteList: Set[Symbol] = Set() + val whiteList: Set[Symbol] = Set(SetNode_EmptySetNode, HashSet_EmptySet, Vector_EmptyIterator, MapNode_EmptyMapNode, HashMap_EmptyMap, LazyList_empty) // ----------------------------- abstract domain ----------------------------- @@ -173,7 +173,7 @@ class Objects(using Context @constructorOnly): extends Ref(valsMap = mutable.Map.empty, varsMap = mutable.Map.empty, outersMap = mutable.Map.empty): val owner = klass - def show(using Context) = "ObjectRef(" + klass.show + ")" + "valMap = " + vals + "varMap = " + vars + def show(using Context) = "ObjectRef(" + klass.show + ")" /** * Represents values that are instances of the specified class. @@ -832,7 +832,6 @@ class Objects(using Context @constructorOnly): errorReadOtherStaticObject(State.currentObject, addr) Bottom else if ref.isObjectRef && ref.klass.hasSource then - println(s"Uninitialized field Position 2, ref = $ref, target = $target") report.warning("Access uninitialized field " + field.show + ". " + Trace.show, Trace.position) Bottom else @@ -841,7 +840,6 @@ class Objects(using Context @constructorOnly): else if ref.hasVal(target) then ref.valValue(target) else if ref.isObjectRef && ref.klass.hasSource then - println(s"Uninitialized field Position 2, ref = $ref, target = $target") report.warning("Access uninitialized field " + field.show + ". 
" + Trace.show, Trace.position) Bottom else From 321881a80e6f663107470217c748541f1639f189 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 30 Apr 2024 08:14:29 +0200 Subject: [PATCH 215/465] Avoid conversion of `Unit` type into `()` term Fixes #20286 --- .../tools/dotc/transform/BetaReduce.scala | 3 ++- tests/pos-macros/i20286/Macro_1.scala | 24 +++++++++++++++++++ tests/pos-macros/i20286/Test_2.scala | 17 +++++++++++++ 3 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 tests/pos-macros/i20286/Macro_1.scala create mode 100644 tests/pos-macros/i20286/Test_2.scala diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index dd20ff9557ca..60c1bc7c61bb 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -153,7 +153,8 @@ object BetaReduce: val expansion1 = new TreeMap { override def transform(tree: Tree)(using Context) = tree.tpe.widenTermRefExpr match case ConstantType(const) if isPureExpr(tree) => cpy.Literal(tree)(const) - case tpe: TypeRef if tpe.derivesFrom(defn.UnitClass) && isPureExpr(tree) => cpy.Literal(tree)(Constant(())) + case tpe: TypeRef if tree.isTerm && tpe.derivesFrom(defn.UnitClass) && isPureExpr(tree) => + cpy.Literal(tree)(Constant(())) case _ => super.transform(tree) }.transform(expansion) diff --git a/tests/pos-macros/i20286/Macro_1.scala b/tests/pos-macros/i20286/Macro_1.scala new file mode 100644 index 000000000000..d582d33a1198 --- /dev/null +++ b/tests/pos-macros/i20286/Macro_1.scala @@ -0,0 +1,24 @@ +import scala.quoted.* + +type P[+T] = ParsingRun[T] +trait ParsingRun[+T] { + var successValue: Any + def freshSuccessUnit(): ParsingRun[Unit] + +} + +object MacroInlineImpls { + inline def flatMapXInline[T, V]( + lhs: ParsingRun[T] + )(inline f: T => ParsingRun[V]): ParsingRun[V] = { + f(lhs.successValue.asInstanceOf[T]) + } + + def parsedSequence0[T: Type, V: Type, 
R: Type]( + lhs: Expr[ParsingRun[T]], + rhs: Expr[ParsingRun[V]] + )(using quotes: Quotes): Expr[ParsingRun[R]] = { + import quotes.reflect.* + '{ $rhs.asInstanceOf[ParsingRun[R]] } + } +} diff --git a/tests/pos-macros/i20286/Test_2.scala b/tests/pos-macros/i20286/Test_2.scala new file mode 100644 index 000000000000..b60a5682c051 --- /dev/null +++ b/tests/pos-macros/i20286/Test_2.scala @@ -0,0 +1,17 @@ +implicit inline def LiteralStr(s: String)(implicit ctx: P[Any]): P[Unit] = ??? + +extension [T](inline parse0: P[T]) { + inline def ~[V, R](inline other: P[V])(using + ctx: P[?] + ): P[R] = ${ MacroInlineImpls.parsedSequence0[T, V, R]('parse0, 'other) } + + inline def flatMapX[V](inline f: T => P[V]): P[V] = + MacroInlineImpls.flatMapXInline[T, V](parse0)(f) +} + +def deeper[$: P]: P[Int] = ??? +def newline[$: P]: P[Unit] = ??? +def blockBody[p: P]: P[Seq[Int]] = newline ~ deeper.flatMapX { i => + val y = LiteralStr("")(using ???) + ??? +} From 258c11ac444fe1752cde3f3d0ddfe7d0be4f01c6 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 28 Feb 2024 11:27:01 +0100 Subject: [PATCH 216/465] Enable experimental mode when experimental feature is imported The `@experimental` flag is added to top-level definitions in the package where the language feature is imported. 
--- compiler/src/dotty/tools/dotc/Driver.scala | 1 - .../src/dotty/tools/dotc/config/Feature.scala | 28 ++++++---- .../dotty/tools/dotc/core/Definitions.scala | 7 +++ .../tools/dotc/transform/PostTyper.scala | 10 ++-- .../src/dotty/tools/dotc/typer/Checking.scala | 54 ++++++++++--------- .../tools/dotc/typer/CrossVersionChecks.scala | 14 ++--- .../other-new-features/experimental-defs.md | 2 + tests/neg-macros/i18677-a.check | 4 +- tests/neg-macros/i18677-b.check | 4 +- ...expeimental-flag-with-lang-feature-1.scala | 5 -- tests/neg/experimental-erased.scala | 11 ---- ...import-with-top-level-val-underscore.check | 6 +++ ...import-with-top-level-val-underscore.scala | 6 +++ tests/neg/experimental-imports.scala | 6 +-- ...perimental-message-experimental-flag.check | 7 +-- tests/neg/experimental-message.check | 15 +++--- tests/neg/experimental-nested-imports-2.scala | 8 +-- tests/neg/experimental-nested-imports-3.scala | 8 +-- tests/neg/experimental-nested-imports.scala | 8 +-- tests/neg/experimental.scala | 2 +- tests/neg/experimentalErased.scala | 24 --------- tests/neg/experimentalOverloads.scala | 2 + tests/neg/i13091.check | 16 ++++++ tests/neg/use-experimental-def.check | 5 +- tests/pos-custom-args/captures/try.scala | 2 +- ...expeimental-flag-with-lang-feature-2.scala | 2 +- .../experimental-package-imports.scala | 8 +-- tests/pos/interleaving-chainedParams.scala | 2 +- tests/run-macros/term-show.check | 2 +- 29 files changed, 145 insertions(+), 124 deletions(-) delete mode 100644 tests/neg/expeimental-flag-with-lang-feature-1.scala delete mode 100644 tests/neg/experimental-erased.scala create mode 100644 tests/neg/experimental-import-with-top-level-val-underscore.check create mode 100644 tests/neg/experimental-import-with-top-level-val-underscore.scala delete mode 100644 tests/neg/experimentalErased.scala create mode 100644 tests/neg/i13091.check rename tests/{neg => pos}/expeimental-flag-with-lang-feature-2.scala (71%) rename tests/{neg => 
pos}/experimental-package-imports.scala (62%) diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 580c0eae1810..6625b5ca6ea2 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -80,7 +80,6 @@ class Driver { val ictx = rootCtx.fresh val summary = command.distill(args, ictx.settings)(ictx.settingsState)(using ictx) ictx.setSettings(summary.sstate) - Feature.checkExperimentalSettings(using ictx) MacroClassLoader.init(ictx) Positioned.init(using ictx) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index c90338302ce7..f4c76d42b25f 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -34,6 +34,11 @@ object Feature: val captureChecking = experimental("captureChecking") val into = experimental("into") + def experimentalAutoEnableFeatures(using Context): List[TermName] = + defn.languageExperimentalFeatures + .map(sym => experimental(sym.name)) + .filterNot(_ == captureChecking) // TODO is this correct? + /** Is `feature` enabled by by a command-line setting? The enabling setting is * * -language:feature @@ -157,18 +162,23 @@ object Feature: private def experimentalUseSite(which: String): String = s"""Experimental $which may only be used under experimental mode: | 1. in a definition marked as @experimental, or - | 2. compiling with the -experimental compiler flag, or - | 3. with a nightly or snapshot version of the compiler. + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag, or + | 4. with a nightly or snapshot version of the compiler. |""".stripMargin - /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. 
*/ - def checkExperimentalSettings(using Context): Unit = - for setting <- ctx.settings.language.value - if setting.startsWith("experimental.") && setting != "experimental.macros" - do checkExperimentalFeature(s"feature $setting", NoSourcePosition) - def isExperimentalEnabled(using Context): Boolean = - (Properties.unstableExperimentalEnabled && !ctx.settings.YnoExperimental.value) || ctx.settings.experimental.value + (Properties.unstableExperimentalEnabled && !ctx.settings.YnoExperimental.value) || + ctx.settings.experimental.value || + experimentalAutoEnableFeatures.exists(enabled) + + def isExperimentalEnabledBySetting(using Context): Boolean = + (Properties.unstableExperimentalEnabled && !ctx.settings.YnoExperimental.value) || + ctx.settings.experimental.value || + experimentalAutoEnableFeatures.exists(enabledBySetting) + + def isExperimentalEnabledByImport(using Context): Boolean = + experimentalAutoEnableFeatures.exists(enabledByImport) /** Handle language import `import language..` if it is one * of the global imports `pureFunctions` or `captureChecking`. In this case diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index e813786068a5..675084ec230b 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -2008,6 +2008,13 @@ class Definitions { CapabilityAnnot, RequiresCapabilityAnnot, RetainsAnnot, RetainsCapAnnot, RetainsByNameAnnot) + /** Experimental language features defined in `scala.runtime.stdLibPatches.language.experimental`. + * + * This list does not include `scala.language.experimental.macros`. 
+ */ + @tu lazy val languageExperimentalFeatures: List[TermSymbol] = + LanguageExperimentalModule.moduleClass.info.decls.toList.filter(_.isAllOf(Lazy | Module)).map(_.asTerm) + // ----- primitive value class machinery ------------------------------------------ class PerRun[T](generate: Context ?=> T) { diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index e809c0fad463..3d189567a5cb 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -546,8 +546,8 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } override def transformStats[T](trees: List[Tree], exprOwner: Symbol, wrapResult: List[Tree] => Context ?=> T)(using Context): T = - try super.transformStats(trees, exprOwner, wrapResult) - finally Checking.checkExperimentalImports(trees) + Checking.checkAndAdaptExperimentalImports(trees) + super.transformStats(trees, exprOwner, wrapResult) /** Transforms the rhs tree into a its default tree if it is in an `erased` val/def. * Performed to shrink the tree that is known to be erased later. 
@@ -590,8 +590,10 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => (sym.owner.is(Package) || (sym.owner.isPackageObject && !sym.isConstructor)) if sym.is(Module) then ExperimentalAnnotation.copy(sym.companionClass).foreach(sym.addAnnotation) - if !sym.hasAnnotation(defn.ExperimentalAnnot) && ctx.settings.experimental.value && isTopLevelDefinitionInSource(sym) then - sym.addAnnotation(ExperimentalAnnotation("Added by -experimental", sym.span)) + if !sym.hasAnnotation(defn.ExperimentalAnnot) + && Feature.isExperimentalEnabledBySetting && isTopLevelDefinitionInSource(sym) + then + sym.addAnnotation(ExperimentalAnnotation("Added by -experimental or -language:experimental.*", sym.span)) // It needs to run at the phase of the postTyper --- otherwise, the test of the symbols will use // the transformed denotation with added `Serializable` and `AbstractFunction1`. diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index bfbcd4358853..9aa80311d853 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -42,6 +42,7 @@ import cc.{isCaptureChecking, isRetainsLike} import collection.mutable import reporting.* +import Annotations.ExperimentalAnnotation object Checking { import tpd.* @@ -797,50 +798,53 @@ object Checking { tree /** Check that experimental language imports in `trees` - * are done only in experimental scopes, or in a top-level - * scope with only @experimental definitions. + * are done only in experimental scopes. For for top-level + * experimental imports, all top-level definitions are transformed + * to @experimental definitions. 
+ * */ - def checkExperimentalImports(trees: List[Tree])(using Context): Unit = - - def nonExperimentalStat(trees: List[Tree]): Tree = trees match - case (_: Import | EmptyTree) :: rest => - nonExperimentalStat(rest) + def checkAndAdaptExperimentalImports(trees: List[Tree])(using Context): Unit = + def nonExperimentalStats(trees: List[Tree]): List[Tree] = trees match + case (_: ImportOrExport | EmptyTree) :: rest => + nonExperimentalStats(rest) case (tree @ TypeDef(_, impl: Template)) :: rest if tree.symbol.isPackageObject => - nonExperimentalStat(impl.body).orElse(nonExperimentalStat(rest)) + nonExperimentalStats(impl.body) ::: nonExperimentalStats(rest) case (tree: PackageDef) :: rest => - nonExperimentalStat(tree.stats).orElse(nonExperimentalStat(rest)) + nonExperimentalStats(tree.stats) ::: nonExperimentalStats(rest) case (tree: MemberDef) :: rest => if tree.symbol.isExperimental || tree.symbol.is(Synthetic) then - nonExperimentalStat(rest) + nonExperimentalStats(rest) else - tree + tree :: nonExperimentalStats(rest) case tree :: rest => - tree + tree :: nonExperimentalStats(rest) case Nil => - EmptyTree + Nil for case imp @ Import(qual, selectors) <- trees do def isAllowedImport(sel: untpd.ImportSelector) = val name = Feature.experimental(sel.name) name == Feature.scala2macros - || name == Feature.erasedDefinitions || name == Feature.captureChecking languageImport(qual) match case Some(nme.experimental) if !ctx.owner.isInExperimentalScope && !selectors.forall(isAllowedImport) => - def check(stable: => String) = - Feature.checkExperimentalFeature("features", imp.srcPos, - s"\n\nNote: the scope enclosing the import is not considered experimental because it contains the\nnon-experimental $stable") - if ctx.owner.is(Package) then - // allow top-level experimental imports if all definitions are @experimental - nonExperimentalStat(trees) match - case EmptyTree => - case tree: MemberDef => check(i"${tree.symbol}") - case tree => check(i"expression ${tree}") - else 
Feature.checkExperimentalFeature("features", imp.srcPos) + if ctx.owner.is(Package) || ctx.owner.name.startsWith(str.REPL_SESSION_LINE) then + // mark all top-level definitions as @experimental + for tree <- nonExperimentalStats(trees) do + tree match + case tree: MemberDef => + // TODO move this out of checking (into posttyper?) + val sym = tree.symbol + if !sym.isExperimental then + sym.addAnnotation(ExperimentalAnnotation(i"Added by top level $imp", sym.span)) + case tree => + // There is no definition to attach the @experimental annotation + report.error("Implementation restriction: top-level `val _ = ...` is not supported with experimental language imports.", tree.srcPos) + else Feature.checkExperimentalFeature("feature local import", imp.srcPos) case _ => - end checkExperimentalImports + end checkAndAdaptExperimentalImports /** Checks that PolyFunction only have valid refinements. * diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 790ffb2ad343..5ce1b02733d0 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -132,12 +132,14 @@ class CrossVersionChecks extends MiniPhase: } override def transformOther(tree: Tree)(using Context): Tree = - tree.foreachSubTree { // Find references in type trees and imports - case tree: Ident => transformIdent(tree) - case tree: Select => transformSelect(tree) - case tree: TypeTree => transformTypeTree(tree) - case _ => - } + val inPackage = ctx.owner.is(Package) || ctx.owner.isPackageObject + if !(inPackage && tree.isInstanceOf[ImportOrExport] && Feature.isExperimentalEnabledByImport) then + tree.foreachSubTree { // Find references in type trees and imports + case tree: Ident => transformIdent(tree) + case tree: Select => transformSelect(tree) + case tree: TypeTree => transformTypeTree(tree) + case _ => + } tree end CrossVersionChecks diff --git 
a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md index b71b20ecc036..4741bb31fde2 100644 --- a/docs/_docs/reference/other-new-features/experimental-defs.md +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -268,6 +268,8 @@ Experimental definitions can only be referenced in an experimental scope. Experi 6. Any code compiled using a [_Nightly_](https://search.maven.org/artifact/org.scala-lang/scala3-compiler_3) or _Snapshot_ version of the compiler is considered to be in an experimental scope. Can use the `-Yno-experimental` compiler flag to disable it and run as a proper release. +7. An experimental language feature is imported in at the package level. All top-level definitions will be marked as `@experimental`. + In any other situation, a reference to an experimental definition will cause a compilation error. ## Experimental overriding diff --git a/tests/neg-macros/i18677-a.check b/tests/neg-macros/i18677-a.check index d190ce36318a..07f0a66cec81 100644 --- a/tests/neg-macros/i18677-a.check +++ b/tests/neg-macros/i18677-a.check @@ -7,10 +7,10 @@ |The tree does not conform to the compiler's tree invariants. 
| |Macro was: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental or -language:experimental.*") @extendFoo class AFoo() | |The macro returned: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() extends Foo + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental or -language:experimental.*") @extendFoo class AFoo() extends Foo | |Error: |assertion failed: Parents of class symbol differs from the parents in the tree for class AFoo diff --git a/tests/neg-macros/i18677-b.check b/tests/neg-macros/i18677-b.check index eca2bdcde726..28cbc0521c30 100644 --- a/tests/neg-macros/i18677-b.check +++ b/tests/neg-macros/i18677-b.check @@ -7,10 +7,10 @@ |The tree does not conform to the compiler's tree invariants. 
| |Macro was: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental or -language:experimental.*") @extendFoo class AFoo() | |The macro returned: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() extends Foo + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental or -language:experimental.*") @extendFoo class AFoo() extends Foo | |Error: |assertion failed: Parents of class symbol differs from the parents in the tree for class AFoo diff --git a/tests/neg/expeimental-flag-with-lang-feature-1.scala b/tests/neg/expeimental-flag-with-lang-feature-1.scala deleted file mode 100644 index a5ece729fa3d..000000000000 --- a/tests/neg/expeimental-flag-with-lang-feature-1.scala +++ /dev/null @@ -1,5 +0,0 @@ -//> using options -Yno-experimental - -import scala.language.experimental.erasedDefinitions - -erased def erasedFun(erased x: Int): Int = x // error // error diff --git a/tests/neg/experimental-erased.scala b/tests/neg/experimental-erased.scala deleted file mode 100644 index 3619d0059a95..000000000000 --- a/tests/neg/experimental-erased.scala +++ /dev/null @@ -1,11 +0,0 @@ -//> using options -Yno-experimental - -import language.experimental.erasedDefinitions -import annotation.experimental - -@experimental -erased class CanThrow[-E <: Exception] - -erased class CanThrow2[-E <: Exception] // error - -def other = 1 diff --git a/tests/neg/experimental-import-with-top-level-val-underscore.check b/tests/neg/experimental-import-with-top-level-val-underscore.check new file mode 100644 index 000000000000..7d595949cdeb --- /dev/null +++ 
b/tests/neg/experimental-import-with-top-level-val-underscore.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/experimental-import-with-top-level-val-underscore.scala:4:4 ---------------------------------------- +4 |val _ = // error + | ^ + | Implementation restriction: top-level `val _ = ...` is not supported with experimental language imports. +5 | println("Hello, world!") +6 | 42 diff --git a/tests/neg/experimental-import-with-top-level-val-underscore.scala b/tests/neg/experimental-import-with-top-level-val-underscore.scala new file mode 100644 index 000000000000..69db6892b5ee --- /dev/null +++ b/tests/neg/experimental-import-with-top-level-val-underscore.scala @@ -0,0 +1,6 @@ + +import language.experimental.erasedDefinitions + +val _ = // error + println("Hello, world!") + 42 diff --git a/tests/neg/experimental-imports.scala b/tests/neg/experimental-imports.scala index 3a672ac65a22..9b79fbabcd15 100644 --- a/tests/neg/experimental-imports.scala +++ b/tests/neg/experimental-imports.scala @@ -14,7 +14,7 @@ object Object2: import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions + import language.experimental.erasedDefinitions // error erased def f = 1 @experimental @@ -29,7 +29,7 @@ object Class2: import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions + import language.experimental.erasedDefinitions // error erased def f = 1 @experimental @@ -44,5 +44,5 @@ def fun2 = import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions + import language.experimental.erasedDefinitions // error erased def f = 1 
diff --git a/tests/neg/experimental-message-experimental-flag.check b/tests/neg/experimental-message-experimental-flag.check index ce3a85a89916..942d9195d3e6 100644 --- a/tests/neg/experimental-message-experimental-flag.check +++ b/tests/neg/experimental-message-experimental-flag.check @@ -2,9 +2,10 @@ -- Error: tests/neg/experimental-message-experimental-flag/Test_2.scala:3:10 ------------------------------------------- 3 |def g() = f() // error | ^ - | method f is marked @experimental: Added by -experimental + | method f is marked @experimental: Added by -experimental or -language:experimental.* | | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or - | 2. compiling with the -experimental compiler flag, or - | 3. with a nightly or snapshot version of the compiler. + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag, or + | 4. with a nightly or snapshot version of the compiler. diff --git a/tests/neg/experimental-message.check b/tests/neg/experimental-message.check index d57fe58f27cf..1850e64bc156 100644 --- a/tests/neg/experimental-message.check +++ b/tests/neg/experimental-message.check @@ -5,8 +5,9 @@ | | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or - | 2. compiling with the -experimental compiler flag, or - | 3. with a nightly or snapshot version of the compiler. + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag, or + | 4. with a nightly or snapshot version of the compiler. -- Error: tests/neg/experimental-message.scala:16:2 -------------------------------------------------------------------- 16 | f2() // error | ^^ @@ -14,8 +15,9 @@ | | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or - | 2. 
compiling with the -experimental compiler flag, or - | 3. with a nightly or snapshot version of the compiler. + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag, or + | 4. with a nightly or snapshot version of the compiler. -- Error: tests/neg/experimental-message.scala:17:2 -------------------------------------------------------------------- 17 | f3() // error | ^^ @@ -23,5 +25,6 @@ | | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or - | 2. compiling with the -experimental compiler flag, or - | 3. with a nightly or snapshot version of the compiler. + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag, or + | 4. with a nightly or snapshot version of the compiler. diff --git a/tests/neg/experimental-nested-imports-2.scala b/tests/neg/experimental-nested-imports-2.scala index 4aac719a81d6..9c99e0729a40 100644 --- a/tests/neg/experimental-nested-imports-2.scala +++ b/tests/neg/experimental-nested-imports-2.scala @@ -5,27 +5,27 @@ import annotation.experimental class Class1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def g = 1 object Object1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def g = 1 def fun1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check 
at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def g = 1 val value1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def g = 1 \ No newline at end of file diff --git a/tests/neg/experimental-nested-imports-3.scala b/tests/neg/experimental-nested-imports-3.scala index 39b548b2586b..f6f4a1360e6b 100644 --- a/tests/neg/experimental-nested-imports-3.scala +++ b/tests/neg/experimental-nested-imports-3.scala @@ -5,19 +5,19 @@ import annotation.experimental class Class1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error object Object1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error def fun1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error val value1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error diff --git a/tests/neg/experimental-nested-imports.scala b/tests/neg/experimental-nested-imports.scala index 91fe3bfeb27b..417f4bf4d9c5 100644 
--- a/tests/neg/experimental-nested-imports.scala +++ b/tests/neg/experimental-nested-imports.scala @@ -5,23 +5,23 @@ import annotation.experimental class Class1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 object Object1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def fun1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 val value1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 diff --git a/tests/neg/experimental.scala b/tests/neg/experimental.scala index efca9a26ec14..22f4b5a1a852 100644 --- a/tests/neg/experimental.scala +++ b/tests/neg/experimental.scala @@ -9,7 +9,7 @@ class Test0 { } class Test1 { - import scala.language.experimental.erasedDefinitions + import scala.language.experimental.erasedDefinitions // error import scala.compiletime.erasedValue type UnivEq[A] object UnivEq: diff --git a/tests/neg/experimentalErased.scala b/tests/neg/experimentalErased.scala deleted file mode 100644 index 99de8048c261..000000000000 --- a/tests/neg/experimentalErased.scala +++ /dev/null @@ -1,24 +0,0 @@ -//> using options 
-Yno-experimental - -import language.experimental.erasedDefinitions -import annotation.experimental - -@experimental -erased class Foo - -erased class Bar // error - -@experimental -erased def foo = 2 - -erased def bar = 2 // error - -@experimental -erased val foo2 = 2 - -erased val bar2 = 2 // error - -@experimental -def foo3(erased a: Int) = 2 - -def bar3(erased a: Int) = 2 // error diff --git a/tests/neg/experimentalOverloads.scala b/tests/neg/experimentalOverloads.scala index 7adaf0b78840..570dcc7c79ce 100644 --- a/tests/neg/experimentalOverloads.scala +++ b/tests/neg/experimentalOverloads.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental trait A: diff --git a/tests/neg/i13091.check b/tests/neg/i13091.check new file mode 100644 index 000000000000..0e2a61e4b429 --- /dev/null +++ b/tests/neg/i13091.check @@ -0,0 +1,16 @@ +-- [E190] Potential Issue Warning: tests/neg/i13091.scala:7:17 --------------------------------------------------------- +7 |def test: Unit = new Foo // error: class Foo is marked @experimental ... + | ^^^^^^^ + | Discarded non-Unit value of type Foo. You may want to use `()`. + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i13091.scala:7:21 ---------------------------------------------------------------------------------- +7 |def test: Unit = new Foo // error: class Foo is marked @experimental ... + | ^^^ + | class Foo is marked @experimental + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag, or + | 4. with a nightly or snapshot version of the compiler. 
diff --git a/tests/neg/use-experimental-def.check b/tests/neg/use-experimental-def.check index cb8fc1402b69..eb04a3d9d1d8 100644 --- a/tests/neg/use-experimental-def.check +++ b/tests/neg/use-experimental-def.check @@ -5,5 +5,6 @@ | | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or - | 2. compiling with the -experimental compiler flag, or - | 3. with a nightly or snapshot version of the compiler. + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag, or + | 4. with a nightly or snapshot version of the compiler. diff --git a/tests/pos-custom-args/captures/try.scala b/tests/pos-custom-args/captures/try.scala index 05c41be69001..8ecea9e00359 100644 --- a/tests/pos-custom-args/captures/try.scala +++ b/tests/pos-custom-args/captures/try.scala @@ -18,7 +18,7 @@ def handle[E <: Exception, R](op: (erased CanThrow[E]) -> R)(handler: E -> R): R try op(x) catch case ex: E => handler(ex) -val _ = handle { (erased x) => +val bar = handle { (erased x) => if true then raise(new Exception)(using x) 22 diff --git a/tests/neg/expeimental-flag-with-lang-feature-2.scala b/tests/pos/expeimental-flag-with-lang-feature-2.scala similarity index 71% rename from tests/neg/expeimental-flag-with-lang-feature-2.scala rename to tests/pos/expeimental-flag-with-lang-feature-2.scala index 3e0b9359711a..77f35f663576 100644 --- a/tests/neg/expeimental-flag-with-lang-feature-2.scala +++ b/tests/pos/expeimental-flag-with-lang-feature-2.scala @@ -1,6 +1,6 @@ //> using options -Yno-experimental -import scala.language.experimental.namedTypeArguments // error +import scala.language.experimental.namedTypeArguments def namedTypeArgumentsFun[T, U]: Int = namedTypeArgumentsFun[T = Int, U = Int] diff --git a/tests/neg/experimental-package-imports.scala b/tests/pos/experimental-package-imports.scala similarity index 62% rename from tests/neg/experimental-package-imports.scala 
rename to tests/pos/experimental-package-imports.scala index 7a4b04606b9d..45a0905ac311 100644 --- a/tests/neg/experimental-package-imports.scala +++ b/tests/pos/experimental-package-imports.scala @@ -3,12 +3,12 @@ import annotation.experimental package foo { - import language.experimental.namedTypeArguments // error - import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.namedTypeArguments + import language.experimental.genericNumberLiterals + import language.experimental.erasedDefinitions package bar { - def foo = 1 + def foo = 1 // marked as @experimental because of the language imports } } diff --git a/tests/pos/interleaving-chainedParams.scala b/tests/pos/interleaving-chainedParams.scala index e502888d97c8..a54885d28002 100644 --- a/tests/pos/interleaving-chainedParams.scala +++ b/tests/pos/interleaving-chainedParams.scala @@ -5,7 +5,7 @@ object chainedParams{ trait Chain{ type Tail <: Chain } - + def f[C1 <: Chain](c1: C1)[C2 <: c1.Tail](c2: C2)[C3 <: c2.Tail](c3: C3): c3.Tail = ??? 
val self = new Chain{ type Tail = this.type } diff --git a/tests/run-macros/term-show.check b/tests/run-macros/term-show.check index 91ba0308e3db..9733d2ad211b 100644 --- a/tests/run-macros/term-show.check +++ b/tests/run-macros/term-show.check @@ -10,7 +10,7 @@ } () } -@scala.annotation.internal.SourceFile("tests/run-macros/term-show/Test_2.scala") trait A() extends java.lang.Object { +@scala.annotation.internal.SourceFile("tests/run-macros/term-show/Test_2.scala") @scala.annotation.experimental("Added by top level import scala.language.experimental.erasedDefinitions") trait A() extends java.lang.Object { def imp(x: scala.Int)(implicit str: scala.Predef.String): scala.Int def use(`x₂`: scala.Int)(using `str₂`: scala.Predef.String): scala.Int def era(`x₃`: scala.Int)(erased `str₃`: scala.Predef.String): scala.Int From ee8277d4c101c7c18da7e47db612d4072341eaf9 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Fri, 8 Mar 2024 10:11:03 +0100 Subject: [PATCH 217/465] Disable experimental by default in unstable builds --- compiler/src/dotty/tools/dotc/config/Feature.scala | 5 +---- compiler/src/dotty/tools/dotc/config/Properties.scala | 6 ------ compiler/src/dotty/tools/dotc/config/ScalaSettings.scala | 2 +- .../dotty/tools/backend/jvm/PublicInBinaryTests.scala | 1 - .../reference/other-new-features/experimental-defs.md | 5 +---- project/Build.scala | 3 +++ scaladoc-testcases/src/tests/hugetype.scala | 2 +- .../src/tests/methodsAndConstructors.scala | 4 ++-- .../dotty/tools/scaladoc/snippets/SnippetCompiler.scala | 1 - tests/neg/experimental-message-experimental-flag.check | 3 +-- tests/neg/experimental-message.check | 9 +++------ tests/neg/i13091.check | 3 +-- tests/neg/overrides.scala | 2 ++ tests/neg/use-experimental-def.check | 3 +-- tests/pos-custom-args/captures/curried-closures.scala | 2 ++ .../annot-dependency-between-modules/Test_2.scala | 1 + tests/pos-macros/erasedArgs/Test_2.scala | 2 ++ .../pos-macros/macro-annot-with-companion/Macro_1.scala | 2 ++ 
tests/pos-macros/macro-annot-with-companion/Test_2.scala | 1 + tests/pos/TupleReverse.scala | 2 ++ .../{expeimental-flag.scala => experimental-flag.scala} | 0 tests/pos/i13091.scala | 3 --- tests/pos/overrides.scala | 4 +++- tests/pos/phantom-Eq2/Phantom-Eq_2.scala | 1 + tests/run-macros/i17105/Test_3.scala | 2 ++ tests/run-macros/macro-erased/Test_2.scala | 2 ++ tests/run/interleaving.scala | 3 ++- tests/run/publicInBinary/Test_2.scala | 1 + 28 files changed, 38 insertions(+), 37 deletions(-) rename tests/pos/{expeimental-flag.scala => experimental-flag.scala} (100%) delete mode 100644 tests/pos/i13091.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index f4c76d42b25f..e1c137377c2b 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -163,17 +163,14 @@ object Feature: s"""Experimental $which may only be used under experimental mode: | 1. in a definition marked as @experimental, or | 2. an experimental feature is imported at the package level, or - | 3. compiling with the -experimental compiler flag, or - | 4. with a nightly or snapshot version of the compiler. + | 3. compiling with the -experimental compiler flag. 
|""".stripMargin def isExperimentalEnabled(using Context): Boolean = - (Properties.unstableExperimentalEnabled && !ctx.settings.YnoExperimental.value) || ctx.settings.experimental.value || experimentalAutoEnableFeatures.exists(enabled) def isExperimentalEnabledBySetting(using Context): Boolean = - (Properties.unstableExperimentalEnabled && !ctx.settings.YnoExperimental.value) || ctx.settings.experimental.value || experimentalAutoEnableFeatures.exists(enabledBySetting) diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index 3392882057e7..2a362a707ade 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -84,12 +84,6 @@ trait PropertiesTrait { */ val versionString: String = "version " + simpleVersionString - /** Whether the current version of compiler is experimental - * - * Snapshot, nightly releases and non-bootstrapped compiler are experimental. - */ - val unstableExperimentalEnabled: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") - /** Whether the current version of compiler supports research plugins. 
*/ val researchPluginEnabled: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 6520546ec0f9..e245c8ff10e7 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -406,7 +406,7 @@ private sealed trait YSettings: val YretainTrees: Setting[Boolean] = BooleanSetting(ForkSetting, "Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting(ForkSetting, "Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty.") - val YnoExperimental: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-experimental", "Disable experimental language features by default in NIGHTLY/SNAPSHOT versions of the compiler.") + val YnoExperimental: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-experimental", "Disable experimental language features by default in NIGHTLY/SNAPSHOT versions of the compiler (deprecated, no-op).") val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles") diff --git a/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala b/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala index 
eebb2b23247a..a5463b75804e 100644 --- a/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala @@ -42,7 +42,6 @@ class PublicInBinaryTests extends DottyBytecodeTest { override def initCtx = val ctx0 = super.initCtx ctx0.setSetting(ctx0.settings.experimental, true) - ctx0.setSetting(ctx0.settings.YnoExperimental, true) @Test def publicInBinaryDef(): Unit = { diff --git a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md index 4741bb31fde2..06cc8860281c 100644 --- a/docs/_docs/reference/other-new-features/experimental-defs.md +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -265,10 +265,7 @@ Experimental definitions can only be referenced in an experimental scope. Experi -6. Any code compiled using a [_Nightly_](https://search.maven.org/artifact/org.scala-lang/scala3-compiler_3) or _Snapshot_ version of the compiler is considered to be in an experimental scope. -Can use the `-Yno-experimental` compiler flag to disable it and run as a proper release. - -7. An experimental language feature is imported in at the package level. All top-level definitions will be marked as `@experimental`. +6. An experimental language feature is imported in at the package level. All top-level definitions will be marked as `@experimental`. In any other situation, a reference to an experimental definition will cause a compilation error. diff --git a/project/Build.scala b/project/Build.scala index dcf6ec13760d..b2c88d1d6ae0 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1757,6 +1757,9 @@ object Build { SourceLinksIntegrationTest / scalaSource := baseDirectory.value / "test-source-links", SourceLinksIntegrationTest / test:= ((SourceLinksIntegrationTest / test) dependsOn generateScalaDocumentation.toTask("")).value, ). 
+ settings( + scalacOptions += "-experimental" // workaround use of experimental .info in Scaladoc2AnchorCreator + ). settings( Compile / resourceGenerators ++= Seq( generateStaticAssetsTask.taskValue, diff --git a/scaladoc-testcases/src/tests/hugetype.scala b/scaladoc-testcases/src/tests/hugetype.scala index fe1905cb87cc..3445764e2462 100644 --- a/scaladoc-testcases/src/tests/hugetype.scala +++ b/scaladoc-testcases/src/tests/hugetype.scala @@ -31,7 +31,7 @@ trait E: @deprecated protected implicit def same[A](a: A): A -trait XD extends E: +@experimental trait XD extends E: /** * Some important information :o * diff --git a/scaladoc-testcases/src/tests/methodsAndConstructors.scala b/scaladoc-testcases/src/tests/methodsAndConstructors.scala index 132d35035b30..cddd0f56e9fe 100644 --- a/scaladoc-testcases/src/tests/methodsAndConstructors.scala +++ b/scaladoc-testcases/src/tests/methodsAndConstructors.scala @@ -1,5 +1,7 @@ package tests.methodsAndConstructors +import scala.language.experimental.clauseInterleaving + class A class B extends A class C @@ -60,8 +62,6 @@ class Methods: def withImplicitParam2(v: String)(implicit ab: Double, a: Int, b: String): String = ??? - import scala.language.experimental.clauseInterleaving - def clauseInterleaving[T](x: T)[U](y: U)(using (T, U)): (T, U) = ??? 
diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala index b47b15676c57..2d8ca15d9c4f 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala @@ -27,7 +27,6 @@ class SnippetCompiler( object SnippetDriver extends Driver: val currentCtx = val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive) - rootCtx.setSetting(rootCtx.settings.YnoExperimental, true) rootCtx.setSetting(rootCtx.settings.experimental, true) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) rootCtx.setSetting(rootCtx.settings.YcookComments, true) diff --git a/tests/neg/experimental-message-experimental-flag.check b/tests/neg/experimental-message-experimental-flag.check index 942d9195d3e6..8d2fdb340c75 100644 --- a/tests/neg/experimental-message-experimental-flag.check +++ b/tests/neg/experimental-message-experimental-flag.check @@ -7,5 +7,4 @@ | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or | 2. an experimental feature is imported at the package level, or - | 3. compiling with the -experimental compiler flag, or - | 4. with a nightly or snapshot version of the compiler. + | 3. compiling with the -experimental compiler flag. diff --git a/tests/neg/experimental-message.check b/tests/neg/experimental-message.check index 1850e64bc156..ea26a6c50b51 100644 --- a/tests/neg/experimental-message.check +++ b/tests/neg/experimental-message.check @@ -6,8 +6,7 @@ | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or | 2. an experimental feature is imported at the package level, or - | 3. compiling with the -experimental compiler flag, or - | 4. with a nightly or snapshot version of the compiler. + | 3. compiling with the -experimental compiler flag. 
-- Error: tests/neg/experimental-message.scala:16:2 -------------------------------------------------------------------- 16 | f2() // error | ^^ @@ -16,8 +15,7 @@ | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or | 2. an experimental feature is imported at the package level, or - | 3. compiling with the -experimental compiler flag, or - | 4. with a nightly or snapshot version of the compiler. + | 3. compiling with the -experimental compiler flag. -- Error: tests/neg/experimental-message.scala:17:2 -------------------------------------------------------------------- 17 | f3() // error | ^^ @@ -26,5 +24,4 @@ | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or | 2. an experimental feature is imported at the package level, or - | 3. compiling with the -experimental compiler flag, or - | 4. with a nightly or snapshot version of the compiler. + | 3. compiling with the -experimental compiler flag. diff --git a/tests/neg/i13091.check b/tests/neg/i13091.check index 0e2a61e4b429..5cd793a9cfcb 100644 --- a/tests/neg/i13091.check +++ b/tests/neg/i13091.check @@ -12,5 +12,4 @@ | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or | 2. an experimental feature is imported at the package level, or - | 3. compiling with the -experimental compiler flag, or - | 4. with a nightly or snapshot version of the compiler. + | 3. compiling with the -experimental compiler flag. 
diff --git a/tests/neg/overrides.scala b/tests/neg/overrides.scala index ff83b91d26be..8016f5646d09 100644 --- a/tests/neg/overrides.scala +++ b/tests/neg/overrides.scala @@ -1,3 +1,5 @@ +//> using options -experimental + class Foo { type A = Int type B >: Int <: Int diff --git a/tests/neg/use-experimental-def.check b/tests/neg/use-experimental-def.check index eb04a3d9d1d8..a3d9d93f8a7a 100644 --- a/tests/neg/use-experimental-def.check +++ b/tests/neg/use-experimental-def.check @@ -6,5 +6,4 @@ | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or | 2. an experimental feature is imported at the package level, or - | 3. compiling with the -experimental compiler flag, or - | 4. with a nightly or snapshot version of the compiler. + | 3. compiling with the -experimental compiler flag. diff --git a/tests/pos-custom-args/captures/curried-closures.scala b/tests/pos-custom-args/captures/curried-closures.scala index baea8b15075c..0ad729375b3c 100644 --- a/tests/pos-custom-args/captures/curried-closures.scala +++ b/tests/pos-custom-args/captures/curried-closures.scala @@ -1,3 +1,5 @@ +//> using options -experimental + object Test: def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) val f1 = map2 diff --git a/tests/pos-macros/annot-dependency-between-modules/Test_2.scala b/tests/pos-macros/annot-dependency-between-modules/Test_2.scala index f809330af52d..a3c9d1e86f06 100644 --- a/tests/pos-macros/annot-dependency-between-modules/Test_2.scala +++ b/tests/pos-macros/annot-dependency-between-modules/Test_2.scala @@ -1,3 +1,4 @@ +//> using options -experimental @void @void class Foo \ No newline at end of file diff --git a/tests/pos-macros/erasedArgs/Test_2.scala b/tests/pos-macros/erasedArgs/Test_2.scala index 19f0364d3f71..56a7fe3383fe 100644 --- a/tests/pos-macros/erasedArgs/Test_2.scala +++ b/tests/pos-macros/erasedArgs/Test_2.scala @@ -1 +1,3 @@ +//> using options -experimental + def test: "1abc" = mcr 
diff --git a/tests/pos-macros/macro-annot-with-companion/Macro_1.scala b/tests/pos-macros/macro-annot-with-companion/Macro_1.scala index 386f5fbd09d5..29b76a47be80 100644 --- a/tests/pos-macros/macro-annot-with-companion/Macro_1.scala +++ b/tests/pos-macros/macro-annot-with-companion/Macro_1.scala @@ -1,3 +1,5 @@ +//> using options -experimental + import scala.annotation.MacroAnnotation import scala.quoted.* diff --git a/tests/pos-macros/macro-annot-with-companion/Test_2.scala b/tests/pos-macros/macro-annot-with-companion/Test_2.scala index ddf73334389b..4ae6d05fcdcf 100644 --- a/tests/pos-macros/macro-annot-with-companion/Test_2.scala +++ b/tests/pos-macros/macro-annot-with-companion/Test_2.scala @@ -1,3 +1,4 @@ +//> using options -experimental @transform class Foo diff --git a/tests/pos/TupleReverse.scala b/tests/pos/TupleReverse.scala index a5ef2ed69f0c..6d70e6759e19 100644 --- a/tests/pos/TupleReverse.scala +++ b/tests/pos/TupleReverse.scala @@ -1,3 +1,5 @@ +//> using options -experimental + import scala.Tuple.* def test[T1, T2, T3, T4] = summon[Reverse[EmptyTuple] =:= EmptyTuple] diff --git a/tests/pos/expeimental-flag.scala b/tests/pos/experimental-flag.scala similarity index 100% rename from tests/pos/expeimental-flag.scala rename to tests/pos/experimental-flag.scala diff --git a/tests/pos/i13091.scala b/tests/pos/i13091.scala deleted file mode 100644 index fa255cd6c08f..000000000000 --- a/tests/pos/i13091.scala +++ /dev/null @@ -1,3 +0,0 @@ -import annotation.experimental -@experimental class Foo -val foo = new Foo diff --git a/tests/pos/overrides.scala b/tests/pos/overrides.scala index 146dc06c76a9..c3b6235d7c1f 100644 --- a/tests/pos/overrides.scala +++ b/tests/pos/overrides.scala @@ -1,7 +1,9 @@ +//> using options -experimental + class A[T] { def f(x: T)(y: T = x) = y - + import scala.language.experimental.clauseInterleaving def b[U <: T](x: Int)[V >: T](y: String) = false diff --git a/tests/pos/phantom-Eq2/Phantom-Eq_2.scala 
b/tests/pos/phantom-Eq2/Phantom-Eq_2.scala index 87c6cc2275f1..f1535049a514 100644 --- a/tests/pos/phantom-Eq2/Phantom-Eq_2.scala +++ b/tests/pos/phantom-Eq2/Phantom-Eq_2.scala @@ -1,3 +1,4 @@ +//> using options -experimental /* This is a version of ../pos/phantom.scala that tests phantom clases with separate compilation */ object PhantomEq { diff --git a/tests/run-macros/i17105/Test_3.scala b/tests/run-macros/i17105/Test_3.scala index c19ac507e1a4..a3503c5ed8f2 100644 --- a/tests/run-macros/i17105/Test_3.scala +++ b/tests/run-macros/i17105/Test_3.scala @@ -1,3 +1,5 @@ +//> using options -experimental + import reflect.Selectable.reflectiveSelectable class Hoe { def f(x: Int): String = s"Hoe got ${x}" } diff --git a/tests/run-macros/macro-erased/Test_2.scala b/tests/run-macros/macro-erased/Test_2.scala index 1f7f8be436c7..880099021609 100644 --- a/tests/run-macros/macro-erased/Test_2.scala +++ b/tests/run-macros/macro-erased/Test_2.scala @@ -1,3 +1,5 @@ +//> using options -experimental + object Test { def main(args: Array[String]): Unit = { assert(Macro.foo1(1) == 0) diff --git a/tests/run/interleaving.scala b/tests/run/interleaving.scala index 557741032e8a..6749e59168bc 100644 --- a/tests/run/interleaving.scala +++ b/tests/run/interleaving.scala @@ -1,5 +1,6 @@ +import scala.language.experimental.clauseInterleaving + object Test extends App { - import scala.language.experimental.clauseInterleaving trait Key { type Value } trait DB { def getOrElse(k: Key)[V >: k.Value](default: V): V // dependent type parameter diff --git a/tests/run/publicInBinary/Test_2.scala b/tests/run/publicInBinary/Test_2.scala index 3c3e89419057..26829d32653a 100644 --- a/tests/run/publicInBinary/Test_2.scala +++ b/tests/run/publicInBinary/Test_2.scala @@ -1,3 +1,4 @@ +//> using options -experimental import foo.* @main def Test: Unit = From b92e62dde6951d6dc6af810639993e239c104ecd Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 17 Apr 2024 11:53:38 +0200 Subject: [PATCH 218/465] 
Optimize checkAndAdaptExperimentalImports to avoid O(n^2) behavior --- .../src/dotty/tools/dotc/typer/Checking.scala | 44 ++++++++++++------- 1 file changed, 27 insertions(+), 17 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 9aa80311d853..a88274443271 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -821,29 +821,39 @@ object Checking { case Nil => Nil - for case imp @ Import(qual, selectors) <- trees do + def unitExperimentalLanguageImports = def isAllowedImport(sel: untpd.ImportSelector) = val name = Feature.experimental(sel.name) name == Feature.scala2macros || name == Feature.captureChecking + trees.filter { + case Import(qual, selectors) => + languageImport(qual) match + case Some(nme.experimental) => + !selectors.forall(isAllowedImport) && !ctx.owner.isInExperimentalScope + case _ => false + case _ => false + } - languageImport(qual) match - case Some(nme.experimental) - if !ctx.owner.isInExperimentalScope && !selectors.forall(isAllowedImport) => - if ctx.owner.is(Package) || ctx.owner.name.startsWith(str.REPL_SESSION_LINE) then - // mark all top-level definitions as @experimental - for tree <- nonExperimentalStats(trees) do - tree match - case tree: MemberDef => - // TODO move this out of checking (into posttyper?) 
- val sym = tree.symbol - if !sym.isExperimental then - sym.addAnnotation(ExperimentalAnnotation(i"Added by top level $imp", sym.span)) - case tree => - // There is no definition to attach the @experimental annotation - report.error("Implementation restriction: top-level `val _ = ...` is not supported with experimental language imports.", tree.srcPos) - else Feature.checkExperimentalFeature("feature local import", imp.srcPos) + if ctx.owner.is(Package) || ctx.owner.name.startsWith(str.REPL_SESSION_LINE) then + unitExperimentalLanguageImports match + case imp :: _ => + // mark all top-level definitions as @experimental + for tree <- nonExperimentalStats(trees) do + tree match + case tree: MemberDef => + // TODO move this out of checking (into posttyper?) + val sym = tree.symbol + if !sym.isExperimental then + sym.addAnnotation(ExperimentalAnnotation(i"Added by top level $imp", sym.span)) + case tree => + // There is no definition to attach the @experimental annotation + report.error("Implementation restriction: top-level `val _ = ...` is not supported with experimental language imports.", tree.srcPos) case _ => + else + for imp <- unitExperimentalLanguageImports do + Feature.checkExperimentalFeature("feature local import", imp.srcPos) + end checkAndAdaptExperimentalImports /** Checks that PolyFunction only have valid refinements. From 1ebae879cf7775b06a55b71e28b47bc80ecf1355 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 17 Apr 2024 13:15:32 +0200 Subject: [PATCH 219/465] Refactor the way we mark experimental top-level definition Move similar logic from PostTyper into `checkAndAdaptExperimentalImports`. Also make the message of `@experimental` more precise for experimental language settings. 
--- .../src/dotty/tools/dotc/config/Feature.scala | 5 ++-- .../tools/dotc/transform/PostTyper.scala | 14 +++------- .../src/dotty/tools/dotc/typer/Checking.scala | 27 ++++++++++--------- tests/neg-macros/i18677-a.check | 4 +-- tests/neg-macros/i18677-b.check | 4 +-- ...import-with-top-level-val-underscore.check | 2 +- ...perimental-message-experimental-flag.check | 2 +- 7 files changed, 26 insertions(+), 32 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index e1c137377c2b..4852eaba9334 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -170,9 +170,8 @@ object Feature: ctx.settings.experimental.value || experimentalAutoEnableFeatures.exists(enabled) - def isExperimentalEnabledBySetting(using Context): Boolean = - ctx.settings.experimental.value || - experimentalAutoEnableFeatures.exists(enabledBySetting) + def experimentalEnabledByLanguageSetting(using Context): Option[TermName] = + experimentalAutoEnableFeatures.find(enabledBySetting) def isExperimentalEnabledByImport(using Context): Boolean = experimentalAutoEnableFeatures.exists(enabledByImport) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 3d189567a5cb..d107de31829f 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -417,7 +417,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => ) } case tree: ValDef => - annotateExperimental(tree.symbol) + annotateExperimentalCompanion(tree.symbol) registerIfHasMacroAnnotations(tree) checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) @@ -426,7 +426,6 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) case tree: DefDef => 
- annotateExperimental(tree.symbol) registerIfHasMacroAnnotations(tree) checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) @@ -438,7 +437,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => val sym = tree.symbol if (sym.isClass) VarianceChecker.check(tree) - annotateExperimental(sym) + annotateExperimentalCompanion(sym) checkMacroAnnotation(sym) if sym.isOneOf(GivenOrImplicit) then sym.keepAnnotationsCarrying(thisPhase, Set(defn.CompanionClassMetaAnnot), orNoneOf = defn.MetaAnnots) @@ -584,16 +583,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => else if tpe.derivesFrom(defn.NullClass) then report.error("`erased` definition cannot be implemented with en expression of type Null", tree.srcPos) - private def annotateExperimental(sym: Symbol)(using Context): Unit = - def isTopLevelDefinitionInSource(sym: Symbol) = - !sym.is(Package) && !sym.name.isPackageObjectName && - (sym.owner.is(Package) || (sym.owner.isPackageObject && !sym.isConstructor)) + private def annotateExperimentalCompanion(sym: Symbol)(using Context): Unit = if sym.is(Module) then ExperimentalAnnotation.copy(sym.companionClass).foreach(sym.addAnnotation) - if !sym.hasAnnotation(defn.ExperimentalAnnot) - && Feature.isExperimentalEnabledBySetting && isTopLevelDefinitionInSource(sym) - then - sym.addAnnotation(ExperimentalAnnotation("Added by -experimental or -language:experimental.*", sym.span)) // It needs to run at the phase of the postTyper --- otherwise, the test of the symbols will use // the transformed denotation with added `Serializable` and `AbstractFunction1`. 
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index a88274443271..2b58039d1972 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -836,20 +836,23 @@ object Checking { } if ctx.owner.is(Package) || ctx.owner.name.startsWith(str.REPL_SESSION_LINE) then + def markTopLevelDefsAsExperimental(why: String): Unit = + for tree <- nonExperimentalStats(trees) do + tree match + case tree: MemberDef => + val sym = tree.symbol + if !sym.isExperimental then + sym.addAnnotation(ExperimentalAnnotation(s"Added by $why", sym.span)) + case tree => + // There is no definition to attach the @experimental annotation + report.error(s"Implementation restriction: top-level `val _ = ...` is not supported with $why.", tree.srcPos) unitExperimentalLanguageImports match - case imp :: _ => - // mark all top-level definitions as @experimental - for tree <- nonExperimentalStats(trees) do - tree match - case tree: MemberDef => - // TODO move this out of checking (into posttyper?) 
- val sym = tree.symbol - if !sym.isExperimental then - sym.addAnnotation(ExperimentalAnnotation(i"Added by top level $imp", sym.span)) - case tree => - // There is no definition to attach the @experimental annotation - report.error("Implementation restriction: top-level `val _ = ...` is not supported with experimental language imports.", tree.srcPos) + case imp :: _ => markTopLevelDefsAsExperimental(i"top level $imp") case _ => + Feature.experimentalEnabledByLanguageSetting match + case Some(sel) => markTopLevelDefsAsExperimental(i"-language:experimental.$sel") + case _ if ctx.settings.experimental.value => markTopLevelDefsAsExperimental(i"-experimental") + case _ => else for imp <- unitExperimentalLanguageImports do Feature.checkExperimentalFeature("feature local import", imp.srcPos) diff --git a/tests/neg-macros/i18677-a.check b/tests/neg-macros/i18677-a.check index 07f0a66cec81..d190ce36318a 100644 --- a/tests/neg-macros/i18677-a.check +++ b/tests/neg-macros/i18677-a.check @@ -7,10 +7,10 @@ |The tree does not conform to the compiler's tree invariants. 
| |Macro was: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental or -language:experimental.*") @extendFoo class AFoo() + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() | |The macro returned: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental or -language:experimental.*") @extendFoo class AFoo() extends Foo + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() extends Foo | |Error: |assertion failed: Parents of class symbol differs from the parents in the tree for class AFoo diff --git a/tests/neg-macros/i18677-b.check b/tests/neg-macros/i18677-b.check index 28cbc0521c30..eca2bdcde726 100644 --- a/tests/neg-macros/i18677-b.check +++ b/tests/neg-macros/i18677-b.check @@ -7,10 +7,10 @@ |The tree does not conform to the compiler's tree invariants. 
| |Macro was: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental or -language:experimental.*") @extendFoo class AFoo() + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() | |The macro returned: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental or -language:experimental.*") @extendFoo class AFoo() extends Foo + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() extends Foo | |Error: |assertion failed: Parents of class symbol differs from the parents in the tree for class AFoo diff --git a/tests/neg/experimental-import-with-top-level-val-underscore.check b/tests/neg/experimental-import-with-top-level-val-underscore.check index 7d595949cdeb..505492583abb 100644 --- a/tests/neg/experimental-import-with-top-level-val-underscore.check +++ b/tests/neg/experimental-import-with-top-level-val-underscore.check @@ -1,6 +1,6 @@ -- Error: tests/neg/experimental-import-with-top-level-val-underscore.scala:4:4 ---------------------------------------- 4 |val _ = // error | ^ - | Implementation restriction: top-level `val _ = ...` is not supported with experimental language imports. + |Implementation restriction: top-level `val _ = ...` is not supported with top level import language.experimental.erasedDefinitions. 
5 | println("Hello, world!") 6 | 42 diff --git a/tests/neg/experimental-message-experimental-flag.check b/tests/neg/experimental-message-experimental-flag.check index 8d2fdb340c75..69174eaa789f 100644 --- a/tests/neg/experimental-message-experimental-flag.check +++ b/tests/neg/experimental-message-experimental-flag.check @@ -2,7 +2,7 @@ -- Error: tests/neg/experimental-message-experimental-flag/Test_2.scala:3:10 ------------------------------------------- 3 |def g() = f() // error | ^ - | method f is marked @experimental: Added by -experimental or -language:experimental.* + | method f is marked @experimental: Added by -experimental | | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or From b21523bc78de2f2820b34698b358e04fa71451c8 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Mon, 29 Apr 2024 11:40:01 +0200 Subject: [PATCH 220/465] Remove restriction on `val _ = ..` with top-level experimental imports --- compiler/src/dotty/tools/dotc/typer/Checking.scala | 5 ++--- .../experimental-import-with-top-level-val-underscore.check | 6 ------ tests/pos-custom-args/captures/try.scala | 2 +- .../experimental-import-with-top-level-val-underscore.scala | 6 ++++-- 4 files changed, 7 insertions(+), 12 deletions(-) delete mode 100644 tests/neg/experimental-import-with-top-level-val-underscore.check rename tests/{neg => pos}/experimental-import-with-top-level-val-underscore.scala (55%) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 2b58039d1972..dbac925fe05c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -843,9 +843,8 @@ object Checking { val sym = tree.symbol if !sym.isExperimental then sym.addAnnotation(ExperimentalAnnotation(s"Added by $why", sym.span)) - case tree => - // There is no definition to attach the @experimental annotation - 
report.error(s"Implementation restriction: top-level `val _ = ...` is not supported with $why.", tree.srcPos) + case _ => + // statements from a `val _ = ...` unitExperimentalLanguageImports match case imp :: _ => markTopLevelDefsAsExperimental(i"top level $imp") case _ => diff --git a/tests/neg/experimental-import-with-top-level-val-underscore.check b/tests/neg/experimental-import-with-top-level-val-underscore.check deleted file mode 100644 index 505492583abb..000000000000 --- a/tests/neg/experimental-import-with-top-level-val-underscore.check +++ /dev/null @@ -1,6 +0,0 @@ --- Error: tests/neg/experimental-import-with-top-level-val-underscore.scala:4:4 ---------------------------------------- -4 |val _ = // error - | ^ - |Implementation restriction: top-level `val _ = ...` is not supported with top level import language.experimental.erasedDefinitions. -5 | println("Hello, world!") -6 | 42 diff --git a/tests/pos-custom-args/captures/try.scala b/tests/pos-custom-args/captures/try.scala index 8ecea9e00359..05c41be69001 100644 --- a/tests/pos-custom-args/captures/try.scala +++ b/tests/pos-custom-args/captures/try.scala @@ -18,7 +18,7 @@ def handle[E <: Exception, R](op: (erased CanThrow[E]) -> R)(handler: E -> R): R try op(x) catch case ex: E => handler(ex) -val bar = handle { (erased x) => +val _ = handle { (erased x) => if true then raise(new Exception)(using x) 22 diff --git a/tests/neg/experimental-import-with-top-level-val-underscore.scala b/tests/pos/experimental-import-with-top-level-val-underscore.scala similarity index 55% rename from tests/neg/experimental-import-with-top-level-val-underscore.scala rename to tests/pos/experimental-import-with-top-level-val-underscore.scala index 69db6892b5ee..306979634d71 100644 --- a/tests/neg/experimental-import-with-top-level-val-underscore.scala +++ b/tests/pos/experimental-import-with-top-level-val-underscore.scala @@ -1,6 +1,8 @@ import language.experimental.erasedDefinitions -val _ = // error - println("Hello, 
world!") +def test() = () + +val _ = + test() 42 From 32f5a1747dbcc9a2f213b8bfcfa88865163ca43b Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 30 Apr 2024 09:33:02 +0200 Subject: [PATCH 221/465] Compute non-experimental top-level definition from symbols --- .../src/dotty/tools/dotc/typer/Checking.scala | 45 +++++++++---------- 1 file changed, 21 insertions(+), 24 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index dbac925fe05c..5d2f2a5a1088 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -804,22 +804,24 @@ object Checking { * */ def checkAndAdaptExperimentalImports(trees: List[Tree])(using Context): Unit = - def nonExperimentalStats(trees: List[Tree]): List[Tree] = trees match - case (_: ImportOrExport | EmptyTree) :: rest => - nonExperimentalStats(rest) - case (tree @ TypeDef(_, impl: Template)) :: rest if tree.symbol.isPackageObject => - nonExperimentalStats(impl.body) ::: nonExperimentalStats(rest) - case (tree: PackageDef) :: rest => - nonExperimentalStats(tree.stats) ::: nonExperimentalStats(rest) - case (tree: MemberDef) :: rest => - if tree.symbol.isExperimental || tree.symbol.is(Synthetic) then - nonExperimentalStats(rest) - else - tree :: nonExperimentalStats(rest) - case tree :: rest => - tree :: nonExperimentalStats(rest) - case Nil => - Nil + def nonExperimentalTopLevelDefs(pack: Symbol): Iterator[Symbol] = + def isNonExperimentalTopLevelDefinition(sym: Symbol) = + !sym.isExperimental + && sym.source == ctx.compilationUnit.source + && !sym.isConstructor // not constructor of package object + && !sym.is(Package) && !sym.isPackageObject && !sym.name.endsWith(str.TOPLEVEL_SUFFIX) + + val packageMembers = + pack.info.decls + .toList.iterator + .filter(isNonExperimentalTopLevelDefinition) + val packageObjectMembers = + pack.info.decls + .toList.iterator + .filter(sym => sym.isClass && 
(sym.is(Package) || sym.isPackageObject)) + .flatMap(nonExperimentalTopLevelDefs) + + packageMembers ++ packageObjectMembers def unitExperimentalLanguageImports = def isAllowedImport(sel: untpd.ImportSelector) = @@ -837,14 +839,9 @@ object Checking { if ctx.owner.is(Package) || ctx.owner.name.startsWith(str.REPL_SESSION_LINE) then def markTopLevelDefsAsExperimental(why: String): Unit = - for tree <- nonExperimentalStats(trees) do - tree match - case tree: MemberDef => - val sym = tree.symbol - if !sym.isExperimental then - sym.addAnnotation(ExperimentalAnnotation(s"Added by $why", sym.span)) - case _ => - // statements from a `val _ = ...` + for sym <- nonExperimentalTopLevelDefs(ctx.owner) do + sym.addAnnotation(ExperimentalAnnotation(s"Added by $why", sym.span)) + unitExperimentalLanguageImports match case imp :: _ => markTopLevelDefsAsExperimental(i"top level $imp") case _ => From df85c6ef6d0f6453d2171367279d8c651ed6e4dd Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 30 Apr 2024 11:33:47 +0200 Subject: [PATCH 222/465] Remove no-op `-Yno-experimental` --- compiler/src/dotty/tools/dotc/config/ScalaSettings.scala | 1 - project/Build.scala | 4 ---- tests/init-global/pos/global-region1.scala | 2 +- tests/init-global/warn/i18628_3.scala | 2 +- tests/neg-macros/i18677-a/Macro_1.scala | 2 +- tests/neg-macros/i18677-a/Test_2.scala | 2 +- tests/neg-macros/i18677-b/Macro_1.scala | 2 +- tests/neg-macros/i18677-b/Test_2.scala | 2 +- tests/neg-macros/i19676/Macro_1.scala | 2 +- tests/neg-macros/i19676/Test_2.scala | 2 +- tests/neg-macros/i19842-a/Macro.scala | 2 +- tests/neg-macros/i19842-b/Macro.scala | 2 +- tests/neg-macros/macro-experimental.scala | 2 +- tests/neg-macros/newClassExtendsNoParents/Macro_1.scala | 2 +- tests/neg-macros/newClassExtendsNoParents/Test_2.scala | 2 +- tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala | 2 +- tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala | 2 +- tests/neg/14034.scala | 2 +- tests/neg/expeimental-flag.scala | 
2 +- tests/neg/experimental-2.scala | 2 +- tests/neg/experimental-imports.scala | 2 +- tests/neg/experimental-message-experimental-flag/Lib_1.scala | 2 +- tests/neg/experimental-message-experimental-flag/Test_2.scala | 2 +- tests/neg/experimental-message.scala | 2 +- tests/neg/experimental-nested-imports-2.scala | 2 +- tests/neg/experimental-nested-imports-3.scala | 2 +- tests/neg/experimental-nested-imports.scala | 2 +- tests/neg/experimental.scala | 2 +- tests/neg/experimentalAnnot.scala | 2 +- tests/neg/experimentalCaseClass.scala | 2 +- tests/neg/experimentalDefaultParams.scala | 2 +- tests/neg/experimentalEnum.scala | 2 +- tests/neg/experimentalInline.scala | 2 +- tests/neg/experimentalInline2.scala | 2 +- tests/neg/experimentalMembers.scala | 2 +- tests/neg/experimentalOverloads.scala | 2 +- tests/neg/experimentalOverride.scala | 2 +- tests/neg/experimentalRHS.scala | 2 +- tests/neg/experimentalSam.scala | 2 +- tests/neg/experimentalSignature.scala | 2 +- tests/neg/experimentalTerms.scala | 2 +- tests/neg/experimentalTests.scala | 2 +- tests/neg/experimentalType.scala | 2 +- tests/neg/experimentalTypeRHS.scala | 2 +- tests/neg/experimentalTypes2.scala | 2 +- tests/neg/experimentalUnapply.scala | 2 +- tests/neg/i13091.scala | 2 +- tests/neg/i13848.scala | 2 +- tests/neg/i17292.scala | 2 +- tests/neg/i17292b.scala | 2 +- tests/neg/inline-unstable-accessors.scala | 2 +- tests/neg/publicInBinaryOverride.scala | 2 +- tests/neg/use-experimental-def.scala | 2 +- tests/pos-macros/annot-in-object/Macro_1.scala | 2 +- tests/pos-macros/annot-in-object/Test_2.scala | 2 +- tests/pos-macros/annot-suspend/Macro_1.scala | 2 +- tests/pos-macros/annot-suspend/Test_2.scala | 2 +- tests/pos-macros/annot-then-inline/Macro_1.scala | 2 +- tests/pos-macros/annot-then-inline/Test_2.scala | 2 +- tests/pos-macros/i15413/Macro_1.scala | 2 +- tests/pos-macros/i15413/Test_2.scala | 2 +- tests/pos-macros/i15413b/Macro_1.scala | 2 +- tests/pos-macros/i15413b/Test_2.scala | 2 +- 
tests/pos-macros/i19526b/Test.scala | 2 +- tests/pos/TupleReverseOnto.scala | 2 +- tests/pos/cc-experimental.scala | 2 +- tests/pos/dotty-experimental.scala | 2 +- tests/pos/expeimental-flag-with-lang-feature-2.scala | 2 +- tests/pos/expeimental-flag-with-lang-feature.scala | 2 +- tests/pos/experimental-flag.scala | 2 +- tests/pos/experimental-imports-empty.scala | 2 +- tests/pos/experimental-imports-top.scala | 2 +- tests/pos/experimental-package-imports.scala | 2 +- tests/pos/i13848.scala | 2 +- tests/pos/i15133a.scala | 2 +- tests/pos/i15133b.scala | 2 +- tests/pos/i16091.scala | 2 +- tests/pos/i20206.scala | 2 +- tests/pos/i7851.scala | 2 +- tests/pos/i8945.scala | 2 +- tests/pos/tupled-function-instances.scala | 2 +- tests/run-deep-subtype/Tuple-reverse.scala | 2 +- tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala | 2 +- tests/run-macros/Xmacro-settings-simple/M1.scala | 2 +- tests/run-macros/annot-add-global-class/Macro_1.scala | 2 +- tests/run-macros/annot-add-global-class/Test_2.scala | 2 +- tests/run-macros/annot-add-global-object/Macro_1.scala | 2 +- tests/run-macros/annot-add-global-object/Test_2.scala | 2 +- tests/run-macros/annot-add-local-class/Macro_1.scala | 2 +- tests/run-macros/annot-add-local-class/Test_2.scala | 2 +- tests/run-macros/annot-add-local-object/Macro_1.scala | 2 +- tests/run-macros/annot-add-local-object/Test_2.scala | 2 +- tests/run-macros/annot-add-nested-class/Macro_1.scala | 2 +- tests/run-macros/annot-add-nested-class/Test_2.scala | 2 +- tests/run-macros/annot-add-nested-object/Macro_1.scala | 2 +- tests/run-macros/annot-add-nested-object/Test_2.scala | 2 +- tests/run-macros/annot-annot-order/Macro_1.scala | 2 +- tests/run-macros/annot-annot-order/Test_2.scala | 2 +- tests/run-macros/annot-bind/Macro_1.scala | 2 +- tests/run-macros/annot-bind/Test_2.scala | 2 +- tests/run-macros/annot-changeVal/Macro_1.scala | 2 +- tests/run-macros/annot-changeVal/Test_2.scala | 2 +- tests/run-macros/annot-concrete-class/Macro_1.scala 
| 2 +- tests/run-macros/annot-concrete-class/Test_2.scala | 2 +- tests/run-macros/annot-export/Macro_1.scala | 2 +- tests/run-macros/annot-export/Test_2.scala | 2 +- tests/run-macros/annot-gen2/Macro_1.scala | 2 +- tests/run-macros/annot-gen2/Macro_2.scala | 2 +- tests/run-macros/annot-gen2/Test_3.scala | 2 +- tests/run-macros/annot-generate/Macro_1.scala | 2 +- tests/run-macros/annot-generate/Macro_2.scala | 2 +- tests/run-macros/annot-generate/Test_3.scala | 2 +- tests/run-macros/annot-macro-main/Macro_1.scala | 2 +- tests/run-macros/annot-macro-main/Test_2.scala | 2 +- tests/run-macros/annot-memo/Macro_1.scala | 2 +- tests/run-macros/annot-memo/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-add-def/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-add-def/Test_2.scala | 2 +- .../run-macros/annot-mod-class-add-inner-class/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala | 2 +- .../run-macros/annot-mod-class-add-local-class/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-add-local-class/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-add-val/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-add-val/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-add-var/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-add-var/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-data/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-data/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-equals/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-equals/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-mod-def/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-mod-def/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-mod-val/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-mod-val/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-override-def/Macro_1.scala | 2 
+- tests/run-macros/annot-mod-class-override-def/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-override-val/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-override-val/Test_2.scala | 2 +- tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala | 2 +- tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala | 2 +- tests/run-macros/annot-result-order/Macro_1.scala | 2 +- tests/run-macros/annot-result-order/Test_2.scala | 2 +- tests/run-macros/annot-simple-fib/Macro_1.scala | 2 +- tests/run-macros/annot-simple-fib/Test_2.scala | 2 +- tests/run-macros/annot-unrollLast/Macro_1.scala | 2 +- tests/run-macros/annot-unrollLast/Test_2.scala | 2 +- tests/run-macros/i11685/Macro_1.scala | 2 +- tests/run-macros/i11685/Test_2.scala | 2 +- tests/run-macros/i12021/Macro_1.scala | 2 +- tests/run-macros/i16734b/Macro_1.scala | 2 +- tests/run-macros/i16734b/Test_2.scala | 2 +- tests/run-macros/i19676/Macro_1.scala | 2 +- tests/run-macros/i19676/Test_2.scala | 2 +- tests/run-macros/newClass/Macro_1.scala | 2 +- tests/run-macros/newClass/Test_2.scala | 2 +- tests/run-macros/newClassExtends/Macro_1.scala | 2 +- tests/run-macros/newClassExtends/Test_2.scala | 2 +- tests/run-macros/newClassExtendsClassParams/Macro_1.scala | 2 +- tests/run-macros/newClassExtendsClassParams/Test_2.scala | 2 +- tests/run-macros/newClassSelf/Macro_1.scala | 2 +- tests/run-macros/newClassSelf/Test_2.scala | 2 +- tests/run/i13215.scala | 2 +- tests/run/noProtectedSuper.scala | 2 +- tests/run/publicInBinary/Lib_1.scala | 2 +- tests/run/quotes-add-erased/Macro_1.scala | 2 +- tests/run/quotes-reflection/Macros_1.scala | 2 +- tests/run/tupled-function-andThen.scala | 2 +- tests/run/tupled-function-apply.scala | 2 +- tests/run/tupled-function-compose.scala | 2 +- tests/run/tupled-function-extension-method.scala | 2 +- tests/run/tupled-function-tupled.scala | 2 +- tests/run/tupled-function-untupled.scala | 2 +- 174 files changed, 172 insertions(+), 177 deletions(-) diff --git 
a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index e245c8ff10e7..94b6d8023c34 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -406,7 +406,6 @@ private sealed trait YSettings: val YretainTrees: Setting[Boolean] = BooleanSetting(ForkSetting, "Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting(ForkSetting, "Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty.") - val YnoExperimental: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-experimental", "Disable experimental language features by default in NIGHTLY/SNAPSHOT versions of the compiler (deprecated, no-op).") val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles") diff --git a/project/Build.scala b/project/Build.scala index b2c88d1d6ae0..6985560c90b7 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1083,7 +1083,6 @@ object Build { Compile / doc / scalacOptions += "-Ydocument-synthetic-types", scalacOptions += "-Ycompile-scala2-library", scalacOptions += "-Yscala2Unpickler:never", - scalacOptions += "-Yno-experimental", scalacOptions -= "-Xfatal-warnings", Compile / compile / logLevel.withRank(KeyRanks.Invisible) := Level.Error, ivyConfigurations += 
SourceDeps.hide, @@ -2173,9 +2172,6 @@ object Build { settings( versionScheme := Some("semver-spec"), libraryDependencies += "org.scala-lang" % "scala-library" % stdlibVersion, - // Make sure we do not refer to experimental features outside an experimental scope. - // In other words, disable NIGHTLY/SNAPSHOT experimental scope. - scalacOptions += "-Yno-experimental", ). settings(dottyLibrarySettings) if (mode == Bootstrapped) { diff --git a/tests/init-global/pos/global-region1.scala b/tests/init-global/pos/global-region1.scala index 0797d22aa5bf..85e925967337 100644 --- a/tests/init-global/pos/global-region1.scala +++ b/tests/init-global/pos/global-region1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.init.region diff --git a/tests/init-global/warn/i18628_3.scala b/tests/init-global/warn/i18628_3.scala index e7a057ded0d8..84ab1d9f6ef3 100644 --- a/tests/init-global/warn/i18628_3.scala +++ b/tests/init-global/warn/i18628_3.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.init.widen diff --git a/tests/neg-macros/i18677-a/Macro_1.scala b/tests/neg-macros/i18677-a/Macro_1.scala index ecd90869e081..69f2a521e012 100644 --- a/tests/neg-macros/i18677-a/Macro_1.scala +++ b/tests/neg-macros/i18677-a/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import annotation.MacroAnnotation import quoted.* diff --git a/tests/neg-macros/i18677-a/Test_2.scala b/tests/neg-macros/i18677-a/Test_2.scala index 5fb6680df0ca..511cad28f4fb 100644 --- a/tests/neg-macros/i18677-a/Test_2.scala +++ b/tests/neg-macros/i18677-a/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @extendFoo class AFoo // error \ No newline at end of file diff --git a/tests/neg-macros/i18677-b/Macro_1.scala 
b/tests/neg-macros/i18677-b/Macro_1.scala index 9895b15942b9..02c683b00b4b 100644 --- a/tests/neg-macros/i18677-b/Macro_1.scala +++ b/tests/neg-macros/i18677-b/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import annotation.MacroAnnotation import quoted.* diff --git a/tests/neg-macros/i18677-b/Test_2.scala b/tests/neg-macros/i18677-b/Test_2.scala index 5fb6680df0ca..511cad28f4fb 100644 --- a/tests/neg-macros/i18677-b/Test_2.scala +++ b/tests/neg-macros/i18677-b/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @extendFoo class AFoo // error \ No newline at end of file diff --git a/tests/neg-macros/i19676/Macro_1.scala b/tests/neg-macros/i19676/Macro_1.scala index a5598c448a1c..44988760f535 100644 --- a/tests/neg-macros/i19676/Macro_1.scala +++ b/tests/neg-macros/i19676/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.MacroAnnotation import scala.quoted.* diff --git a/tests/neg-macros/i19676/Test_2.scala b/tests/neg-macros/i19676/Test_2.scala index 0142d263f103..b80589adaf35 100644 --- a/tests/neg-macros/i19676/Test_2.scala +++ b/tests/neg-macros/i19676/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @buggy // error case class Foo() \ No newline at end of file diff --git a/tests/neg-macros/i19842-a/Macro.scala b/tests/neg-macros/i19842-a/Macro.scala index 936e08b02592..18a1bc16045f 100644 --- a/tests/neg-macros/i19842-a/Macro.scala +++ b/tests/neg-macros/i19842-a/Macro.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, targetName} import scala.quoted.* diff --git a/tests/neg-macros/i19842-b/Macro.scala b/tests/neg-macros/i19842-b/Macro.scala index d46ff758342b..f1399d328f49 100644 --- 
a/tests/neg-macros/i19842-b/Macro.scala +++ b/tests/neg-macros/i19842-b/Macro.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, targetName} import scala.quoted.* diff --git a/tests/neg-macros/macro-experimental.scala b/tests/neg-macros/macro-experimental.scala index eaa822d4b541..6dc552d74102 100644 --- a/tests/neg-macros/macro-experimental.scala +++ b/tests/neg-macros/macro-experimental.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.quoted.* import scala.annotation.experimental diff --git a/tests/neg-macros/newClassExtendsNoParents/Macro_1.scala b/tests/neg-macros/newClassExtendsNoParents/Macro_1.scala index 663e5e959e87..0a64bf320b72 100644 --- a/tests/neg-macros/newClassExtendsNoParents/Macro_1.scala +++ b/tests/neg-macros/newClassExtendsNoParents/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/neg-macros/newClassExtendsNoParents/Test_2.scala b/tests/neg-macros/newClassExtendsNoParents/Test_2.scala index 733879441e91..da8b3d84b522 100644 --- a/tests/neg-macros/newClassExtendsNoParents/Test_2.scala +++ b/tests/neg-macros/newClassExtendsNoParents/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental def test: Any = makeClass("foo") // error diff --git a/tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala b/tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala index a6a4753185ba..6ced5bf34462 100644 --- a/tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala +++ b/tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala b/tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala index 
b9aaa2a87c60..7bb465eae617 100644 --- a/tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala +++ b/tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental def test: Foo = makeClass("foo") // error diff --git a/tests/neg/14034.scala b/tests/neg/14034.scala index bdb09a011777..bbb633726523 100644 --- a/tests/neg/14034.scala +++ b/tests/neg/14034.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/expeimental-flag.scala b/tests/neg/expeimental-flag.scala index 8b2e729ea8da..dff7efa92c50 100644 --- a/tests/neg/expeimental-flag.scala +++ b/tests/neg/expeimental-flag.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimental-2.scala b/tests/neg/experimental-2.scala index f2d0262d83f5..38e814247031 100644 --- a/tests/neg/experimental-2.scala +++ b/tests/neg/experimental-2.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + class Test7 { import scala.language.experimental diff --git a/tests/neg/experimental-imports.scala b/tests/neg/experimental-imports.scala index 9b79fbabcd15..e3a91be45f08 100644 --- a/tests/neg/experimental-imports.scala +++ b/tests/neg/experimental-imports.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/experimental-message-experimental-flag/Lib_1.scala b/tests/neg/experimental-message-experimental-flag/Lib_1.scala index dc0d774092a2..d95aaadf2fe5 100644 --- a/tests/neg/experimental-message-experimental-flag/Lib_1.scala +++ b/tests/neg/experimental-message-experimental-flag/Lib_1.scala @@ -1,3 +1,3 @@ -//> using options -Yno-experimental -experimental +//> using options -experimental def f() = ??? 
diff --git a/tests/neg/experimental-message-experimental-flag/Test_2.scala b/tests/neg/experimental-message-experimental-flag/Test_2.scala index 3c6309486778..a039d32081bc 100644 --- a/tests/neg/experimental-message-experimental-flag/Test_2.scala +++ b/tests/neg/experimental-message-experimental-flag/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -Yno-experimental + def g() = f() // error diff --git a/tests/neg/experimental-message.scala b/tests/neg/experimental-message.scala index 1fe9b7f0d793..8521873a6800 100644 --- a/tests/neg/experimental-message.scala +++ b/tests/neg/experimental-message.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimental-nested-imports-2.scala b/tests/neg/experimental-nested-imports-2.scala index 9c99e0729a40..f969a2a4b5a6 100644 --- a/tests/neg/experimental-nested-imports-2.scala +++ b/tests/neg/experimental-nested-imports-2.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/experimental-nested-imports-3.scala b/tests/neg/experimental-nested-imports-3.scala index f6f4a1360e6b..c3fbbc3676a5 100644 --- a/tests/neg/experimental-nested-imports-3.scala +++ b/tests/neg/experimental-nested-imports-3.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/experimental-nested-imports.scala b/tests/neg/experimental-nested-imports.scala index 417f4bf4d9c5..b2a8f29d76eb 100644 --- a/tests/neg/experimental-nested-imports.scala +++ b/tests/neg/experimental-nested-imports.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/experimental.scala b/tests/neg/experimental.scala index 22f4b5a1a852..f35a7ca19d7f 100644 --- a/tests/neg/experimental.scala +++ b/tests/neg/experimental.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + class Test0 { import 
language.experimental.namedTypeArguments // error diff --git a/tests/neg/experimentalAnnot.scala b/tests/neg/experimentalAnnot.scala index e50d9165b5a4..e14b35fffc5d 100644 --- a/tests/neg/experimentalAnnot.scala +++ b/tests/neg/experimentalAnnot.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalCaseClass.scala b/tests/neg/experimentalCaseClass.scala index 383824954041..1d72738c6f91 100644 --- a/tests/neg/experimentalCaseClass.scala +++ b/tests/neg/experimentalCaseClass.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalDefaultParams.scala b/tests/neg/experimentalDefaultParams.scala index fb9ffa282e60..f2648e0eaae6 100644 --- a/tests/neg/experimentalDefaultParams.scala +++ b/tests/neg/experimentalDefaultParams.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalEnum.scala b/tests/neg/experimentalEnum.scala index 14ced7ca22bb..9ab7013a147e 100644 --- a/tests/neg/experimentalEnum.scala +++ b/tests/neg/experimentalEnum.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalInline.scala b/tests/neg/experimentalInline.scala index b837ad498492..ffe4f5271749 100644 --- a/tests/neg/experimentalInline.scala +++ b/tests/neg/experimentalInline.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalInline2.scala b/tests/neg/experimentalInline2.scala index 8bf6a82fba2e..575947e3216c 100644 --- a/tests/neg/experimentalInline2.scala +++ b/tests/neg/experimentalInline2.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalMembers.scala b/tests/neg/experimentalMembers.scala index fe4adbfcf44d..aae0e59bb9e5 
100644 --- a/tests/neg/experimentalMembers.scala +++ b/tests/neg/experimentalMembers.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalOverloads.scala b/tests/neg/experimentalOverloads.scala index 570dcc7c79ce..1bc40bbe7631 100644 --- a/tests/neg/experimentalOverloads.scala +++ b/tests/neg/experimentalOverloads.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalOverride.scala b/tests/neg/experimentalOverride.scala index adc8b919dc6b..2372242f6577 100644 --- a/tests/neg/experimentalOverride.scala +++ b/tests/neg/experimentalOverride.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalRHS.scala b/tests/neg/experimentalRHS.scala index cffa35ed4ba9..a682a64ef1e0 100644 --- a/tests/neg/experimentalRHS.scala +++ b/tests/neg/experimentalRHS.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalSam.scala b/tests/neg/experimentalSam.scala index ab86cabc6816..e262e3202261 100644 --- a/tests/neg/experimentalSam.scala +++ b/tests/neg/experimentalSam.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalSignature.scala b/tests/neg/experimentalSignature.scala index 479f9140d0a0..3c8dd3d97d33 100644 --- a/tests/neg/experimentalSignature.scala +++ b/tests/neg/experimentalSignature.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalTerms.scala b/tests/neg/experimentalTerms.scala index 10776e78bae7..6a9aa7a7e95b 100644 --- a/tests/neg/experimentalTerms.scala +++ b/tests/neg/experimentalTerms.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git 
a/tests/neg/experimentalTests.scala b/tests/neg/experimentalTests.scala index a45809c099c4..df8dcf370c93 100644 --- a/tests/neg/experimentalTests.scala +++ b/tests/neg/experimentalTests.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalType.scala b/tests/neg/experimentalType.scala index 22bdecf415e3..b380d1d6a36f 100644 --- a/tests/neg/experimentalType.scala +++ b/tests/neg/experimentalType.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalTypeRHS.scala b/tests/neg/experimentalTypeRHS.scala index f5801ea2f11d..e995e0736902 100644 --- a/tests/neg/experimentalTypeRHS.scala +++ b/tests/neg/experimentalTypeRHS.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalTypes2.scala b/tests/neg/experimentalTypes2.scala index 3d042792c4de..c175eab4c389 100644 --- a/tests/neg/experimentalTypes2.scala +++ b/tests/neg/experimentalTypes2.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalUnapply.scala b/tests/neg/experimentalUnapply.scala index 049577bf63fd..89a2e2950f45 100644 --- a/tests/neg/experimentalUnapply.scala +++ b/tests/neg/experimentalUnapply.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/i13091.scala b/tests/neg/i13091.scala index 208d05d6886d..549fdf6d0fae 100644 --- a/tests/neg/i13091.scala +++ b/tests/neg/i13091.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/i13848.scala b/tests/neg/i13848.scala index fcc519c47592..298985e4da9e 100644 --- a/tests/neg/i13848.scala +++ b/tests/neg/i13848.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git 
a/tests/neg/i17292.scala b/tests/neg/i17292.scala index 9cf7fc7b1c30..e99cb5e9a75e 100644 --- a/tests/neg/i17292.scala +++ b/tests/neg/i17292.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/i17292b.scala b/tests/neg/i17292b.scala index b89a64439699..08fbda18a886 100644 --- a/tests/neg/i17292b.scala +++ b/tests/neg/i17292b.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental type A[T] = Int diff --git a/tests/neg/inline-unstable-accessors.scala b/tests/neg/inline-unstable-accessors.scala index d40f69819a2e..c02097f1921a 100644 --- a/tests/neg/inline-unstable-accessors.scala +++ b/tests/neg/inline-unstable-accessors.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors -explain +//> using options -experimental -Werror -WunstableInlineAccessors -explain package foo import scala.annotation.publicInBinary diff --git a/tests/neg/publicInBinaryOverride.scala b/tests/neg/publicInBinaryOverride.scala index 342e4773c56f..6529bf09736a 100644 --- a/tests/neg/publicInBinaryOverride.scala +++ b/tests/neg/publicInBinaryOverride.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.publicInBinary diff --git a/tests/neg/use-experimental-def.scala b/tests/neg/use-experimental-def.scala index 68ce0d6987ab..ff7b95567a60 100644 --- a/tests/neg/use-experimental-def.scala +++ b/tests/neg/use-experimental-def.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/pos-macros/annot-in-object/Macro_1.scala b/tests/pos-macros/annot-in-object/Macro_1.scala index 2086f9ec4cb1..cc2727fc6cce 100644 --- a/tests/pos-macros/annot-in-object/Macro_1.scala +++ b/tests/pos-macros/annot-in-object/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental 
import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/pos-macros/annot-in-object/Test_2.scala b/tests/pos-macros/annot-in-object/Test_2.scala index 146b426b0bf8..382685873adb 100644 --- a/tests/pos-macros/annot-in-object/Test_2.scala +++ b/tests/pos-macros/annot-in-object/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @Foo.void @Foo.Bar.void diff --git a/tests/pos-macros/annot-suspend/Macro_1.scala b/tests/pos-macros/annot-suspend/Macro_1.scala index 4ae96186b777..7a7cbdb10f01 100644 --- a/tests/pos-macros/annot-suspend/Macro_1.scala +++ b/tests/pos-macros/annot-suspend/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/pos-macros/annot-suspend/Test_2.scala b/tests/pos-macros/annot-suspend/Test_2.scala index 8014af03235c..4fdbcf919373 100644 --- a/tests/pos-macros/annot-suspend/Test_2.scala +++ b/tests/pos-macros/annot-suspend/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @void def test = 0 diff --git a/tests/pos-macros/annot-then-inline/Macro_1.scala b/tests/pos-macros/annot-then-inline/Macro_1.scala index f3226e24d4da..aef00d9cce58 100644 --- a/tests/pos-macros/annot-then-inline/Macro_1.scala +++ b/tests/pos-macros/annot-then-inline/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/pos-macros/annot-then-inline/Test_2.scala b/tests/pos-macros/annot-then-inline/Test_2.scala index 99cb2e4e4d5b..1798fde62676 100644 --- a/tests/pos-macros/annot-then-inline/Test_2.scala +++ b/tests/pos-macros/annot-then-inline/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental 
-Yno-experimental +//> using options -experimental @useInlinedIdentity def test = 0 diff --git a/tests/pos-macros/i15413/Macro_1.scala b/tests/pos-macros/i15413/Macro_1.scala index 6166a5d6f55d..f451742dff9e 100644 --- a/tests/pos-macros/i15413/Macro_1.scala +++ b/tests/pos-macros/i15413/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors +//> using options -experimental -Werror -WunstableInlineAccessors import scala.quoted.* import scala.annotation.publicInBinary diff --git a/tests/pos-macros/i15413/Test_2.scala b/tests/pos-macros/i15413/Test_2.scala index 7f8c90161c66..0cdfb8fc8186 100644 --- a/tests/pos-macros/i15413/Test_2.scala +++ b/tests/pos-macros/i15413/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental def test = new Macro().foo diff --git a/tests/pos-macros/i15413b/Macro_1.scala b/tests/pos-macros/i15413b/Macro_1.scala index b8a91eaba0fe..df27b6267915 100644 --- a/tests/pos-macros/i15413b/Macro_1.scala +++ b/tests/pos-macros/i15413b/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors +//> using options -experimental -Werror -WunstableInlineAccessors package bar diff --git a/tests/pos-macros/i15413b/Test_2.scala b/tests/pos-macros/i15413b/Test_2.scala index f03559b9adb9..6c5b86487cd6 100644 --- a/tests/pos-macros/i15413b/Test_2.scala +++ b/tests/pos-macros/i15413b/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental def test = bar.foo diff --git a/tests/pos-macros/i19526b/Test.scala b/tests/pos-macros/i19526b/Test.scala index ede2db53c5b0..96274091218f 100644 --- a/tests/pos-macros/i19526b/Test.scala +++ b/tests/pos-macros/i19526b/Test.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package crash.test diff --git a/tests/pos/TupleReverseOnto.scala 
b/tests/pos/TupleReverseOnto.scala index eca8a3e3033c..4d4a40f60cec 100644 --- a/tests/pos/TupleReverseOnto.scala +++ b/tests/pos/TupleReverseOnto.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.Tuple.* diff --git a/tests/pos/cc-experimental.scala b/tests/pos/cc-experimental.scala index 4ee1f6732356..4027779a9036 100644 --- a/tests/pos/cc-experimental.scala +++ b/tests/pos/cc-experimental.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + package scala.runtime diff --git a/tests/pos/dotty-experimental.scala b/tests/pos/dotty-experimental.scala index 9cffddc0b8ba..ee9a84a1b497 100644 --- a/tests/pos/dotty-experimental.scala +++ b/tests/pos/dotty-experimental.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import language.experimental.captureChecking object test { diff --git a/tests/pos/expeimental-flag-with-lang-feature-2.scala b/tests/pos/expeimental-flag-with-lang-feature-2.scala index 77f35f663576..7985f9db3a29 100644 --- a/tests/pos/expeimental-flag-with-lang-feature-2.scala +++ b/tests/pos/expeimental-flag-with-lang-feature-2.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.language.experimental.namedTypeArguments diff --git a/tests/pos/expeimental-flag-with-lang-feature.scala b/tests/pos/expeimental-flag-with-lang-feature.scala index 9cfb716b1015..96069c332e02 100644 --- a/tests/pos/expeimental-flag-with-lang-feature.scala +++ b/tests/pos/expeimental-flag-with-lang-feature.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.language.experimental.erasedDefinitions import scala.language.experimental.namedTypeArguments diff --git a/tests/pos/experimental-flag.scala b/tests/pos/experimental-flag.scala index 9d3daf12fddc..e5d6274c4c80 100644 --- a/tests/pos/experimental-flag.scala +++ b/tests/pos/experimental-flag.scala @@ -1,4 +1,4 @@ -//> using options -experimental 
-Yno-experimental +//> using options -experimental import scala.annotation.experimental diff --git a/tests/pos/experimental-imports-empty.scala b/tests/pos/experimental-imports-empty.scala index 18d83839e7e7..8729905494ff 100644 --- a/tests/pos/experimental-imports-empty.scala +++ b/tests/pos/experimental-imports-empty.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental import language.experimental.namedTypeArguments diff --git a/tests/pos/experimental-imports-top.scala b/tests/pos/experimental-imports-top.scala index 16f44e48eb32..9ba2b5cd2c99 100644 --- a/tests/pos/experimental-imports-top.scala +++ b/tests/pos/experimental-imports-top.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import language.experimental.erasedDefinitions import annotation.experimental diff --git a/tests/pos/experimental-package-imports.scala b/tests/pos/experimental-package-imports.scala index 45a0905ac311..b816130c18d7 100644 --- a/tests/pos/experimental-package-imports.scala +++ b/tests/pos/experimental-package-imports.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/pos/i13848.scala b/tests/pos/i13848.scala index 266f3edcf7ae..36fac091023a 100644 --- a/tests/pos/i13848.scala +++ b/tests/pos/i13848.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/pos/i15133a.scala b/tests/pos/i15133a.scala index 1aff3a5c1cfc..002eec13c05e 100644 --- a/tests/pos/i15133a.scala +++ b/tests/pos/i15133a.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/pos/i15133b.scala b/tests/pos/i15133b.scala index 4c235d37c698..5b1fd62981ba 100644 --- a/tests/pos/i15133b.scala +++ b/tests/pos/i15133b.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/pos/i16091.scala b/tests/pos/i16091.scala index 
349e16e6d7e6..37afe19ab065 100644 --- a/tests/pos/i16091.scala +++ b/tests/pos/i16091.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/pos/i20206.scala b/tests/pos/i20206.scala index 690142140d23..07ef3dc0ba73 100644 --- a/tests/pos/i20206.scala +++ b/tests/pos/i20206.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import language.experimental.erasedDefinitions diff --git a/tests/pos/i7851.scala b/tests/pos/i7851.scala index 5a6408cbe12a..16d28ad353f9 100644 --- a/tests/pos/i7851.scala +++ b/tests/pos/i7851.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental trait Wrappable[T] { } given Wrappable[Float] with { } diff --git a/tests/pos/i8945.scala b/tests/pos/i8945.scala index 2ae8fc268cbf..d0a25b474f28 100644 --- a/tests/pos/i8945.scala +++ b/tests/pos/i8945.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + // src-2/MacroImpl.scala trait Context { diff --git a/tests/pos/tupled-function-instances.scala b/tests/pos/tupled-function-instances.scala index 3a3bc81b7426..b0af67aabfe4 100644 --- a/tests/pos/tupled-function-instances.scala +++ b/tests/pos/tupled-function-instances.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction object Test { diff --git a/tests/run-deep-subtype/Tuple-reverse.scala b/tests/run-deep-subtype/Tuple-reverse.scala index 230800062bb5..c611ba504d82 100644 --- a/tests/run-deep-subtype/Tuple-reverse.scala +++ b/tests/run-deep-subtype/Tuple-reverse.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.reflect.ClassTag diff --git a/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala b/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala index aee87a46ce81..d34cadf91765 100644 --- 
a/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala +++ b/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO +//> using options -experimental -Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO import scala.compiletime.* diff --git a/tests/run-macros/Xmacro-settings-simple/M1.scala b/tests/run-macros/Xmacro-settings-simple/M1.scala index 57a8be886ba1..487ad5baec46 100644 --- a/tests/run-macros/Xmacro-settings-simple/M1.scala +++ b/tests/run-macros/Xmacro-settings-simple/M1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package x diff --git a/tests/run-macros/annot-add-global-class/Macro_1.scala b/tests/run-macros/annot-add-global-class/Macro_1.scala index a6ebf1c64d9d..247829954218 100644 --- a/tests/run-macros/annot-add-global-class/Macro_1.scala +++ b/tests/run-macros/annot-add-global-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package mymacro diff --git a/tests/run-macros/annot-add-global-class/Test_2.scala b/tests/run-macros/annot-add-global-class/Test_2.scala index 824dd2381760..6fa058060906 100644 --- a/tests/run-macros/annot-add-global-class/Test_2.scala +++ b/tests/run-macros/annot-add-global-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import mymacro.addClass diff --git a/tests/run-macros/annot-add-global-object/Macro_1.scala b/tests/run-macros/annot-add-global-object/Macro_1.scala index c2fe5bbfe9e5..031d6e33fefe 100644 --- a/tests/run-macros/annot-add-global-object/Macro_1.scala +++ b/tests/run-macros/annot-add-global-object/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import 
scala.quoted._ diff --git a/tests/run-macros/annot-add-global-object/Test_2.scala b/tests/run-macros/annot-add-global-object/Test_2.scala index 181bc4e935ea..b3c7dbff1558 100644 --- a/tests/run-macros/annot-add-global-object/Test_2.scala +++ b/tests/run-macros/annot-add-global-object/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addClass def foo(): Unit = println("macro generated main") diff --git a/tests/run-macros/annot-add-local-class/Macro_1.scala b/tests/run-macros/annot-add-local-class/Macro_1.scala index 07f6d98e7c99..e9c8eaa9318d 100644 --- a/tests/run-macros/annot-add-local-class/Macro_1.scala +++ b/tests/run-macros/annot-add-local-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-add-local-class/Test_2.scala b/tests/run-macros/annot-add-local-class/Test_2.scala index 8fe4f9db5656..ece281dcb56c 100644 --- a/tests/run-macros/annot-add-local-class/Test_2.scala +++ b/tests/run-macros/annot-add-local-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test(): Unit = @addClass def foo(): Unit = diff --git a/tests/run-macros/annot-add-local-object/Macro_1.scala b/tests/run-macros/annot-add-local-object/Macro_1.scala index 18534730e16b..3d47fafd599a 100644 --- a/tests/run-macros/annot-add-local-object/Macro_1.scala +++ b/tests/run-macros/annot-add-local-object/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-add-local-object/Test_2.scala b/tests/run-macros/annot-add-local-object/Test_2.scala index 0ff7862fb338..2dfcc801d61a 100644 --- 
a/tests/run-macros/annot-add-local-object/Test_2.scala +++ b/tests/run-macros/annot-add-local-object/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test(): Unit = @addClass def foo(): Unit = diff --git a/tests/run-macros/annot-add-nested-class/Macro_1.scala b/tests/run-macros/annot-add-nested-class/Macro_1.scala index 631e3044a39c..ecdd6ae35cb0 100644 --- a/tests/run-macros/annot-add-nested-class/Macro_1.scala +++ b/tests/run-macros/annot-add-nested-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-add-nested-class/Test_2.scala b/tests/run-macros/annot-add-nested-class/Test_2.scala index b92225b7f107..e328f97218d3 100644 --- a/tests/run-macros/annot-add-nested-class/Test_2.scala +++ b/tests/run-macros/annot-add-nested-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Foo(): @addClass def foo(): Unit = diff --git a/tests/run-macros/annot-add-nested-object/Macro_1.scala b/tests/run-macros/annot-add-nested-object/Macro_1.scala index 0069a1010a78..ce6cbaa67a57 100644 --- a/tests/run-macros/annot-add-nested-object/Macro_1.scala +++ b/tests/run-macros/annot-add-nested-object/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-add-nested-object/Test_2.scala b/tests/run-macros/annot-add-nested-object/Test_2.scala index cf6b3a8400fe..f466cf59980b 100644 --- a/tests/run-macros/annot-add-nested-object/Test_2.scala +++ b/tests/run-macros/annot-add-nested-object/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental 
class Foo(): @addClass def foo(): Unit = diff --git a/tests/run-macros/annot-annot-order/Macro_1.scala b/tests/run-macros/annot-annot-order/Macro_1.scala index a177225d635f..ab48d6622d6e 100644 --- a/tests/run-macros/annot-annot-order/Macro_1.scala +++ b/tests/run-macros/annot-annot-order/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-annot-order/Test_2.scala b/tests/run-macros/annot-annot-order/Test_2.scala index 2c073bdcbb7b..9f312c47c6af 100644 --- a/tests/run-macros/annot-annot-order/Test_2.scala +++ b/tests/run-macros/annot-annot-order/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @print("foo") def foo(): Unit = () diff --git a/tests/run-macros/annot-bind/Macro_1.scala b/tests/run-macros/annot-bind/Macro_1.scala index fd0aaf1343a0..ce79560833d9 100644 --- a/tests/run-macros/annot-bind/Macro_1.scala +++ b/tests/run-macros/annot-bind/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-bind/Test_2.scala b/tests/run-macros/annot-bind/Test_2.scala index 60895adabc24..44726e69adc7 100644 --- a/tests/run-macros/annot-bind/Test_2.scala +++ b/tests/run-macros/annot-bind/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @bind("a") val foo: String = "foo" diff --git a/tests/run-macros/annot-changeVal/Macro_1.scala b/tests/run-macros/annot-changeVal/Macro_1.scala index d32cbe85ef35..35d1edf684a4 100644 --- a/tests/run-macros/annot-changeVal/Macro_1.scala +++ b/tests/run-macros/annot-changeVal/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options 
-experimental import scala.annotation.experimental import scala.quoted.* diff --git a/tests/run-macros/annot-changeVal/Test_2.scala b/tests/run-macros/annot-changeVal/Test_2.scala index 8e048e885651..6816712a43a0 100644 --- a/tests/run-macros/annot-changeVal/Test_2.scala +++ b/tests/run-macros/annot-changeVal/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import ChangeVal._ diff --git a/tests/run-macros/annot-concrete-class/Macro_1.scala b/tests/run-macros/annot-concrete-class/Macro_1.scala index 326b450088e6..bda268891b44 100644 --- a/tests/run-macros/annot-concrete-class/Macro_1.scala +++ b/tests/run-macros/annot-concrete-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.MacroAnnotation import scala.quoted.* diff --git a/tests/run-macros/annot-concrete-class/Test_2.scala b/tests/run-macros/annot-concrete-class/Test_2.scala index 7b8fc5cd0f94..6f30a68b1561 100644 --- a/tests/run-macros/annot-concrete-class/Test_2.scala +++ b/tests/run-macros/annot-concrete-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental trait AFoo: def foo: String diff --git a/tests/run-macros/annot-export/Macro_1.scala b/tests/run-macros/annot-export/Macro_1.scala index b135245790ba..68ecb6c5e451 100644 --- a/tests/run-macros/annot-export/Macro_1.scala +++ b/tests/run-macros/annot-export/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-export/Test_2.scala b/tests/run-macros/annot-export/Test_2.scala index 86bbee9c93fd..25954967b953 100644 --- a/tests/run-macros/annot-export/Test_2.scala +++ b/tests/run-macros/annot-export/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental 
-Yno-experimental +//> using options -experimental object Bar: @returnClassName diff --git a/tests/run-macros/annot-gen2/Macro_1.scala b/tests/run-macros/annot-gen2/Macro_1.scala index 503d6e192cdd..edc709ca8172 100644 --- a/tests/run-macros/annot-gen2/Macro_1.scala +++ b/tests/run-macros/annot-gen2/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-gen2/Macro_2.scala b/tests/run-macros/annot-gen2/Macro_2.scala index 9cb734dfdb00..6260dbd785a7 100644 --- a/tests/run-macros/annot-gen2/Macro_2.scala +++ b/tests/run-macros/annot-gen2/Macro_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-gen2/Test_3.scala b/tests/run-macros/annot-gen2/Test_3.scala index 08abafdb2741..2b2a86aabfac 100644 --- a/tests/run-macros/annot-gen2/Test_3.scala +++ b/tests/run-macros/annot-gen2/Test_3.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Bar: @foo def bar(s: String) = s diff --git a/tests/run-macros/annot-generate/Macro_1.scala b/tests/run-macros/annot-generate/Macro_1.scala index b64d888aab80..e1c66b5c2127 100644 --- a/tests/run-macros/annot-generate/Macro_1.scala +++ b/tests/run-macros/annot-generate/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-generate/Macro_2.scala b/tests/run-macros/annot-generate/Macro_2.scala index 37c88459e3e1..0a2c2e34b5f7 100644 --- a/tests/run-macros/annot-generate/Macro_2.scala +++ b/tests/run-macros/annot-generate/Macro_2.scala @@ -1,4 +1,4 @@ -//> using options 
-experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-generate/Test_3.scala b/tests/run-macros/annot-generate/Test_3.scala index 591b3e864f31..8c8509c81c46 100644 --- a/tests/run-macros/annot-generate/Test_3.scala +++ b/tests/run-macros/annot-generate/Test_3.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Bar: @foo def bar(x: Int) = x + 1 diff --git a/tests/run-macros/annot-macro-main/Macro_1.scala b/tests/run-macros/annot-macro-main/Macro_1.scala index 4470febe1244..93e312459810 100644 --- a/tests/run-macros/annot-macro-main/Macro_1.scala +++ b/tests/run-macros/annot-macro-main/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-macro-main/Test_2.scala b/tests/run-macros/annot-macro-main/Test_2.scala index a6733ec1c220..45c1b7b0ebfc 100644 --- a/tests/run-macros/annot-macro-main/Test_2.scala +++ b/tests/run-macros/annot-macro-main/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @mainMacro def Test(): Unit = println("macro generated main") diff --git a/tests/run-macros/annot-memo/Macro_1.scala b/tests/run-macros/annot-memo/Macro_1.scala index 492f1e337dfb..c0a59b57864a 100644 --- a/tests/run-macros/annot-memo/Macro_1.scala +++ b/tests/run-macros/annot-memo/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-memo/Test_2.scala b/tests/run-macros/annot-memo/Test_2.scala index da58e550adda..ad39ec43a6c3 100644 --- a/tests/run-macros/annot-memo/Test_2.scala +++ 
b/tests/run-macros/annot-memo/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Bar: @memoize diff --git a/tests/run-macros/annot-mod-class-add-def/Macro_1.scala b/tests/run-macros/annot-mod-class-add-def/Macro_1.scala index acd878f7d556..2003ebd837df 100644 --- a/tests/run-macros/annot-mod-class-add-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-def/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-add-def/Test_2.scala b/tests/run-macros/annot-mod-class-add-def/Test_2.scala index d6aae4e90cf3..61eabc28878d 100644 --- a/tests/run-macros/annot-mod-class-add-def/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-def/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addIndirectToString("This is Foo") class Foo diff --git a/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala b/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala index 4294e44dd45b..40697d1027fe 100644 --- a/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala b/tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala index b90d237b60d4..bba522b5f2c4 100644 --- a/tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addInnerClass class Foo diff 
--git a/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala b/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala index c772f6cf43a6..642f84f4cbce 100644 --- a/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala b/tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala index 5aa0601e037e..3dcab1f0d535 100644 --- a/tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addMemoToString("This is Foo") class Foo diff --git a/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala b/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala index 50df9e86446d..116a60d4a40c 100644 --- a/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-add-local-class/Test_2.scala b/tests/run-macros/annot-mod-class-add-local-class/Test_2.scala index db4c975992d0..f313900635c9 100644 --- a/tests/run-macros/annot-mod-class-add-local-class/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-local-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addInnerClass class Foo diff --git a/tests/run-macros/annot-mod-class-add-val/Macro_1.scala b/tests/run-macros/annot-mod-class-add-val/Macro_1.scala index 
cf69949dd8ad..7a9e824edc2a 100644 --- a/tests/run-macros/annot-mod-class-add-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-val/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-add-val/Test_2.scala b/tests/run-macros/annot-mod-class-add-val/Test_2.scala index f6ea732f3084..8c4af8f79ce9 100644 --- a/tests/run-macros/annot-mod-class-add-val/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-val/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addMemoToString("This is Foo") class Foo diff --git a/tests/run-macros/annot-mod-class-add-var/Macro_1.scala b/tests/run-macros/annot-mod-class-add-var/Macro_1.scala index 0eb8592accbf..bb9ff164bfaa 100644 --- a/tests/run-macros/annot-mod-class-add-var/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-var/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-add-var/Test_2.scala b/tests/run-macros/annot-mod-class-add-var/Test_2.scala index c2ee86fbced5..cb8ea1db8313 100644 --- a/tests/run-macros/annot-mod-class-add-var/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-var/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addCountToString("This is Foo: ") class Foo: diff --git a/tests/run-macros/annot-mod-class-data/Macro_1.scala b/tests/run-macros/annot-mod-class-data/Macro_1.scala index 938b4d7d4bd6..f2275546e27a 100644 --- a/tests/run-macros/annot-mod-class-data/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-data/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> 
using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted.* diff --git a/tests/run-macros/annot-mod-class-data/Test_2.scala b/tests/run-macros/annot-mod-class-data/Test_2.scala index 5def49c2daf8..b6007562f820 100644 --- a/tests/run-macros/annot-mod-class-data/Test_2.scala +++ b/tests/run-macros/annot-mod-class-data/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @data class Foo(val a: String, val b: Int) //> override def toString(): String = Seq(this.a, this.b).mkString("Foo(", ", ", ")") diff --git a/tests/run-macros/annot-mod-class-equals/Macro_1.scala b/tests/run-macros/annot-mod-class-equals/Macro_1.scala index 625598d3911e..c500d7a909ab 100644 --- a/tests/run-macros/annot-mod-class-equals/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-equals/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted.* diff --git a/tests/run-macros/annot-mod-class-equals/Test_2.scala b/tests/run-macros/annot-mod-class-equals/Test_2.scala index 1e5287743c8b..82981f303568 100644 --- a/tests/run-macros/annot-mod-class-equals/Test_2.scala +++ b/tests/run-macros/annot-mod-class-equals/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @equals class Foo(val a: String, val b: Int) //> override def equals(that: Any): Boolean = diff --git a/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala b/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala index 9c28d7427f9c..657f75d3213f 100644 --- a/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import 
scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-mod-def/Test_2.scala b/tests/run-macros/annot-mod-class-mod-def/Test_2.scala index b0f2b4531986..d7d03fcb661c 100644 --- a/tests/run-macros/annot-mod-class-mod-def/Test_2.scala +++ b/tests/run-macros/annot-mod-class-mod-def/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @modToString("This is Foo") class Foo: diff --git a/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala b/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala index 0d07a6bd2006..03b9ffce7035 100644 --- a/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-mod-val/Test_2.scala b/tests/run-macros/annot-mod-class-mod-val/Test_2.scala index 862977e2aa31..17e5cae85155 100644 --- a/tests/run-macros/annot-mod-class-mod-val/Test_2.scala +++ b/tests/run-macros/annot-mod-class-mod-val/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @setValue("valDef", "a") @setValue("varDef", "b") diff --git a/tests/run-macros/annot-mod-class-override-def/Macro_1.scala b/tests/run-macros/annot-mod-class-override-def/Macro_1.scala index 2e9b9356d845..5a013c8ed8b3 100644 --- a/tests/run-macros/annot-mod-class-override-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-override-def/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-override-def/Test_2.scala b/tests/run-macros/annot-mod-class-override-def/Test_2.scala index 8c6121664208..8aa04610d039 100644 --- 
a/tests/run-macros/annot-mod-class-override-def/Test_2.scala +++ b/tests/run-macros/annot-mod-class-override-def/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @genToString("This is Foo") class Foo diff --git a/tests/run-macros/annot-mod-class-override-val/Macro_1.scala b/tests/run-macros/annot-mod-class-override-val/Macro_1.scala index 6ec02e349051..bfbbbf690072 100644 --- a/tests/run-macros/annot-mod-class-override-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-override-val/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-override-val/Test_2.scala b/tests/run-macros/annot-mod-class-override-val/Test_2.scala index f067ba678af8..b633031b9f6d 100644 --- a/tests/run-macros/annot-mod-class-override-val/Test_2.scala +++ b/tests/run-macros/annot-mod-class-override-val/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Foo: val val1: String = "?" 
diff --git a/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala b/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala index 6760714aaa5e..7f0c5ee51f3a 100644 --- a/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala b/tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala index 340b7503ff71..598f8fa1fc09 100644 --- a/tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala +++ b/tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @newUnusedSymbol class Foo diff --git a/tests/run-macros/annot-result-order/Macro_1.scala b/tests/run-macros/annot-result-order/Macro_1.scala index cb9121eb8d01..1e4d21cc4fcb 100644 --- a/tests/run-macros/annot-result-order/Macro_1.scala +++ b/tests/run-macros/annot-result-order/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-result-order/Test_2.scala b/tests/run-macros/annot-result-order/Test_2.scala index 06b050d11c29..0f356a447772 100644 --- a/tests/run-macros/annot-result-order/Test_2.scala +++ b/tests/run-macros/annot-result-order/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @print("foo") def foo(): Unit = () diff --git a/tests/run-macros/annot-simple-fib/Macro_1.scala b/tests/run-macros/annot-simple-fib/Macro_1.scala index f8a74663f775..0e95ddb80ff7 100644 --- a/tests/run-macros/annot-simple-fib/Macro_1.scala +++ 
b/tests/run-macros/annot-simple-fib/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ diff --git a/tests/run-macros/annot-simple-fib/Test_2.scala b/tests/run-macros/annot-simple-fib/Test_2.scala index 534f4569b619..ace473d848a0 100644 --- a/tests/run-macros/annot-simple-fib/Test_2.scala +++ b/tests/run-macros/annot-simple-fib/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Bar: @memoize diff --git a/tests/run-macros/annot-unrollLast/Macro_1.scala b/tests/run-macros/annot-unrollLast/Macro_1.scala index bcb2a94d5ac3..974eab1ea037 100644 --- a/tests/run-macros/annot-unrollLast/Macro_1.scala +++ b/tests/run-macros/annot-unrollLast/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package example diff --git a/tests/run-macros/annot-unrollLast/Test_2.scala b/tests/run-macros/annot-unrollLast/Test_2.scala index b45b6aecd751..c05bd8a121ed 100644 --- a/tests/run-macros/annot-unrollLast/Test_2.scala +++ b/tests/run-macros/annot-unrollLast/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import example.{unrollHelper, unrollLast} diff --git a/tests/run-macros/i11685/Macro_1.scala b/tests/run-macros/i11685/Macro_1.scala index 72965266dddd..2723d5898050 100644 --- a/tests/run-macros/i11685/Macro_1.scala +++ b/tests/run-macros/i11685/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package test diff --git a/tests/run-macros/i11685/Test_2.scala b/tests/run-macros/i11685/Test_2.scala index eaab3af7acb5..32278480a2df 100644 --- a/tests/run-macros/i11685/Test_2.scala +++ b/tests/run-macros/i11685/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using 
options -experimental import test.MyMacro diff --git a/tests/run-macros/i12021/Macro_1.scala b/tests/run-macros/i12021/Macro_1.scala index 4c36c1fc4cf0..5592da075e5b 100644 --- a/tests/run-macros/i12021/Macro_1.scala +++ b/tests/run-macros/i12021/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/i16734b/Macro_1.scala b/tests/run-macros/i16734b/Macro_1.scala index e080193c398a..cbfe82425c01 100644 --- a/tests/run-macros/i16734b/Macro_1.scala +++ b/tests/run-macros/i16734b/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/i16734b/Test_2.scala b/tests/run-macros/i16734b/Test_2.scala index 81316875d561..bc2a7ae4ef44 100644 --- a/tests/run-macros/i16734b/Test_2.scala +++ b/tests/run-macros/i16734b/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental type F1Inv[A] type F1Cov[+A] diff --git a/tests/run-macros/i19676/Macro_1.scala b/tests/run-macros/i19676/Macro_1.scala index 4c9007d84bbb..82a5ca718477 100644 --- a/tests/run-macros/i19676/Macro_1.scala +++ b/tests/run-macros/i19676/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.MacroAnnotation import scala.quoted.* diff --git a/tests/run-macros/i19676/Test_2.scala b/tests/run-macros/i19676/Test_2.scala index 4110d3c7f482..2baa650f138a 100644 --- a/tests/run-macros/i19676/Test_2.scala +++ b/tests/run-macros/i19676/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @companionToString("transformed by class") class InPackage diff --git a/tests/run-macros/newClass/Macro_1.scala b/tests/run-macros/newClass/Macro_1.scala index 75f757c038dc..0ad619c3d9c4 100644 --- 
a/tests/run-macros/newClass/Macro_1.scala +++ b/tests/run-macros/newClass/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/newClass/Test_2.scala b/tests/run-macros/newClass/Test_2.scala index b126e90a5e41..8cf726d89d0b 100644 --- a/tests/run-macros/newClass/Test_2.scala +++ b/tests/run-macros/newClass/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test: Unit = { val foo = makeClass("foo") diff --git a/tests/run-macros/newClassExtends/Macro_1.scala b/tests/run-macros/newClassExtends/Macro_1.scala index d23b8fba88b6..8b817f2674b2 100644 --- a/tests/run-macros/newClassExtends/Macro_1.scala +++ b/tests/run-macros/newClassExtends/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/newClassExtends/Test_2.scala b/tests/run-macros/newClassExtends/Test_2.scala index 8d782e346a68..6e902825fdc6 100644 --- a/tests/run-macros/newClassExtends/Test_2.scala +++ b/tests/run-macros/newClassExtends/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test: Unit = { val foo: Foo = makeClass("foo") diff --git a/tests/run-macros/newClassExtendsClassParams/Macro_1.scala b/tests/run-macros/newClassExtendsClassParams/Macro_1.scala index e5d28c0ceb9b..99e639e0aa4f 100644 --- a/tests/run-macros/newClassExtendsClassParams/Macro_1.scala +++ b/tests/run-macros/newClassExtendsClassParams/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/newClassExtendsClassParams/Test_2.scala b/tests/run-macros/newClassExtendsClassParams/Test_2.scala index 8d782e346a68..6e902825fdc6 100644 --- 
a/tests/run-macros/newClassExtendsClassParams/Test_2.scala +++ b/tests/run-macros/newClassExtendsClassParams/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test: Unit = { val foo: Foo = makeClass("foo") diff --git a/tests/run-macros/newClassSelf/Macro_1.scala b/tests/run-macros/newClassSelf/Macro_1.scala index 46871d4d6b4c..8562b814677d 100644 --- a/tests/run-macros/newClassSelf/Macro_1.scala +++ b/tests/run-macros/newClassSelf/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/newClassSelf/Test_2.scala b/tests/run-macros/newClassSelf/Test_2.scala index 437be3ca519d..1c1f64fd20e3 100644 --- a/tests/run-macros/newClassSelf/Test_2.scala +++ b/tests/run-macros/newClassSelf/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test: Unit = { val a: Bar = makeClass("A") diff --git a/tests/run/i13215.scala b/tests/run/i13215.scala index 56cd3517d6ac..f43e9aa1e38a 100644 --- a/tests/run/i13215.scala +++ b/tests/run/i13215.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors +//> using options -experimental -Werror -WunstableInlineAccessors import scala.annotation.publicInBinary diff --git a/tests/run/noProtectedSuper.scala b/tests/run/noProtectedSuper.scala index 999a8a06c4fa..d05c13d90c9f 100644 --- a/tests/run/noProtectedSuper.scala +++ b/tests/run/noProtectedSuper.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.publicInBinary diff --git a/tests/run/publicInBinary/Lib_1.scala b/tests/run/publicInBinary/Lib_1.scala index 86895ba40706..d9936670a458 100644 --- a/tests/run/publicInBinary/Lib_1.scala +++ b/tests/run/publicInBinary/Lib_1.scala @@ -1,4 +1,4 @@ -//> using options 
-experimental -Yno-experimental -Werror -WunstableInlineAccessors +//> using options -experimental -Werror -WunstableInlineAccessors package foo diff --git a/tests/run/quotes-add-erased/Macro_1.scala b/tests/run/quotes-add-erased/Macro_1.scala index 66ec1c3642d8..66619237e346 100644 --- a/tests/run/quotes-add-erased/Macro_1.scala +++ b/tests/run/quotes-add-erased/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.MacroAnnotation import scala.annotation.internal.ErasedParam diff --git a/tests/run/quotes-reflection/Macros_1.scala b/tests/run/quotes-reflection/Macros_1.scala index c9fe6eb38c99..92ac4e53e262 100644 --- a/tests/run/quotes-reflection/Macros_1.scala +++ b/tests/run/quotes-reflection/Macros_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run/tupled-function-andThen.scala b/tests/run/tupled-function-andThen.scala index 0068143f9d3f..5dd0a75d41d3 100644 --- a/tests/run/tupled-function-andThen.scala +++ b/tests/run/tupled-function-andThen.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction diff --git a/tests/run/tupled-function-apply.scala b/tests/run/tupled-function-apply.scala index 69cfeef91dd1..8fb68308deb5 100644 --- a/tests/run/tupled-function-apply.scala +++ b/tests/run/tupled-function-apply.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction diff --git a/tests/run/tupled-function-compose.scala b/tests/run/tupled-function-compose.scala index d984b8a9184a..84ca06103537 100644 --- a/tests/run/tupled-function-compose.scala +++ b/tests/run/tupled-function-compose.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction 
object Test { diff --git a/tests/run/tupled-function-extension-method.scala b/tests/run/tupled-function-extension-method.scala index fc3319aa4c15..216d91ada605 100644 --- a/tests/run/tupled-function-extension-method.scala +++ b/tests/run/tupled-function-extension-method.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction object Test { diff --git a/tests/run/tupled-function-tupled.scala b/tests/run/tupled-function-tupled.scala index 5a799be167c3..77ee8f44a0e1 100644 --- a/tests/run/tupled-function-tupled.scala +++ b/tests/run/tupled-function-tupled.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction diff --git a/tests/run/tupled-function-untupled.scala b/tests/run/tupled-function-untupled.scala index 34b81c74c4f6..3ef86b4cac2e 100644 --- a/tests/run/tupled-function-untupled.scala +++ b/tests/run/tupled-function-untupled.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction object Test { From c1291d3ef844b7c7e4ed88797bf1d7f818fffd88 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 30 Apr 2024 13:36:25 +0200 Subject: [PATCH 223/465] Update library/src/scala/runtime/stdLibPatches/Predef.scala Co-authored-by: Nicolas Stucki --- library/src/scala/runtime/stdLibPatches/Predef.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index ca9978ea347f..7684ca2700f4 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -65,5 +65,5 @@ object Predef: extension (inline opt: Option.type) @experimental - inline def fromNullable[T](t: T | Null): Option[T] = Option(t).asInstanceOf + inline def fromNullable[T](t: T | Null): 
Option[T] = Option(t).asInstanceOf[Option[T]] end Predef From 803603be0ca797cd268ce180fa5d1cc59617b98e Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 30 Apr 2024 15:40:27 +0200 Subject: [PATCH 224/465] Optimize nonExperimentalTopLevelDefs --- .../src/dotty/tools/dotc/typer/Checking.scala | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 5d2f2a5a1088..3c74e9f4ed90 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -798,7 +798,7 @@ object Checking { tree /** Check that experimental language imports in `trees` - * are done only in experimental scopes. For for top-level + * are done only in experimental scopes. For top-level * experimental imports, all top-level definitions are transformed * to @experimental definitions. * @@ -809,19 +809,14 @@ object Checking { !sym.isExperimental && sym.source == ctx.compilationUnit.source && !sym.isConstructor // not constructor of package object - && !sym.is(Package) && !sym.isPackageObject && !sym.name.endsWith(str.TOPLEVEL_SUFFIX) - - val packageMembers = - pack.info.decls - .toList.iterator - .filter(isNonExperimentalTopLevelDefinition) - val packageObjectMembers = - pack.info.decls - .toList.iterator - .filter(sym => sym.isClass && (sym.is(Package) || sym.isPackageObject)) - .flatMap(nonExperimentalTopLevelDefs) - - packageMembers ++ packageObjectMembers + && !sym.is(Package) && !sym.name.isPackageObjectName + + pack.info.decls.toList.iterator.flatMap: sym => + if sym.isClass && (sym.is(Package) || sym.isPackageObject) then + nonExperimentalTopLevelDefs(sym) + else if isNonExperimentalTopLevelDefinition(sym) then + sym :: Nil + else Nil def unitExperimentalLanguageImports = def isAllowedImport(sel: untpd.ImportSelector) = From 748596a894aca7c8ecca47499ac738b448280054 Mon Sep 17 00:00:00 2001 From: Matt 
Bovel Date: Tue, 30 Apr 2024 19:14:17 +0200 Subject: [PATCH 225/465] Remove message last line --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 3 +-- tests/neg/19680.check | 3 +-- tests/neg/19680b.check | 3 +-- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 05175900e04e..980ba9bd16d2 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1042,8 +1042,7 @@ trait Applications extends Compatibility { case Some(arg) if tree.args.exists(_.span == arg.span) => val noteText = i"""The required type comes from a parameter of the automatically - |inserted `apply` method of `${qualifier.tpe}`, - |which is the type of `${qualifier.show}`.""".stripMargin + |inserted `apply` method of `${qualifier.tpe}`.""".stripMargin Diagnostic.Error(msg.appendExplanation("\n\n" + noteText), dia.pos) case _ => dia case msg => dia diff --git a/tests/neg/19680.check b/tests/neg/19680.check index 8372d5129960..5bdaaad99c2a 100644 --- a/tests/neg/19680.check +++ b/tests/neg/19680.check @@ -19,6 +19,5 @@ | The tests were made under the empty constraint | | The required type comes from a parameter of the automatically - | inserted `apply` method of `scala.collection.StringOps`, - | which is the type of `augmentString(renderWebsite("/tmp")(x$1))`. + | inserted `apply` method of `scala.collection.StringOps`. 
--------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/19680b.check b/tests/neg/19680b.check index 14f2a30c5caa..06ff26ee3289 100644 --- a/tests/neg/19680b.check +++ b/tests/neg/19680b.check @@ -20,6 +20,5 @@ | The tests were made under the empty constraint | | The required type comes from a parameter of the automatically - | inserted `apply` method of `List[Int]`, - | which is the type of `List.apply[Int]([1,2 : Int]*)`. + | inserted `apply` method of `List[Int]`. --------------------------------------------------------------------------------------------------------------------- From ebf58bbfcae00a86a7247cb18e031611eccec918 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Tue, 30 Apr 2024 19:31:56 +0200 Subject: [PATCH 226/465] Output "no best given instance" for ambiguous implicit error messages --- .../src/dotty/tools/dotc/reporting/messages.scala | 11 ++++++++++- tests/neg/19414-desugared.check | 2 +- tests/neg/19414.check | 2 +- tests/neg/given-ambiguous-1.check | 2 +- tests/neg/given-ambiguous-default-1.check | 2 +- tests/neg/given-ambiguous-default-2.check | 2 +- 6 files changed, 15 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 1ebdc741f54f..2b4deef72362 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -2938,11 +2938,20 @@ class MissingImplicitArgument( def location(preposition: String) = if (where.isEmpty) "" else s" $preposition $where" + /** Default error message for non-nested ambiguous implicits. */ def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = s"Ambiguous given instances: ${ambi.explanation}${location("of")}" + /** Default error messages for non-ambiguous implicits, or nested ambiguous + * implicits. 
+ * + * The default message is shown for ambiguous implicits only if they have + * the `nested` flag set. In this case, we output "no best given instance" + * instead of "no given instance". + */ def defaultImplicitNotFoundMessage = - i"No given instance of type $pt was found${location("for")}" + val bestStr = if arg.tpe.isInstanceOf[AmbiguousImplicits] then " best" else "" + i"No$bestStr given instance of type $pt was found${location("for")}" /** Construct a custom error message given an ambiguous implicit * candidate `alt` and a user defined message `raw`. diff --git a/tests/neg/19414-desugared.check b/tests/neg/19414-desugared.check index e126404e3e4f..c21806e16c2c 100644 --- a/tests/neg/19414-desugared.check +++ b/tests/neg/19414-desugared.check @@ -1,7 +1,7 @@ -- [E172] Type Error: tests/neg/19414-desugared.scala:22:34 ------------------------------------------------------------ 22 | summon[BodySerializer[JsObject]] // error: Ambiguous given instances | ^ - |No given instance of type BodySerializer[JsObject] was found for parameter x of method summon in object Predef. + |No best given instance of type BodySerializer[JsObject] was found for parameter x of method summon in object Predef. |I found: | | given_BodySerializer_B[B]( diff --git a/tests/neg/19414.check b/tests/neg/19414.check index f80f1681739d..6804546df037 100644 --- a/tests/neg/19414.check +++ b/tests/neg/19414.check @@ -1,7 +1,7 @@ -- [E172] Type Error: tests/neg/19414.scala:15:34 ---------------------------------------------------------------------- 15 | summon[BodySerializer[JsObject]] // error: Ambiguous given instances | ^ - |No given instance of type BodySerializer[JsObject] was found for parameter x of method summon in object Predef. + |No best given instance of type BodySerializer[JsObject] was found for parameter x of method summon in object Predef. 
|I found: | | given_BodySerializer_B[B]( diff --git a/tests/neg/given-ambiguous-1.check b/tests/neg/given-ambiguous-1.check index 017714de2ea7..ed64164b351f 100644 --- a/tests/neg/given-ambiguous-1.check +++ b/tests/neg/given-ambiguous-1.check @@ -1,7 +1,7 @@ -- [E172] Type Error: tests/neg/given-ambiguous-1.scala:12:23 ---------------------------------------------------------- 12 |def f: Unit = summon[B] // error: Ambiguous given instances | ^ - | No given instance of type B was found for parameter x of method summon in object Predef. + | No best given instance of type B was found for parameter x of method summon in object Predef. | I found: | | given_B(/* ambiguous: both given instance a1 and given instance a2 match type A */summon[A]) diff --git a/tests/neg/given-ambiguous-default-1.check b/tests/neg/given-ambiguous-default-1.check index 0b24a89b82cf..1a5006c23055 100644 --- a/tests/neg/given-ambiguous-default-1.check +++ b/tests/neg/given-ambiguous-default-1.check @@ -1,7 +1,7 @@ -- [E172] Type Error: tests/neg/given-ambiguous-default-1.scala:18:23 -------------------------------------------------- 18 |def f: Unit = summon[B] // error: Ambiguous given instances | ^ - | No given instance of type B was found for parameter x of method summon in object Predef. + | No best given instance of type B was found for parameter x of method summon in object Predef. 
| I found: | | given_B(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A]) diff --git a/tests/neg/given-ambiguous-default-2.check b/tests/neg/given-ambiguous-default-2.check index 10094f652485..cbe8b972a389 100644 --- a/tests/neg/given-ambiguous-default-2.check +++ b/tests/neg/given-ambiguous-default-2.check @@ -1,7 +1,7 @@ -- [E172] Type Error: tests/neg/given-ambiguous-default-2.scala:18:23 -------------------------------------------------- 18 |def f: Unit = summon[C] // error: Ambiguous given instances | ^ - |No given instance of type C was found for parameter x of method summon in object Predef. + |No best given instance of type C was found for parameter x of method summon in object Predef. |I found: | | given_C(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A], this.given_C$default$2) From d4de2cbae94eb36e37277d4cac5a20356e01bb36 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 20 Dec 2023 18:09:33 +0100 Subject: [PATCH 227/465] Fix shapeless 3 deriving test. Typo: mkInstances instead of mkProductInstances, previously got healed by accident because if most specific rule. Change rules for given prioritization Consider the following program: ```scala class A class B extends A class C extends A given A = A() given B = B() given C = C() def f(using a: A, b: B, c: C) = println(a.getClass) println(b.getClass) println(c.getClass) @main def Test = f ``` With the current rules, this would fail with an ambiguity error between B and C when trying to synthesize the A parameter. This is a problem without an easy remedy. We can fix this problem by flipping the priority for implicit arguments. Instead of requiring an argument to be most _specific_, we now require it to be most _general_ while still conforming to the formal parameter. 
There are three justifications for this change, which at first glance seems quite drastic: - It gives us a natural way to deal with inheritance triangles like the one in the code above. Such triangles are quite common. - Intuitively, we want to get the closest possible match between required formal parameter type and synthetisized argument. The "most general" rule provides that. - We already do a crucial part of this. Namely, with current rules we interpolate all type variables in an implicit argument downwards, no matter what their variance is. This makes no sense in theory, but solves hairy problems with contravariant typeclasses like `Comparable`. Instead of this hack, we now do something more principled, by flipping the direction everywhere, preferring general over specific, instead of just flipping contravariant type parameters. Don't flip contravariant type arguments for overloading resolution Flipping contravariant type arguments was needed for implicit search where it will be replaced by a more general scheme. But it makes no sense for overloading resolution. For overloading resolution, we want to pick the most specific alternative, analogous to us picking the most specific instantiation when we force a fully defined type. Disable implicit search everywhere for disambiaguation Previously, one disambiguation step missed that, whereas implicits were turned off everywhere else. 
--- .../community-projects/shapeless-3 | 2 +- compiler/src/dotty/tools/dotc/core/Mode.scala | 6 +- .../dotty/tools/dotc/typer/Applications.scala | 30 +++++++--- .../dotty/tools/dotc/typer/Implicits.scala | 15 +++-- .../tools/dotc/typer/ImportSuggestions.scala | 2 +- tests/neg/i15264.scala | 58 +++++++++++++++++++ tests/pos/i15264.scala | 5 +- tests/pos/overload-disambiguation.scala | 13 +++++ tests/run/given-triangle.check | 3 + tests/run/given-triangle.scala | 14 +++++ tests/run/implicit-specifity.scala | 2 +- tests/run/implied-for.scala | 2 +- tests/run/implied-priority.scala | 10 ++-- 13 files changed, 135 insertions(+), 27 deletions(-) create mode 100644 tests/neg/i15264.scala create mode 100644 tests/pos/overload-disambiguation.scala create mode 100644 tests/run/given-triangle.check create mode 100644 tests/run/given-triangle.scala diff --git a/community-build/community-projects/shapeless-3 b/community-build/community-projects/shapeless-3 index d27c5ba1ae51..90f0c977b536 160000 --- a/community-build/community-projects/shapeless-3 +++ b/community-build/community-projects/shapeless-3 @@ -1 +1 @@ -Subproject commit d27c5ba1ae5111b85df2cfb65a26b9246c52570c +Subproject commit 90f0c977b536c06305496600b8b2014c9e8e3d86 diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 71b49394ae14..c3405160bc18 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -41,6 +41,8 @@ object Mode { val Pattern: Mode = newMode(0, "Pattern") val Type: Mode = newMode(1, "Type") + val PatternOrTypeBits: Mode = Pattern | Type + val ImplicitsEnabled: Mode = newMode(2, "ImplicitsEnabled") val InferringReturnType: Mode = newMode(3, "InferringReturnType") @@ -120,8 +122,6 @@ object Mode { /** Read original positions when unpickling from TASTY */ val ReadPositions: Mode = newMode(17, "ReadPositions") - val PatternOrTypeBits: Mode = Pattern | Type - /** We are elaborating the 
fully qualified name of a package clause. * In this case, identifiers should never be imported. */ @@ -133,6 +133,8 @@ object Mode { /** We are typing the body of an inline method */ val InlineableBody: Mode = newMode(21, "InlineableBody") + val NewGivenRules: Mode = newMode(22, "NewGivenRules") + /** We are synthesizing the receiver of an extension method */ val SynthesizeExtMethodReceiver: Mode = newMode(23, "SynthesizeExtMethodReceiver") diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index ffbb223a923a..e484bef612ed 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1724,9 +1724,12 @@ trait Applications extends Compatibility { * an alternative that takes more implicit parameters wins over one * that takes fewer. */ - def compare(alt1: TermRef, alt2: TermRef)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { + def compare(alt1: TermRef, alt2: TermRef, preferGeneral: Boolean = false)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { record("resolveOverloaded.compare") + val newGivenRules = + ctx.mode.is(Mode.NewGivenRules) && alt1.symbol.is(Given) + /** Is alternative `alt1` with type `tp1` as specific as alternative * `alt2` with type `tp2` ? * @@ -1809,9 +1812,11 @@ trait Applications extends Compatibility { * the intersection of its parent classes instead. */ def isAsSpecificValueType(tp1: Type, tp2: Type)(using Context) = - if (ctx.mode.is(Mode.OldOverloadingResolution)) + if !preferGeneral || ctx.mode.is(Mode.OldOverloadingResolution) then + // Normal specificity test for overloading resultion (where `preferGeneral` is false) + // and in mode Scala3-migration when we compare with the old Scala 2 rules. 
isCompatible(tp1, tp2) - else { + else val flip = new TypeMap { def apply(t: Type) = t match { case t @ AppliedType(tycon, args) => @@ -1822,13 +1827,20 @@ trait Applications extends Compatibility { case _ => mapOver(t) } } - def prepare(tp: Type) = tp.stripTypeVar match { + + def prepare(tp: Type) = tp.stripTypeVar match case tp: NamedType if tp.symbol.is(Module) && tp.symbol.sourceModule.is(Given) => - flip(tp.widen.widenToParents) - case _ => flip(tp) - } - (prepare(tp1) relaxed_<:< prepare(tp2)) || viewExists(tp1, tp2) - } + tp.widen.widenToParents + case _ => + tp + + val tp1p = prepare(tp1) + val tp2p = prepare(tp2) + if newGivenRules then + (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) + else + (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) + end isAsSpecificValueType /** Widen the result type of synthetic given methods from the implementation class to the * type that's implemented. Example diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5880a659a301..dbfcb6d26e76 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1226,7 +1226,7 @@ trait Implicits: assert(argument.isEmpty || argument.tpe.isValueType || argument.tpe.isInstanceOf[ExprType], em"found: $argument: ${argument.tpe}, expected: $pt") - private def nestedContext() = + private def searchContext() = ctx.fresh.setMode(ctx.mode &~ Mode.ImplicitsEnabled) private def isCoherent = pt.isRef(defn.CanEqualClass) @@ -1270,7 +1270,7 @@ trait Implicits: else val history = ctx.searchHistory.nest(cand, pt) val typingCtx = - nestedContext().setNewTyperState().setFreshGADTBounds.setSearchHistory(history) + searchContext().setNewTyperState().setFreshGADTBounds.setSearchHistory(history) val result = typedImplicit(cand, pt, argument, span)(using typingCtx) result match case res: SearchSuccess => @@ -1297,7 +1297,12 @@ trait Implicits: def 
compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level - else explore(compare(alt1.ref, alt2.ref))(using nestedContext()) + else + val was = explore(compare(alt1.ref, alt2.ref, preferGeneral = true))(using searchContext()) + val now = explore(compare(alt1.ref, alt2.ref, preferGeneral = true))(using searchContext().addMode(Mode.NewGivenRules)) + if was != now then + println(i"change in preference for $pt between ${alt1.ref} and ${alt2.ref}, was: $was, now: $now at $srcPos") + now /** If `alt1` is also a search success, try to disambiguate as follows: * - If alt2 is preferred over alt1, pick alt2, otherwise return an @@ -1333,8 +1338,8 @@ trait Implicits: else ctx.typerState - diff = inContext(ctx.withTyperState(comparisonState)): - compare(ref1, ref2) + diff = inContext(searchContext().withTyperState(comparisonState)): + compare(ref1, ref2, preferGeneral = true) else // alt1 is a conversion, prefer extension alt2 over it diff = -1 if diff < 0 then alt2 diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 33643a0fae2f..5ab6a4a5fae6 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -296,7 +296,7 @@ trait ImportSuggestions: var i = 0 var diff = 0 while i < filled && diff == 0 do - diff = compare(ref, top(i))(using noImplicitsCtx) + diff = compare(ref, top(i), preferGeneral = true)(using noImplicitsCtx) if diff > 0 then rest += top(i) top(i) = ref diff --git a/tests/neg/i15264.scala b/tests/neg/i15264.scala new file mode 100644 index 000000000000..9dbc253cf33e --- /dev/null +++ b/tests/neg/i15264.scala @@ -0,0 +1,58 @@ +object priority: + // lower number = higher priority + class Prio0 extends Prio1 + object Prio0 { given Prio0() } + + class Prio1 extends Prio2 + object Prio1 { given Prio1() 
} + + class Prio2 + object Prio2 { given Prio2() } + +object repro: + // analogous to cats Eq, Hash, Order: + class A[V] + class B[V] extends A[V] + class C[V] extends A[V] + + class Q[V] + + object context: + // prios work here, which is cool + given[V](using priority.Prio0): C[V] = new C[V] + given[V](using priority.Prio1): B[V] = new B[V] + given[V](using priority.Prio2): A[V] = new A[V] + + object exports: + // so will these exports + export context.given + + // if you import these don't import from 'context' above + object qcontext: + // base defs, like what you would get from cats + given gb: B[Int] = new B[Int] + given gc: C[Int] = new C[Int] + + // these seem like they should work but don't + given gcq[V](using p0: priority.Prio0)(using c: C[V]): C[Q[V]] = new C[Q[V]] + given gbq[V](using p1: priority.Prio1)(using b: B[V]): B[Q[V]] = new B[Q[V]] + given gaq[V](using p2: priority.Prio2)(using a: A[V]): A[Q[V]] = new A[Q[V]] + +object test1: + import repro.* + import repro.exports.given + + // these will work + val a = summon[A[Int]] + +object test2: + import repro.* + import repro.qcontext.given + + // This one will fail as ambiguous - prios aren't having an effect. + // Priorities indeed don't have an effect if the result is already decided + // without using clauses, they onyl act as a tie breaker. + // With the new resolution rules, it's ambiguous since we pick `gaq` for + // summon, and that needs an A[Int], but there are only the two competing choices + // qb and qc. 
+ val a = summon[A[Q[Int]]] // error: ambiguous between qb and qc for A[Int] diff --git a/tests/pos/i15264.scala b/tests/pos/i15264.scala index 05992df61b94..5be8436c12ba 100644 --- a/tests/pos/i15264.scala +++ b/tests/pos/i15264.scala @@ -30,6 +30,7 @@ object repro: // if you import these don't import from 'context' above object qcontext: // base defs, like what you would get from cats + given ga: A[Int] = new B[Int] // added so that we don't get an ambiguity in test2 given gb: B[Int] = new B[Int] given gc: C[Int] = new C[Int] @@ -45,9 +46,9 @@ object test1: // these will work val a = summon[A[Int]] + object test2: import repro.* import repro.qcontext.given - // this one will fail as ambiguous - prios aren't having an effect - val a = summon[A[Q[Int]]] \ No newline at end of file + val a = summon[A[Q[Int]]] diff --git a/tests/pos/overload-disambiguation.scala b/tests/pos/overload-disambiguation.scala new file mode 100644 index 000000000000..58b085758d92 --- /dev/null +++ b/tests/pos/overload-disambiguation.scala @@ -0,0 +1,13 @@ +class A +class B +class C[-T] + +def foo(using A): C[Any] = ??? +def foo(using B): C[Int] = ??? 
+ + +@main def Test = + given A = A() + given B = B() + val x = foo + val _: C[Any] = x diff --git a/tests/run/given-triangle.check b/tests/run/given-triangle.check new file mode 100644 index 000000000000..5ba9e6a1e8b9 --- /dev/null +++ b/tests/run/given-triangle.check @@ -0,0 +1,3 @@ +class A +class B +class C diff --git a/tests/run/given-triangle.scala b/tests/run/given-triangle.scala new file mode 100644 index 000000000000..9d39689996ba --- /dev/null +++ b/tests/run/given-triangle.scala @@ -0,0 +1,14 @@ +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +def f(using a: A, b: B, c: C) = + println(a.getClass) + println(b.getClass) + println(c.getClass) + +@main def Test = f diff --git a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index 51fa02d91cfd..fb8f84d9f94d 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -38,5 +38,5 @@ object Test extends App { assert(Show[Int] == 0) assert(Show[String] == 1) assert(Show[Generic] == 1) // showGen loses against fallback due to longer argument list - assert(Show[Generic2] == 2) // ... but the opaque type intersection trick works. + assert(Show[Generic2] == 1) // ... and the opaque type intersection trick no longer works with new resolution rules. 
} diff --git a/tests/run/implied-for.scala b/tests/run/implied-for.scala index c7789ce570e4..a55d59e89505 100644 --- a/tests/run/implied-for.scala +++ b/tests/run/implied-for.scala @@ -20,7 +20,7 @@ object Test extends App { val x2: T = t val x3: D[Int] = d - assert(summon[T].isInstanceOf[B]) + assert(summon[T].isInstanceOf[T]) assert(summon[D[Int]].isInstanceOf[D[_]]) } diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index 0822fae6778f..b02412ddaf0c 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -72,16 +72,16 @@ def test2a = { } /* If that solution is not applicable, we can define an override by refining the - * result type of the given instance, e.g. like this: + * result type of all lower-priority instances, e.g. like this: */ object Impl3 { - given t1[T]: E[T]("low") + trait LowPriority // A marker trait to indicate a lower priority + given t1[T]: E[T]("low") with LowPriority } object Override { - trait HighestPriority // A marker trait to indicate a higher priority - given over[T]: E[T]("hi") with HighestPriority() + given over[T]: E[T]("hi") with {} } def test3 = { @@ -90,7 +90,7 @@ def test3 = { { import Override.given import Impl3.given - assert(summon[E[String]].str == "hi") // `over` takes priority since its result type is a subtype of t1's. + assert(summon[E[String]].str == "hi", summon[E[String]].str) // `Impl3` takes priority since its result type is a subtype of t1's. 
} } From 1b9a7e0978e1d5e16d750000aac05cac5207ba32 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 21 Dec 2023 11:32:24 +0100 Subject: [PATCH 228/465] Change rules for given prioritization Consider the following program: ```scala class A class B extends A class C extends A given A = A() given B = B() given C = C() def f(using a: A, b: B, c: C) = println(a.getClass) println(b.getClass) println(c.getClass) @main def Test = f ``` With the current rules, this would fail with an ambiguity error between B and C when trying to synthesize the A parameter. This is a problem without an easy remedy. We can fix this problem by flipping the priority for implicit arguments. Instead of requiring an argument to be most _specific_, we now require it to be most _general_ while still conforming to the formal parameter. There are three justifications for this change, which at first glance seems quite drastic: - It gives us a natural way to deal with inheritance triangles like the one in the code above. Such triangles are quite common. - Intuitively, we want to get the closest possible match between required formal parameter type and synthesized argument. The "most general" rule provides that. - We already do a crucial part of this. Namely, with current rules we interpolate all type variables in an implicit argument downwards, no matter what their variance is. This makes no sense in theory, but solves hairy problems with contravariant typeclasses like `Comparable`. Instead of this hack, we now do something more principled, by flipping the direction everywhere, preferring general over specific, instead of just flipping contravariant type parameters. The behavior is dependent on the Scala version - Old behavior: up to 3.4 - New behavior: from 3.5, 3.5-migration warns on behavior change The CB builds under the new rules. One fix was needed for a shapeless 3 deriving test. 
There was a typo: mkInstances instead of mkProductInstances, which previously got healed by accident because of the most specific rule. Also: Don't flip contravariant type arguments for overloading resolution Flipping contravariant type arguments was needed for implicit search where it will be replaced by a more general scheme. But it makes no sense for overloading resolution. For overloading resolution, we want to pick the most specific alternative, analogous to us picking the most specific instantiation when we force a fully defined type. Also: Disable implicit search everywhere for disambiguation Previously, one disambiguation step missed that, whereas implicits were turned off everywhere else. --- compiler/src/dotty/tools/dotc/core/Mode.scala | 13 +- .../dotty/tools/dotc/typer/Applications.scala | 120 ++++++++++-------- .../dotty/tools/dotc/typer/Implicits.scala | 26 ++-- .../changed-features/implicit-resolution.md | 17 ++- tests/neg/i15264.scala | 1 + tests/run/given-triangle.scala | 2 + tests/run/implicit-specifity.scala | 2 + tests/run/implied-priority.scala | 1 + tests/warn/given-triangle.check | 6 + tests/warn/given-triangle.scala | 16 +++ 10 files changed, 137 insertions(+), 67 deletions(-) create mode 100644 tests/warn/given-triangle.check create mode 100644 tests/warn/given-triangle.scala diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index c3405160bc18..5dab5631c62a 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -103,16 +103,19 @@ object Mode { */ val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") - /** Use Scala2 scheme for overloading and implicit resolution */ - val OldOverloadingResolution: Mode = newMode(15, "OldOverloadingResolution") + /** Use previous Scheme for implicit resolution. 
Currently significant + * in 3.0-migration where we use Scala-2's scheme instead and in 3.5-migration + * where we use the previous scheme up to 3.4 instead. + */ + val OldImplicitResolution: Mode = newMode(15, "OldImplicitResolution") /** Treat CapturingTypes as plain AnnotatedTypes even in phase CheckCaptures. - * Reuses the value of OldOverloadingResolution to save Mode bits. - * This is OK since OldOverloadingResolution only affects implicit search, which + * Reuses the value of OldImplicitResolution to save Mode bits. + * This is OK since OldImplicitResolution only affects implicit search, which * is done during phases Typer and Inlinig, and IgnoreCaptures only has an * effect during phase CheckCaptures. */ - val IgnoreCaptures = OldOverloadingResolution + val IgnoreCaptures = OldImplicitResolution /** Allow hk applications of type lambdas to wildcard arguments; * used for checking that such applications do not normally arise diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index e484bef612ed..0b4ad5ff9bc8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -22,7 +22,7 @@ import ProtoTypes.* import Inferencing.* import reporting.* import Nullables.*, NullOpsDecorator.* -import config.Feature +import config.{Feature, SourceVersion} import collection.mutable import config.Printers.{overload, typr, unapp} @@ -1709,6 +1709,12 @@ trait Applications extends Compatibility { /** Compare two alternatives of an overloaded call or an implicit search. * * @param alt1, alt2 Non-overloaded references indicating the two choices + * @param preferGeneral When comparing two value types, prefer the more general one + * over the more specific one iff `preferGeneral` is true. + * `preferGeneral` is set to `true` when we compare two given values, since + * then we want the most general evidence that matches the target + * type. 
It is set to `false` for overloading resolution, when we want the + * most specific type instead. * @return 1 if 1st alternative is preferred over 2nd * -1 if 2nd alternative is preferred over 1st * 0 if neither alternative is preferred over the other @@ -1727,27 +1733,25 @@ trait Applications extends Compatibility { def compare(alt1: TermRef, alt2: TermRef, preferGeneral: Boolean = false)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { record("resolveOverloaded.compare") - val newGivenRules = - ctx.mode.is(Mode.NewGivenRules) && alt1.symbol.is(Given) + val compareGivens = alt1.symbol.is(Given) || alt2.symbol.is(Given) - /** Is alternative `alt1` with type `tp1` as specific as alternative + /** Is alternative `alt1` with type `tp1` as good as alternative * `alt2` with type `tp2` ? * - * 1. A method `alt1` of type `(p1: T1, ..., pn: Tn)U` is as specific as `alt2` + * 1. A method `alt1` of type `(p1: T1, ..., pn: Tn)U` is as good as `alt2` * if `alt1` is nullary or `alt2` is applicable to arguments (p1, ..., pn) of * types T1,...,Tn. If the last parameter `pn` has a vararg type T*, then * `alt1` must be applicable to arbitrary numbers of `T` parameters (which * implies that it must be a varargs method as well). * 2. A polymorphic member of type [a1 >: L1 <: U1, ..., an >: Ln <: Un]T is as - * specific as `alt2` of type `tp2` if T is as specific as `tp2` under the + * good as `alt2` of type `tp2` if T is as good as `tp2` under the * assumption that for i = 1,...,n each ai is an abstract type name bounded * from below by Li and from above by Ui. * 3. A member of any other type `tp1` is: - * a. always as specific as a method or a polymorphic method. - * b. as specific as a member of any other type `tp2` if `tp1` is compatible - * with `tp2`. + * a. always as good as a method or a polymorphic method. + * b. 
as good as a member of any other type `tp2` is `asGoodValueType(tp1, tp2) = true` */ - def isAsSpecific(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { + def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { tp1 match case tp1: MethodType => // (1) tp1.paramInfos.isEmpty && tp2.isInstanceOf[LambdaType] @@ -1769,65 +1773,60 @@ trait Applications extends Compatibility { fullyDefinedType(tp1Params, "type parameters of alternative", alt1.symbol.srcPos) val tparams = newTypeParams(alt1.symbol, tp1.paramNames, EmptyFlags, tp1.instantiateParamInfos(_)) - isAsSpecific(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) + isAsGood(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) } case _ => // (3) tp2 match case tp2: MethodType => true // (3a) case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a) case tp2: PolyType => // (3b) - explore(isAsSpecificValueType(tp1, instantiateWithTypeVars(tp2))) + explore(isAsGoodValueType(tp1, instantiateWithTypeVars(tp2))) case _ => // 3b) - isAsSpecificValueType(tp1, tp2) + isAsGoodValueType(tp1, tp2) } - /** Test whether value type `tp1` is as specific as value type `tp2`. - * Let's abbreviate this to `tp1 <:s tp2`. - * Previously, `<:s` was the same as `<:`. This behavior is still - * available under mode `Mode.OldOverloadingResolution`. The new behavior - * is different, however. Here, `T <:s U` iff + /** Test whether value type `tp1` is as good as value type `tp2`. + * Let's abbreviate this to `tp1 <:p tp2`. The behavior depends on the Scala version + * and mode. * - * flip(T) <: flip(U) + * - In Scala 2, `<:p` was the same as `<:`. This behavior is still + * available in 3.0-migration if mode `Mode.OldImplicitResolution` is turned on as well. + * It is used to highlight differences between Scala 2 and 3 behavior. 
* - * where `flip` changes covariant occurrences of contravariant type parameters to - * covariant ones. Intuitively `<:s` means subtyping `<:`, except that all arguments - * to contravariant parameters are compared as if they were covariant. E.g. given class + * - In Scala 3.0-3.4, the behavior is as follows: `T <:p U` iff there is an impliit conversion + * from `T` to `U`, or * - * class Cmp[-X] + * flip(T) <: flip(U) * - * `Cmp[T] <:s Cmp[U]` if `T <: U`. On the other hand, non-variant occurrences - * of parameters are not affected. So `T <: U` would imply `Set[Cmp[U]] <:s Set[Cmp[T]]`, - * as usual, because `Set` is non-variant. + * where `flip` changes covariant occurrences of contravariant type parameters to + * covariant ones. Intuitively `<:p` means subtyping `<:`, except that all arguments + * to contravariant parameters are compared as if they were covariant. E.g. given class * - * This relation might seem strange, but it models closely what happens for methods. - * Indeed, if we integrate the existing rules for methods into `<:s` we have now that + * class Cmp[-X] * - * (T)R <:s (U)R + * `Cmp[T] <:p Cmp[U]` if `T <: U`. On the other hand, non-variant occurrences + * of parameters are not affected. So `T <: U` would imply `Set[Cmp[U]] <:p Set[Cmp[T]]`, + * as usual, because `Set` is non-variant. * - * iff + * - From Scala 3.5, `T <:p U` means `T <: U` or `T` convertible to `U` + * for overloading resolution (when `preferGeneral is false), and the opposite relation + * `U <: T` or `U convertible to `T` for implicit disambiguation between givens + * (when `preferGeneral` is true). For old-style implicit values, the 3.4 behavior is kept. * - * T => R <:s U => R + * - In Scala 3.5-migration, use the 3.5 scheme normally, and the 3.4 scheme if + * `Mode.OldImplicitResolution` is on. This is used to highlight differences in the + * two resolution schemes. 
* - * Also: If a compared type refers to a given or its module class, use + * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. */ - def isAsSpecificValueType(tp1: Type, tp2: Type)(using Context) = - if !preferGeneral || ctx.mode.is(Mode.OldOverloadingResolution) then - // Normal specificity test for overloading resultion (where `preferGeneral` is false) + def isAsGoodValueType(tp1: Type, tp2: Type)(using Context) = + val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) + if !preferGeneral || Feature.migrateTo3 && oldResolution then + // Normal specificity test for overloading resolution (where `preferGeneral` is false) // and in mode Scala3-migration when we compare with the old Scala 2 rules. isCompatible(tp1, tp2) else - val flip = new TypeMap { - def apply(t: Type) = t match { - case t @ AppliedType(tycon, args) => - def mapArg(arg: Type, tparam: TypeParamInfo) = - if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionNOf(arg :: Nil, defn.UnitType) - else arg - mapOver(t.derivedAppliedType(tycon, args.zipWithConserve(tycon.typeParams)(mapArg))) - case _ => mapOver(t) - } - } - def prepare(tp: Type) = tp.stripTypeVar match case tp: NamedType if tp.symbol.is(Module) && tp.symbol.sourceModule.is(Given) => tp.widen.widenToParents @@ -1836,11 +1835,27 @@ trait Applications extends Compatibility { val tp1p = prepare(tp1) val tp2p = prepare(tp2) - if newGivenRules then - (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) - else + + if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) + || oldResolution + || !compareGivens + then + // Intermediate rules: better means specialize, but map all type arguments downwards + // These are enabled for 3.0-3.4, and for all comparisons between old-style implicits, + // and in 3.5-migration when we compare with previous rules. 
+ val flip = new TypeMap: + def apply(t: Type) = t match + case t @ AppliedType(tycon, args) => + def mapArg(arg: Type, tparam: TypeParamInfo) = + if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionNOf(arg :: Nil, defn.UnitType) + else arg + mapOver(t.derivedAppliedType(tycon, args.zipWithConserve(tycon.typeParams)(mapArg))) + case _ => mapOver(t) (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) - end isAsSpecificValueType + else + // New rules: better means generalize + (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) + end isAsGoodValueType /** Widen the result type of synthetic given methods from the implementation class to the * type that's implemented. Example @@ -1900,9 +1915,8 @@ trait Applications extends Compatibility { def compareWithTypes(tp1: Type, tp2: Type) = val ownerScore = compareOwner(alt1.symbol.maybeOwner, alt2.symbol.maybeOwner) - - val winsType1 = isAsSpecific(alt1, tp1, alt2, tp2) - val winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) + val winsType1 = isAsGood(alt1, tp1, alt2, tp2) + val winsType2 = isAsGood(alt2, tp2, alt1, tp1) overload.println(i"compare($alt1, $alt2)? 
$tp1 $tp2 $ownerScore $winsType1 $winsType2") if winsType1 && winsType2 diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index dbfcb6d26e76..818512a4fa6f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -531,7 +531,7 @@ object Implicits: |must be more specific than $target""" :: Nil override def msg(using Context) = - super.msg.append(i"\nThe expected type $target is not specific enough, so no search was attempted") + super.msg.append("\nThe expected type $target is not specific enough, so no search was attempted") override def toString = s"TooUnspecific" end TooUnspecific @@ -1110,8 +1110,8 @@ trait Implicits: case result: SearchFailure if result.isAmbiguous => val deepPt = pt.deepenProto if (deepPt ne pt) inferImplicit(deepPt, argument, span) - else if (migrateTo3 && !ctx.mode.is(Mode.OldOverloadingResolution)) - withMode(Mode.OldOverloadingResolution)(inferImplicit(pt, argument, span)) match { + else if (migrateTo3 && !ctx.mode.is(Mode.OldImplicitResolution)) + withMode(Mode.OldImplicitResolution)(inferImplicit(pt, argument, span)) match { case altResult: SearchSuccess => report.migrationWarning( result.reason.msg @@ -1295,14 +1295,24 @@ trait Implicits: * 0 if neither alternative is preferred over the other */ def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = + def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else - val was = explore(compare(alt1.ref, alt2.ref, preferGeneral = true))(using searchContext()) - val now = explore(compare(alt1.ref, alt2.ref, preferGeneral = true))(using searchContext().addMode(Mode.NewGivenRules)) - if was != now then - println(i"change in preference for $pt between ${alt1.ref} and ${alt2.ref}, was: $was, now: $now at $srcPos") - now + val cmp 
= comp(using searchContext()) + if Feature.sourceVersion == SourceVersion.`3.5-migration` then + val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) + if cmp != prev then + def choice(c: Int) = c match + case -1 => "the second alternative" + case 1 => "the first alternative" + case _ => "none - it's ambiguous" + report.warning( + em"""Change in given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} + |Previous choice: ${choice(prev)} + |New choice : ${choice(cmp)}""", srcPos) + cmp + end compareAlternatives /** If `alt1` is also a search success, try to disambiguate as follows: * - If alt2 is preferred over alt1, pick alt2, otherwise return an diff --git a/docs/_docs/reference/changed-features/implicit-resolution.md b/docs/_docs/reference/changed-features/implicit-resolution.md index 1396ed04b6d3..0df8d2d60a7a 100644 --- a/docs/_docs/reference/changed-features/implicit-resolution.md +++ b/docs/_docs/reference/changed-features/implicit-resolution.md @@ -165,7 +165,22 @@ Condition (*) is new. It is necessary to ensure that the defined relation is tra [//]: # todo: expand with precise rules -**9.** The following change is currently enabled in `-source future`: + +**9.** Given disambiguation has changed. When comparing two givens that both match an expected type, we used to pick the most specific one, in alignment with +overloading resolution. From Scala 3.5 on, we pick the most general one instead. Compiling with Scala 3.5-migration will print a warning in all cases where the preference has changed. Example: +```scala +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +summon[A] // was ambiguous, will now return `given_A` +``` + +**10.** The following change is currently enabled in `-source future`: Implicit resolution now avoids generating recursive givens that can lead to an infinite loop at runtime. 
Here is an example: diff --git a/tests/neg/i15264.scala b/tests/neg/i15264.scala index 9dbc253cf33e..e13e1089dba3 100644 --- a/tests/neg/i15264.scala +++ b/tests/neg/i15264.scala @@ -1,3 +1,4 @@ +import language.`3.5` object priority: // lower number = higher priority class Prio0 extends Prio1 diff --git a/tests/run/given-triangle.scala b/tests/run/given-triangle.scala index 9d39689996ba..5ddba8df8b7b 100644 --- a/tests/run/given-triangle.scala +++ b/tests/run/given-triangle.scala @@ -1,3 +1,5 @@ +import language.future + class A class B extends A class C extends A diff --git a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index fb8f84d9f94d..14954eddf2ef 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -1,3 +1,5 @@ +import language.`3.5` + case class Show[T](val i: Int) object Show { def apply[T](implicit st: Show[T]): Int = st.i diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index b02412ddaf0c..61049de8e43e 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -1,5 +1,6 @@ /* These tests show various mechanisms available for implicit prioritization. 
*/ +import language.`3.5` class E[T](val str: String) // The type for which we infer terms below diff --git a/tests/warn/given-triangle.check b/tests/warn/given-triangle.check new file mode 100644 index 000000000000..69583830c2bc --- /dev/null +++ b/tests/warn/given-triangle.check @@ -0,0 +1,6 @@ +-- Warning: tests/warn/given-triangle.scala:16:18 ---------------------------------------------------------------------- +16 |@main def Test = f // warn + | ^ + | Change in given search preference for A between alternatives (given_A : A) and (given_B : B) + | Previous choice: the second alternative + | New choice : the first alternative diff --git a/tests/warn/given-triangle.scala b/tests/warn/given-triangle.scala new file mode 100644 index 000000000000..bc1a5c774f4f --- /dev/null +++ b/tests/warn/given-triangle.scala @@ -0,0 +1,16 @@ +//> using options -source 3.5-migration + +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +def f(using a: A, b: B, c: C) = + println(a.getClass) + println(b.getClass) + println(c.getClass) + +@main def Test = f // warn From 77218332e5b49f81d8d50a5e54869120919e2d16 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 16 Apr 2024 14:47:37 +0200 Subject: [PATCH 229/465] Fix rebase breakage --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 0b4ad5ff9bc8..b023e421f570 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1907,8 +1907,8 @@ trait Applications extends Compatibility { def comparePrefixes = val pre1 = widenPrefix(alt1) val pre2 = widenPrefix(alt2) - val winsPrefix1 = isAsSpecificValueType(pre1, pre2) - val winsPrefix2 = isAsSpecificValueType(pre2, pre1) + val winsPrefix1 = isAsGoodValueType(pre1, pre2) + val winsPrefix2 
= isAsGoodValueType(pre2, pre1) if winsPrefix1 == winsPrefix2 then 0 else if winsPrefix1 then 1 else -1 From c54dbbf570503ed8b5299691be6fb08e7e4679be Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 27 Apr 2024 22:37:52 +0200 Subject: [PATCH 230/465] Switch to new rules only if both sides are givens (rather than implicits). --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index b023e421f570..d949090dd514 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1733,7 +1733,7 @@ trait Applications extends Compatibility { def compare(alt1: TermRef, alt2: TermRef, preferGeneral: Boolean = false)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { record("resolveOverloaded.compare") - val compareGivens = alt1.symbol.is(Given) || alt2.symbol.is(Given) + val compareGivens = alt1.symbol.is(Given) && alt2.symbol.is(Given) /** Is alternative `alt1` with type `tp1` as good as alternative * `alt2` with type `tp2` ? 
From 8f75eabde8ae7a9f5bcfe009f9e0f8885fb2dabf Mon Sep 17 00:00:00 2001 From: rochala Date: Wed, 1 May 2024 11:27:40 +0200 Subject: [PATCH 231/465] Add regression test for issue 18632 --- tests/neg/i18632.check | 5 +++++ tests/neg/i18632.scala | 14 ++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 tests/neg/i18632.check create mode 100644 tests/neg/i18632.scala diff --git a/tests/neg/i18632.check b/tests/neg/i18632.check new file mode 100644 index 000000000000..a0fa733cf8e3 --- /dev/null +++ b/tests/neg/i18632.check @@ -0,0 +1,5 @@ +-- [E176] Potential Issue Warning: tests/neg/i18632.scala:12:2 --------------------------------------------------------- +12 | bar // warn + | ^^^ + | unused value of type String +No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i18632.scala b/tests/neg/i18632.scala new file mode 100644 index 000000000000..9253951b166a --- /dev/null +++ b/tests/neg/i18632.scala @@ -0,0 +1,14 @@ +//> using options -Wnonunit-statement -Werror + +class Context + +object Foo { + def run(program: Context ?=> String): Unit = ??? +} + +def bar(using Context): String = ??? 
+ +@main def run = Foo.run: + bar // warn + bar +// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) From 751cc2ffd0f2bac65ccc79565deb20f64b08cbd5 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Wed, 1 May 2024 12:02:40 +0200 Subject: [PATCH 232/465] Add warning for synchronized calls in value classes Co-Authored-By: Yoonjae Jeon <18438185+nox213@users.noreply.github.com> --- compiler/src/dotty/tools/dotc/typer/RefChecks.scala | 4 +++- tests/warn/i17493.check | 11 +++++++++++ tests/warn/i17493.scala | 4 ++++ 3 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 tests/warn/i17493.check create mode 100644 tests/warn/i17493.scala diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index cdfd137e5661..09ed2d91a788 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -1239,7 +1239,9 @@ object RefChecks { def checkAnyRefMethodCall(tree: Tree)(using Context) = if tree.symbol.exists && defn.topClasses.contains(tree.symbol.owner) - && (!ctx.owner.enclosingClass.exists || ctx.owner.enclosingClass.isPackageObject) then + && (!ctx.owner.enclosingClass.exists + || ctx.owner.enclosingClass.isPackageObject + || ctx.owner.enclosingClass.isValueClass) then report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) } diff --git a/tests/warn/i17493.check b/tests/warn/i17493.check new file mode 100644 index 000000000000..af5a0b8ad115 --- /dev/null +++ b/tests/warn/i17493.check @@ -0,0 +1,11 @@ +-- [E181] Potential Issue Warning: tests/warn/i17493.scala:3:11 -------------------------------------------------------- +3 | def g = synchronized { println("hello, world") } // warn + | ^^^^^^^^^^^^ + | Suspicious top-level unqualified call to synchronized + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by 
`-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as synchronized are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/warn/i17493.scala b/tests/warn/i17493.scala new file mode 100644 index 000000000000..74f2039b81b5 --- /dev/null +++ b/tests/warn/i17493.scala @@ -0,0 +1,4 @@ +//> using options -explain + class A(val s: String) extends AnyVal { + def g = synchronized { println("hello, world") } // warn + } From 509c2356a532c3bb3f1569dbbfb8bf6f695b2f4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Wed, 1 May 2024 13:29:06 +0200 Subject: [PATCH 233/465] Fix #18383: Never consider top-level `import`s as unused in the repl. --- .../dotty/tools/dotc/transform/CheckUnused.scala | 15 +++++++++------ compiler/test-resources/repl/i18383 | 14 ++++++++++++++ .../test/dotty/tools/repl/ReplCompilerTests.scala | 9 ++++++++- 3 files changed, 31 insertions(+), 7 deletions(-) create mode 100644 compiler/test-resources/repl/i18383 diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 7cff6fa5f1f0..c547188c50a1 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -20,6 +20,7 @@ import dotty.tools.dotc.core.Types.{AnnotatedType, ConstantType, NoType, TermRef import dotty.tools.dotc.core.Flags.flagsString import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.core.NameOps.isReplWrapperName import dotty.tools.dotc.transform.MegaPhase.MiniPhase import dotty.tools.dotc.core.Annotations import dotty.tools.dotc.core.Definitions 
@@ -423,9 +424,11 @@ object CheckUnused: def registerImport(imp: tpd.Import)(using Context): Unit = if !tpd.languageImport(imp.expr).nonEmpty && !imp.isGeneratedByEnum && !isTransparentAndInline(imp) then impInScope.top += imp - unusedImport ++= imp.selectors.filter { s => - !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) && !isImportIgnored(imp, s) - } + if currScopeType.top != ScopeType.ReplWrapper then // #18383 Do not report top-level import's in the repl as unused + unusedImport ++= imp.selectors.filter { s => + !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) && !isImportIgnored(imp, s) + } + end registerImport /** Register (or not) some `val` or `def` according to the context, scope and flags */ def registerDef(memDef: tpd.MemberDef)(using Context): Unit = @@ -794,12 +797,13 @@ object CheckUnused: enum ScopeType: case Local case Template + case ReplWrapper case Other object ScopeType: /** return the scope corresponding to the enclosing scope of the given tree */ - def fromTree(tree: tpd.Tree): ScopeType = tree match - case _:tpd.Template => Template + def fromTree(tree: tpd.Tree)(using Context): ScopeType = tree match + case tree: tpd.Template => if tree.symbol.name.isReplWrapperName then ReplWrapper else Template case _:tpd.Block => Local case _ => Other @@ -810,4 +814,3 @@ object CheckUnused: val Empty = UnusedResult(Set.empty) end CheckUnused - diff --git a/compiler/test-resources/repl/i18383 b/compiler/test-resources/repl/i18383 new file mode 100644 index 000000000000..81d3c9d5a7fd --- /dev/null +++ b/compiler/test-resources/repl/i18383 @@ -0,0 +1,14 @@ +scala>:settings -Wunused:all + +scala> import scala.collection.* + +scala> class Foo { import scala.util.*; println("foo") } +1 warning found +-- Warning: -------------------------------------------------------------------- +1 | class Foo { import scala.util.*; println("foo") } + | ^ + | unused import +// defined class Foo + +scala> { import scala.util.*; "foo" } +val res0: String = 
foo diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index ecae111604cf..67e63d0156a5 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -20,6 +20,14 @@ class ReplCompilerTests extends ReplTest: assertEquals("def foo: 1", storedOutput().trim) } + @Test def i18383NoWarnOnUnusedImport: Unit = { + initially { + run("import scala.collection.*") + } andThen { + println(lines().mkString("* ", "\n * ", "")) + } + } + @Test def compileTwo = initially { run("def foo: 1 = 1") @@ -509,4 +517,3 @@ class ReplHighlightTests extends ReplTest(ReplTest.defaultOptions.filterNot(_.st case class Tree(left: Tree, right: Tree) def deepTree(depth: Int): Tree deepTree(300)""") - From 7a9102a215b4f83547077f24ca92a5aba55a3744 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Wed, 1 May 2024 14:49:04 +0200 Subject: [PATCH 234/465] Generalize checkAnyRefMethodCall --- .../dotty/tools/dotc/typer/RefChecks.scala | 14 +++++------ tests/warn/i17266.check | 11 +++++++++ tests/warn/i17266.scala | 24 ++++++++++++------- tests/warn/i17493.check | 8 +++---- tests/warn/i17493.scala | 3 ++- 5 files changed, 38 insertions(+), 22 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 09ed2d91a788..2bf4b959ebca 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -1236,14 +1236,12 @@ object RefChecks { end checkImplicitNotFoundAnnotation - def checkAnyRefMethodCall(tree: Tree)(using Context) = - if tree.symbol.exists - && defn.topClasses.contains(tree.symbol.owner) - && (!ctx.owner.enclosingClass.exists - || ctx.owner.enclosingClass.isPackageObject - || ctx.owner.enclosingClass.isValueClass) then - report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) - + def 
checkAnyRefMethodCall(tree: Tree)(using Context): Unit = + if tree.symbol.exists && defn.topClasses.contains(tree.symbol.owner) then + tree.tpe match + case tp: NamedType if tp.prefix.typeSymbol != ctx.owner.enclosingClass => + report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) + case _ => () } import RefChecks.* diff --git a/tests/warn/i17266.check b/tests/warn/i17266.check index 716cd531dd0a..ce8626b14225 100644 --- a/tests/warn/i17266.check +++ b/tests/warn/i17266.check @@ -96,3 +96,14 @@ | resolved to calls on Predef or on imported methods. This might not be what | you intended. ------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Warning: tests/warn/i17266.scala:148:2 ------------------------------------------------------- +148 | synchronized { // warn + | ^^^^^^^^^^^^ + | Suspicious top-level unqualified call to synchronized + |------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as synchronized are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. 
+ ------------------------------------------------------------------------------------------------------------------- diff --git a/tests/warn/i17266.scala b/tests/warn/i17266.scala index 7e0c9f1b653b..f5d3d02b3661 100644 --- a/tests/warn/i17266.scala +++ b/tests/warn/i17266.scala @@ -43,13 +43,13 @@ object Test6: object Test7: import MyLib.* def test7 = - synchronized { // not an error + synchronized { // not an error; resolves to `Test7.synchronized` println("hello") } /* object Test7b: - def test8 = + def test7b = import MyLib.* synchronized { // already an error: Reference to synchronized is ambiguous. println("hello") @@ -62,21 +62,21 @@ class Test8: } class Test9: - def test5 = + def test9 = synchronized { // not an error println("hello") } class Test10: import MyLib.* - synchronized { // not an error + synchronized { // not an error; resolves to `this.synchronized` println("hello") } class Test11: import MyLib.* - def test7 = - synchronized { // not an error + def test11 = + synchronized { // not an error; resolves to `this.synchronized` println("hello") } @@ -86,14 +86,14 @@ trait Test12: } trait Test13: - def test5 = + def test13 = synchronized { // not an error println("hello") } trait Test14: import MyLib.* - synchronized { // not an error + synchronized { // not an error; resolves to `this.synchronized` println("hello") } @@ -141,4 +141,10 @@ def test26 = hashCode() // warn def test27 = - 1.hashCode()// not an error (should be? probably not) \ No newline at end of file + 1.hashCode()// not an error (should be? 
probably not) + +def test28 = + import MyLib.* + synchronized { // warn + println("hello") + } diff --git a/tests/warn/i17493.check b/tests/warn/i17493.check index af5a0b8ad115..8a4c102980fe 100644 --- a/tests/warn/i17493.check +++ b/tests/warn/i17493.check @@ -1,7 +1,7 @@ --- [E181] Potential Issue Warning: tests/warn/i17493.scala:3:11 -------------------------------------------------------- -3 | def g = synchronized { println("hello, world") } // warn - | ^^^^^^^^^^^^ - | Suspicious top-level unqualified call to synchronized +-- [E181] Potential Issue Warning: tests/warn/i17493.scala:4:10 -------------------------------------------------------- +4 | def g = synchronized { println("hello, world") } // warn + | ^^^^^^^^^^^^ + | Suspicious top-level unqualified call to synchronized |--------------------------------------------------------------------------------------------------------------------- | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/warn/i17493.scala b/tests/warn/i17493.scala index 74f2039b81b5..69816467d3ae 100644 --- a/tests/warn/i17493.scala +++ b/tests/warn/i17493.scala @@ -1,4 +1,5 @@ //> using options -explain class A(val s: String) extends AnyVal { - def g = synchronized { println("hello, world") } // warn + // def f = eq("hello, world") // no warning for now because `eq` is inlined + def g = synchronized { println("hello, world") } // warn } From 27cc89d8b8a315649e980158fe4ff2dfb2eda5e4 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Wed, 1 May 2024 16:45:09 +0200 Subject: [PATCH 235/465] Fix indentation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sébastien Doeraene --- tests/warn/i17493.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/warn/i17493.scala b/tests/warn/i17493.scala index 69816467d3ae..f76f3aeb02af 100644 --- 
a/tests/warn/i17493.scala +++ b/tests/warn/i17493.scala @@ -1,5 +1,5 @@ //> using options -explain - class A(val s: String) extends AnyVal { +class A(val s: String) extends AnyVal { // def f = eq("hello, world") // no warning for now because `eq` is inlined def g = synchronized { println("hello, world") } // warn - } +} From ae82716a207e07443392ff37392b9c3213ddbbce Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 May 2024 18:11:38 +0200 Subject: [PATCH 236/465] Fix outerSelect in Inliner Fixes #20300 --- compiler/src/dotty/tools/dotc/inlines/Inliner.scala | 2 +- tests/pos/i20300.scala | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i20300.scala diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 4ee3682626c4..7c79e972c126 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -315,7 +315,7 @@ class Inliner(val call: tpd.Tree)(using Context): case Super(qual, _) => qual case pre => pre val preLevel = classNestingLevel(inlinedMethod.owner) - if preLevel > level then outerSelect(pre, inlinedMethod.owner, preLevel - level, selfSym.info) + if preLevel > level then outerSelect(pre, inlinedMethod.owner.enclosingClass, preLevel - level, selfSym.info) else pre val binding = accountForOpaques( diff --git a/tests/pos/i20300.scala b/tests/pos/i20300.scala new file mode 100644 index 000000000000..721b79940ba1 --- /dev/null +++ b/tests/pos/i20300.scala @@ -0,0 +1,8 @@ +trait T: + + def v() = () + + trait F: + def f(): Unit = + inline def op(): Unit = v() + op() \ No newline at end of file From 4d07f58c8540a76feb25bb706ddf063dfde3c8a6 Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Wed, 1 May 2024 16:39:19 +0000 Subject: [PATCH 237/465] Add a run test --- tests/explicit-nulls/run/from-nullable.check | 2 ++ tests/explicit-nulls/run/from-nullable.scala | 
17 +++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 tests/explicit-nulls/run/from-nullable.check create mode 100644 tests/explicit-nulls/run/from-nullable.scala diff --git a/tests/explicit-nulls/run/from-nullable.check b/tests/explicit-nulls/run/from-nullable.check new file mode 100644 index 000000000000..43d418e64a03 --- /dev/null +++ b/tests/explicit-nulls/run/from-nullable.check @@ -0,0 +1,2 @@ +hello +None diff --git a/tests/explicit-nulls/run/from-nullable.scala b/tests/explicit-nulls/run/from-nullable.scala new file mode 100644 index 000000000000..6f01e402e790 --- /dev/null +++ b/tests/explicit-nulls/run/from-nullable.scala @@ -0,0 +1,17 @@ +object Test: + import scala.annotation.experimental + + @experimental def main(args: Array[String]): Unit = + val s1: String | Null = "hello" + val s2: String | Null = null + + val opts1: Option[String] = Option.fromNullable(s1) + val opts2: Option[String] = Option.fromNullable(s2) + + opts1 match + case Some(s) => println(s) + case None => println("None") + + opts2 match + case Some(s) => println(s) + case None => println("None") From 850f77ed8da368af47763c1e6e8767f4334ae688 Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Wed, 1 May 2024 17:59:30 +0000 Subject: [PATCH 238/465] Make opt non-inlined --- library/src/scala/runtime/stdLibPatches/Predef.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 7684ca2700f4..7abd92e408f8 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -63,7 +63,7 @@ object Predef: inline def ne(inline y: AnyRef | Null): Boolean = !(x eq y) - extension (inline opt: Option.type) + extension (opt: Option.type) @experimental inline def fromNullable[T](t: T | Null): Option[T] = Option(t).asInstanceOf[Option[T]] end Predef From 
86031461d1936679cbbafc956250dcfbc2c11615 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 2 May 2024 11:08:26 +0200 Subject: [PATCH 239/465] remove non-deterministic cancellation of async TASTy in sbt-test how-i-fixed-it: Originally these tests were written before we implemented async TASTy writing. This meant that we blocked the main thread at the end of ExtractAPI until TASTy was written. This meant that -Ystop-after:firstTransform would prevent the compiler reaching the backend, but stop after we knew that TASTy was written to a-early.jar Originally we did this to explicitly communicate that TASTy comes from a-early.jar, rather than genBCode output. In reality, it doesn't assert anything stronger than a comment would, because we manually fix the classpath to only be a-early.jar. After we added async TASTy writing, this test became non-deterministic, because we cancel async TASTy writing at the end of a run without synchronizing. So its possible TASTy isn't written by the time we cancel after firstTransform. So instead, we remove -Ystop-after, guaranteeing that 'a/compile' does not finish until we synchronize async TASTy in genBCode. --- sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt | 6 ++++-- sbt-test/pipelining/Yearly-tasty-output/build.sbt | 5 +++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt b/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt index c0c726ce6a02..e350fa7e7def 100644 --- a/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt @@ -1,8 +1,10 @@ -// defines a inline method +// NOTE: in this test, we are explictly fixing the classpath of project `b` to be `a-early.jar` +// to manually test pipelining without sbt/zinc managing the classpath. + +// defines a inline method. 
lazy val a = project.in(file("a")) .settings( scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), - scalacOptions += "-Ystop-after:firstTransform", scalacOptions += "-Ycheck:all", ) diff --git a/sbt-test/pipelining/Yearly-tasty-output/build.sbt b/sbt-test/pipelining/Yearly-tasty-output/build.sbt index 62990c616071..5cfe30936ea9 100644 --- a/sbt-test/pipelining/Yearly-tasty-output/build.sbt +++ b/sbt-test/pipelining/Yearly-tasty-output/build.sbt @@ -1,8 +1,10 @@ +// NOTE: in this test, we are explictly fixing the classpath of project `c` to be `a-early.jar:b-early-out` +// to manually test pipelining without sbt/zinc managing the classpath. + // early out is a jar lazy val a = project.in(file("a")) .settings( scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), - scalacOptions += "-Ystop-after:firstTransform", scalacOptions += "-Ycheck:all", ) @@ -10,7 +12,6 @@ lazy val a = project.in(file("a")) lazy val b = project.in(file("b")) .settings( scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "b-early-out").toString), - scalacOptions += "-Ystop-after:firstTransform", scalacOptions += "-Ycheck:all", ) From be3458a68784e051b324771db5b5059eb419f4f5 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 2 May 2024 15:36:53 +0200 Subject: [PATCH 240/465] Add FlexibleType to quote library --- .../src/dotty/tools/dotc/core/Types.scala | 5 +-- .../quoted/runtime/impl/QuotesImpl.scala | 23 ++++++++++++- .../runtime/impl/printers/Extractors.scala | 2 ++ .../runtime/impl/printers/SourceCode.scala | 5 +++ docs/_docs/internals/type-system.md | 1 + library/src/scala/quoted/Quotes.scala | 34 +++++++++++++++++-- project/MiMaFilters.scala | 3 ++ .../run/tasty-flexible-type.check | 7 ++++ .../run/tasty-flexible-type/quoted_1.scala | 34 +++++++++++++++++++ .../run/tasty-flexible-type/quoted_2.scala | 10 ++++++ 10 files changed, 119 insertions(+), 5 
deletions(-) create mode 100644 tests/explicit-nulls/run/tasty-flexible-type.check create mode 100644 tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala create mode 100644 tests/explicit-nulls/run/tasty-flexible-type/quoted_2.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 3dcbf2536509..a6136a20cf32 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -77,6 +77,7 @@ object Types extends TypeUtils { * | +- TypeVar * | +- HKTypeLambda * | +- MatchType + * | +- FlexibleType * | * +- GroundType -+- AndType * +- OrType @@ -3468,7 +3469,7 @@ object Types extends TypeUtils { * `T | Null .. T`, so that `T | Null <: FlexibleType(T) <: T`. * A flexible type will be erased to its original type `T`. */ - case class FlexibleType(lo: Type, hi: Type) extends CachedProxyType with ValueType { + case class FlexibleType protected(lo: Type, hi: Type) extends CachedProxyType with ValueType { override def underlying(using Context): Type = hi @@ -3481,7 +3482,7 @@ object Types extends TypeUtils { } object FlexibleType { - def apply(tp: Type)(using Context): Type = tp match { + def apply(tp: Type)(using Context): FlexibleType = tp match { case ft: FlexibleType => ft case _ => // val tp1 = tp.stripNull() diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 5d51c6e274f7..eb300a0512b5 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2230,7 +2230,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def isErased: Boolean = false def isImplicit: Boolean = self.isImplicitMethod def isContextual: Boolean = self.isContextualMethod - def methodTypeKind: MethodTypeKind = + def methodTypeKind: MethodTypeKind = self.companion match case Types.ContextualMethodType 
=> MethodTypeKind.Contextual case Types.ImplicitMethodType => MethodTypeKind.Implicit @@ -2343,6 +2343,27 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def unapply(x: NoPrefix): true = true end NoPrefix + type FlexibleType = dotc.core.Types.FlexibleType + + object FlexibleTypeTypeTest extends TypeTest[TypeRepr, FlexibleType]: + def unapply(x: TypeRepr): Option[FlexibleType & x.type] = x match + case x: (Types.FlexibleType & x.type) => Some(x) + case _ => None + end FlexibleTypeTypeTest + + object FlexibleType extends FlexibleTypeModule: + def apply(tp: TypeRepr): FlexibleType = Types.FlexibleType(tp) + def unapply(x: FlexibleType): Some[TypeRepr] = Some(x.hi) + end FlexibleType + + given FlexibleTypeMethods: FlexibleTypeMethods with + extension (self: FlexibleType) + def underlying: TypeRepr = self.hi + def lo: TypeRepr = self.lo + def hi: TypeRepr = self.hi + end extension + end FlexibleTypeMethods + type Constant = dotc.core.Constants.Constant object Constant extends ConstantModule diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index eac85244d97b..acf66fcf2009 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -239,6 +239,8 @@ object Extractors { this += "NoPrefix()" case MatchCase(pat, rhs) => this += "MatchCase(" += pat += ", " += rhs += ")" + case FlexibleType(tp) => + this += "FlexibleType(" += tp += ")" } def visitSignature(sig: Signature): this.type = { diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index 9aec7fc17ed7..9503177ff738 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -1247,6 +1247,11 @@ object SourceCode { this += 
" => " printType(rhs) + case FlexibleType(tp) => + this += "(" + printType(tp) + this += ")?" + case _ => cannotBeShownAsSource(tpe.show(using Printer.TypeReprStructure)) } diff --git a/docs/_docs/internals/type-system.md b/docs/_docs/internals/type-system.md index d2c0cd869e61..e3f02654953e 100644 --- a/docs/_docs/internals/type-system.md +++ b/docs/_docs/internals/type-system.md @@ -36,6 +36,7 @@ Type -+- ProxyType --+- NamedType ----+--- TypeRef | +- TypeVar | +- HKTypeLambda | +- MatchType + | +- FlexibleType | +- GroundType -+- AndType +- OrType diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index fa194f99143a..d048d8d728d5 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -211,7 +211,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * +- MatchCase * +- TypeBounds * +- NoPrefix - * + * +- FlexibleType + * * +- MethodTypeKind -+- Contextual * +- Implicit * +- Plain @@ -3273,7 +3274,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def isImplicit: Boolean /** Is this the type of parameter clause like `(using X1, ..., Xn)` or `(using x1: X1, x2: X2, ... )` */ def isContextual: Boolean - /** Returns a MethodTypeKind object representing the implicitness of the MethodType parameter clause. */ + /** Returns a MethodTypeKind object representing the implicitness of the MethodType parameter clause. 
*/ def methodTypeKind: MethodTypeKind /** Is this the type of erased parameter clause `(erased x1: X1, ..., xn: Xn)` */ @deprecated("Use `hasErasedParams` and `erasedParams`", "3.4") @@ -3428,6 +3429,35 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def unapply(x: NoPrefix): true } + // ----- Flexible Type -------------------------------------------- + + /** Flexible types for explicit nulls */ + type FlexibleType <: TypeRepr + + /** `TypeTest` that allows testing at runtime in a pattern match if a `TypeRepr` is a `FlexibleType` */ + given FlexibleTypeTypeTest: TypeTest[TypeRepr, FlexibleType] + + /** Module object of `type FlexibleType` */ + val FlexibleType: FlexibleTypeModule + + /** Methods of the module object `val FlexibleType` */ + trait FlexibleTypeModule { this: FlexibleType.type => + def apply(tp: TypeRepr): FlexibleType + def unapply(x: FlexibleType): Option[TypeRepr] + } + + /** Makes extension methods on `FlexibleType` available without any imports */ + given FlexibleTypeMethods: FlexibleTypeMethods + + /** Extension methods of `FlexibleType` */ + trait FlexibleTypeMethods: + extension (self: FlexibleType) + def underlying: TypeRepr + def lo: TypeRepr + def hi: TypeRepr + end extension + end FlexibleTypeMethods + /////////////// // CONSTANTS // /////////////// diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 5ccb70ad6fdf..85dfc467e3e5 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -52,6 +52,9 @@ object MiMaFilters { Build.mimaPreviousDottyVersion -> // Seq.empty, // We should never break backwards compatibility Seq( // `ReversedMissingMethodProblem`s are acceptable. See comment in `Breaking changes since last LTS`. 
+ ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleType"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleTypeTypeTest"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleTypeMethods"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#SymbolMethods.isSuperAccessor"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.MethodTypeKind"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeModule.apply"), diff --git a/tests/explicit-nulls/run/tasty-flexible-type.check b/tests/explicit-nulls/run/tasty-flexible-type.check new file mode 100644 index 000000000000..dcb4db1129d0 --- /dev/null +++ b/tests/explicit-nulls/run/tasty-flexible-type.check @@ -0,0 +1,7 @@ +Inlined(None, Nil, Literal(StringConstant("hello"))) +ConstantType(StringConstant("hello")) + +Inlined(None, Nil, Apply(Select(Literal(StringConstant("world")), "trim"), Nil)) +FlexibleType(TypeRef(ThisType(TypeRef(NoPrefix(), "lang")), "String")) + +FlexibleType(TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "java")), "lang"), "String")) diff --git a/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala b/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala new file mode 100644 index 000000000000..3ab5eac144f0 --- /dev/null +++ b/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala @@ -0,0 +1,34 @@ +import scala.quoted.* + +object Macros { + + implicit inline def printTree[T](inline x: T): Unit = + ${ impl('x) } + + def impl[T](x: Expr[T])(using Quotes) : Expr[Unit] = { + import quotes.reflect.* + + val tree = x.asTerm + val treeStr = Expr(tree.show(using Printer.TreeStructure)) + val treeTpeStr = Expr(tree.tpe.show(using Printer.TypeReprStructure)) + + '{ + println(${treeStr}) + println(${treeTpeStr}) + println() + } + } + + 
inline def theTestBlock : Unit = ${ theTestBlockImpl } + + def theTestBlockImpl(using qctx : Quotes) : Expr[Unit] = { + import quotes.reflect.* + + val ft1 = FlexibleType(TypeRepr.of[String]) + val ft1e = Expr(ft1.show(using Printer.TypeReprStructure)) + + '{ + println(${ft1e}) + } + } +} diff --git a/tests/explicit-nulls/run/tasty-flexible-type/quoted_2.scala b/tests/explicit-nulls/run/tasty-flexible-type/quoted_2.scala new file mode 100644 index 000000000000..7b22c541ee58 --- /dev/null +++ b/tests/explicit-nulls/run/tasty-flexible-type/quoted_2.scala @@ -0,0 +1,10 @@ + +import Macros.* + +object Test { + def main(args: Array[String]): Unit = { + printTree("hello") + printTree("world".trim()) + theTestBlock + } +} From b644d303e08bfa4b70498df7d3c170ec3889376c Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 5 May 2024 22:25:08 +0200 Subject: [PATCH 241/465] Refine prioritization rules between givens and implicits In the new system, givens always beat implicits when comparing value types. This is necessary to maintain two invariants in the new system: - When comparing old-style implicits nothing changes, we still use the old rules. - The isAsGood comparison is transitive. Exception: NotGiven has to be treated at lower priority. 
--- .../dotty/tools/dotc/typer/Applications.scala | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index d949090dd514..aa7659e78146 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1733,8 +1733,6 @@ trait Applications extends Compatibility { def compare(alt1: TermRef, alt2: TermRef, preferGeneral: Boolean = false)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { record("resolveOverloaded.compare") - val compareGivens = alt1.symbol.is(Given) && alt2.symbol.is(Given) - /** Is alternative `alt1` with type `tp1` as good as alternative * `alt2` with type `tp2` ? * @@ -1749,7 +1747,7 @@ trait Applications extends Compatibility { * from below by Li and from above by Ui. * 3. A member of any other type `tp1` is: * a. always as good as a method or a polymorphic method. - * b. as good as a member of any other type `tp2` is `asGoodValueType(tp1, tp2) = true` + * b. 
as good as a member of any other type `tp2` if `asGoodValueType(tp1, tp2) = true` */ def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { tp1 match @@ -1776,13 +1774,17 @@ trait Applications extends Compatibility { isAsGood(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) } case _ => // (3) + def isGiven(alt: TermRef) = + alt1.symbol.is(Given) && alt.symbol != defn.NotGivenClass + def compareValues(tp1: Type, tp2: Type)(using Context) = + isAsGoodValueType(tp1, tp2, isGiven(alt1), isGiven(alt2)) tp2 match case tp2: MethodType => true // (3a) case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a) case tp2: PolyType => // (3b) - explore(isAsGoodValueType(tp1, instantiateWithTypeVars(tp2))) + explore(compareValues(tp1, instantiateWithTypeVars(tp2))) case _ => // 3b) - isAsGoodValueType(tp1, tp2) + compareValues(tp1, tp2) } /** Test whether value type `tp1` is as good as value type `tp2`. @@ -1812,6 +1814,7 @@ trait Applications extends Compatibility { * for overloading resolution (when `preferGeneral is false), and the opposite relation * `U <: T` or `U convertible to `T` for implicit disambiguation between givens * (when `preferGeneral` is true). For old-style implicit values, the 3.4 behavior is kept. + * If one of the alternatives is a given and the other is an implicit, the given wins. * * - In Scala 3.5-migration, use the 3.5 scheme normally, and the 3.4 scheme if * `Mode.OldImplicitResolution` is on. This is used to highlight differences in the @@ -1820,7 +1823,7 @@ trait Applications extends Compatibility { * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. 
*/ - def isAsGoodValueType(tp1: Type, tp2: Type)(using Context) = + def isAsGoodValueType(tp1: Type, tp2: Type, alt1isGiven: Boolean, alt2isGiven: Boolean)(using Context): Boolean = val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) if !preferGeneral || Feature.migrateTo3 && oldResolution then // Normal specificity test for overloading resolution (where `preferGeneral` is false) @@ -1838,7 +1841,7 @@ trait Applications extends Compatibility { if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) || oldResolution - || !compareGivens + || !alt1isGiven && !alt2isGiven then // Intermediate rules: better means specialize, but map all type arguments downwards // These are enabled for 3.0-3.4, and for all comparisons between old-style implicits, @@ -1853,8 +1856,9 @@ trait Applications extends Compatibility { case _ => mapOver(t) (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) else - // New rules: better means generalize - (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) + // New rules: better means generalize, givens always beat implicits + if alt1isGiven != alt2isGiven then alt1isGiven + else (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) end isAsGoodValueType /** Widen the result type of synthetic given methods from the implementation class to the @@ -1907,8 +1911,8 @@ trait Applications extends Compatibility { def comparePrefixes = val pre1 = widenPrefix(alt1) val pre2 = widenPrefix(alt2) - val winsPrefix1 = isAsGoodValueType(pre1, pre2) - val winsPrefix2 = isAsGoodValueType(pre2, pre1) + val winsPrefix1 = isCompatible(pre1, pre2) + val winsPrefix2 = isCompatible(pre2, pre1) if winsPrefix1 == winsPrefix2 then 0 else if winsPrefix1 then 1 else -1 From bc26c5176a42281b6621550237818c91e7acdecd Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 17 Apr 2024 23:01:54 +0200 Subject: [PATCH 242/465] Delay roll-out of new prioritization scheme: Now: 3.5: old scheme but warn if there are changes in the future 3.6-migration: new scheme, warn if 
prioritization has changed 3.6: new scheme, no warning --- .../tools/dotc/config/SourceVersion.scala | 1 + .../dotty/tools/dotc/typer/Applications.scala | 13 ++++---- .../dotty/tools/dotc/typer/Implicits.scala | 32 +++++++++++++++---- .../runtime/stdLibPatches/language.scala | 14 ++++++++ tests/neg/i15264.scala | 2 +- tests/run/implicit-specifity.scala | 2 +- tests/run/implied-priority.scala | 2 +- tests/warn/given-triangle.check | 4 +-- tests/warn/given-triangle.scala | 2 +- 9 files changed, 52 insertions(+), 20 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 7a464d331930..3a44021af2df 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -11,6 +11,7 @@ enum SourceVersion: case `3.3-migration`, `3.3` case `3.4-migration`, `3.4` case `3.5-migration`, `3.5` + case `3.6-migration`, `3.6` // !!! Keep in sync with scala.runtime.stdlibPatches.language !!! case `future-migration`, `future` diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index aa7659e78146..d91c4592a77b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1795,7 +1795,7 @@ trait Applications extends Compatibility { * available in 3.0-migration if mode `Mode.OldImplicitResolution` is turned on as well. * It is used to highlight differences between Scala 2 and 3 behavior. * - * - In Scala 3.0-3.4, the behavior is as follows: `T <:p U` iff there is an impliit conversion + * - In Scala 3.0-3.5, the behavior is as follows: `T <:p U` iff there is an impliit conversion * from `T` to `U`, or * * flip(T) <: flip(U) @@ -1810,15 +1810,14 @@ trait Applications extends Compatibility { * of parameters are not affected. 
So `T <: U` would imply `Set[Cmp[U]] <:p Set[Cmp[T]]`, * as usual, because `Set` is non-variant. * - * - From Scala 3.5, `T <:p U` means `T <: U` or `T` convertible to `U` + * - From Scala 3.6, `T <:p U` means `T <: U` or `T` convertible to `U` * for overloading resolution (when `preferGeneral is false), and the opposite relation * `U <: T` or `U convertible to `T` for implicit disambiguation between givens * (when `preferGeneral` is true). For old-style implicit values, the 3.4 behavior is kept. * If one of the alternatives is a given and the other is an implicit, the given wins. * - * - In Scala 3.5-migration, use the 3.5 scheme normally, and the 3.4 scheme if - * `Mode.OldImplicitResolution` is on. This is used to highlight differences in the - * two resolution schemes. + * - In Scala 3.5 and Scala 3.6-migration, we issue a warning if the result under + * Scala 3.6 differ wrt to the old behavior up to 3.5. * * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. @@ -1844,8 +1843,8 @@ trait Applications extends Compatibility { || !alt1isGiven && !alt2isGiven then // Intermediate rules: better means specialize, but map all type arguments downwards - // These are enabled for 3.0-3.4, and for all comparisons between old-style implicits, - // and in 3.5-migration when we compare with previous rules. + // These are enabled for 3.0-3.5, and for all comparisons between old-style implicits, + // and in 3.5 amd 3.6-migration when we compare with previous rules. 
val flip = new TypeMap: def apply(t: Type) = t match case t @ AppliedType(tycon, args) => diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 818512a4fa6f..6180fa1a5e52 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1293,25 +1293,43 @@ trait Implicits: * @return a number > 0 if `alt1` is preferred over `alt2` * a number < 0 if `alt2` is preferred over `alt1` * 0 if neither alternative is preferred over the other + * The behavior depends on the source version + * before 3.5: compare with preferGeneral = false + * 3.5: compare twice with preferGeneral = false and true, warning if result is different, + * return old result with preferGeneral = false + * 3.6-migration: compare twice with preferGeneral = false and true, warning if result is different, + * return new result with preferGeneral = true + * 3.6 and higher: compare with preferGeneral = true + * */ def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else - val cmp = comp(using searchContext()) - if Feature.sourceVersion == SourceVersion.`3.5-migration` then + var cmp = comp(using searchContext()) + val sv = Feature.sourceVersion + if sv == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if cmp != prev then def choice(c: Int) = c match case -1 => "the second alternative" case 1 => "the first alternative" case _ => "none - it's ambiguous" - report.warning( - em"""Change in given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} - |Previous choice: ${choice(prev)} - |New choice : ${choice(cmp)}""", srcPos) - cmp + if sv == SourceVersion.`3.5` then + 
report.warning( + em"""Given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} will change + |Current choice : ${choice(prev)} + |New choice from Scala 3.6: ${choice(cmp)}""", srcPos) + prev + else + report.warning( + em"""Change in given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} + |Previous choice : ${choice(prev)} + |New choice from Scala 3.6: ${choice(cmp)}""", srcPos) + cmp + else cmp + else cmp end compareAlternatives /** If `alt1` is also a search success, try to disambiguate as follows: diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 3c9c172918d2..372e1e34bb85 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -260,6 +260,20 @@ object language: @compileTimeOnly("`3.5` can only be used at compile time in import statements") object `3.5` + /** Set source version to 3.6-migration. + * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.6-migration` can only be used at compile time in import statements") + object `3.6-migration` + + /** Set source version to 3.6 + * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.6` can only be used at compile time in import statements") + object `3.6` + // !!! Keep in sync with dotty.tools.dotc.config.SourceVersion !!! 
// Also add tests in `tests/pos/source-import-3-x.scala` and `tests/pos/source-import-3-x-migration.scala` diff --git a/tests/neg/i15264.scala b/tests/neg/i15264.scala index e13e1089dba3..825e74701f73 100644 --- a/tests/neg/i15264.scala +++ b/tests/neg/i15264.scala @@ -1,4 +1,4 @@ -import language.`3.5` +import language.`3.6` object priority: // lower number = higher priority class Prio0 extends Prio1 diff --git a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index 14954eddf2ef..da90110c9866 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -1,4 +1,4 @@ -import language.`3.5` +import language.`3.6` case class Show[T](val i: Int) object Show { diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index 61049de8e43e..15f6a40a27ef 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -1,6 +1,6 @@ /* These tests show various mechanisms available for implicit prioritization. 
*/ -import language.`3.5` +import language.`3.6` class E[T](val str: String) // The type for which we infer terms below diff --git a/tests/warn/given-triangle.check b/tests/warn/given-triangle.check index 69583830c2bc..e849f9d4d642 100644 --- a/tests/warn/given-triangle.check +++ b/tests/warn/given-triangle.check @@ -2,5 +2,5 @@ 16 |@main def Test = f // warn | ^ | Change in given search preference for A between alternatives (given_A : A) and (given_B : B) - | Previous choice: the second alternative - | New choice : the first alternative + | Previous choice : the second alternative + | New choice from Scala 3.6: the first alternative diff --git a/tests/warn/given-triangle.scala b/tests/warn/given-triangle.scala index bc1a5c774f4f..ee4888ed1e06 100644 --- a/tests/warn/given-triangle.scala +++ b/tests/warn/given-triangle.scala @@ -1,4 +1,4 @@ -//> using options -source 3.5-migration +//> using options -source 3.6-migration class A class B extends A From 33f801bc3da1dbc4c0ff5322546c44a7b8f3602b Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 5 May 2024 18:25:46 +0200 Subject: [PATCH 243/465] Test extract from reactive-mongo This will now be ambiguous. 
--- tests/warn/bson.check | 10 ++++++++++ tests/warn/bson/Test.scala | 5 +++++ tests/warn/bson/bson.scala | 29 +++++++++++++++++++++++++++++ 3 files changed, 44 insertions(+) create mode 100644 tests/warn/bson.check create mode 100644 tests/warn/bson/Test.scala create mode 100644 tests/warn/bson/bson.scala diff --git a/tests/warn/bson.check b/tests/warn/bson.check new file mode 100644 index 000000000000..258ac4b4ff2c --- /dev/null +++ b/tests/warn/bson.check @@ -0,0 +1,10 @@ +-- Warning: tests/warn/bson/Test.scala:5:60 ---------------------------------------------------------------------------- +5 |def typedMapHandler[K, V: BSONHandler] = stringMapHandler[V] // warn + | ^ + |Given search preference for bson.BSONWriter[Map[String, V]] between alternatives (bson.BSONWriter.mapWriter : [V²](using x$1: bson.BSONWriter[V²]): bson.BSONDocumentWriter[Map[String, V²]]) and (bson.BSONWriter.collectionWriter : + | [T, Repr <: Iterable[T]](using x$1: bson.BSONWriter[T], x$2: Repr ¬ Option[T]): bson.BSONWriter[Repr]) will change + |Current choice : the first alternative + |New choice from Scala 3.6: none - it's ambiguous + | + |where: V is a type in method typedMapHandler + | V² is a type variable diff --git a/tests/warn/bson/Test.scala b/tests/warn/bson/Test.scala new file mode 100644 index 000000000000..78b6687adabf --- /dev/null +++ b/tests/warn/bson/Test.scala @@ -0,0 +1,5 @@ +//> using options -source 3.5 +import bson.* + +def stringMapHandler[V](using writer: BSONWriter[Map[String, V]]): BSONHandler[Map[String, V]] = ??? 
+def typedMapHandler[K, V: BSONHandler] = stringMapHandler[V] // warn diff --git a/tests/warn/bson/bson.scala b/tests/warn/bson/bson.scala new file mode 100644 index 000000000000..d901ee3e3a4f --- /dev/null +++ b/tests/warn/bson/bson.scala @@ -0,0 +1,29 @@ +package bson + +trait BSONWriter[T] +trait BSONDocumentWriter[T] extends BSONWriter[T] +object BSONWriter extends BSONWriterInstances + +trait BSONHandler[T] extends BSONWriter[T] + +private[bson] trait BSONWriterInstances { + given mapWriter[V](using BSONWriter[V]): BSONDocumentWriter[Map[String, V]] = bson.mapWriter[V] + export bson.collectionWriter +} + +final class ¬[A, B] +object ¬ { + implicit def defaultEvidence[A, B]: ¬[A, B] = new ¬[A, B]() + @annotation.implicitAmbiguous("Could not prove type ${A} is not (¬) ${A}") + implicit def ambiguousEvidence1[A]: ¬[A, A] = null + implicit def ambiguousEvidence2[A]: ¬[A, A] = null +} + +private[bson] trait DefaultBSONHandlers extends LowPriorityHandlers +private[bson] trait LowPriorityHandlers{ + given collectionWriter[T, Repr <: Iterable[T]](using BSONWriter[T], Repr ¬ Option[T]): BSONWriter[Repr] = ??? + private[bson] def mapWriter[V](implicit valueWriter: BSONWriter[V]): BSONDocumentWriter[Map[String, V]] = ??? +} + +// --- +package object bson extends DefaultBSONHandlers \ No newline at end of file From 8a3854fb73fcb88375b35106ea337b4ab907f900 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 09:00:02 +0200 Subject: [PATCH 244/465] Avoid ambiguity errors arising from double references in implicits If two implicit candidate references happen to be the same pick one of them instead of reporting an ambiguity. 
--- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 4 +++- tests/{neg => pos}/i12591/Inner.scala | 3 ++- tests/{neg => pos}/i12591/Outer.scala | 0 3 files changed, 5 insertions(+), 2 deletions(-) rename tests/{neg => pos}/i12591/Inner.scala (64%) rename tests/{neg => pos}/i12591/Outer.scala (100%) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 6180fa1a5e52..9f2e0628e70e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1340,7 +1340,9 @@ trait Implicits: case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2) assert(diff <= 0) // diff > 0 candidates should already have been eliminated in `rank` - if diff == 0 && alt2.isExtension then + if diff == 0 && alt1.ref =:= alt2.ref then + diff = 1 // See i12951 for a test where this happens + else if diff == 0 && alt2.isExtension then if alt1.isExtension then // Fall back: if both results are extension method applications, // compare the extension methods instead of their wrappers. 
diff --git a/tests/neg/i12591/Inner.scala b/tests/pos/i12591/Inner.scala similarity index 64% rename from tests/neg/i12591/Inner.scala rename to tests/pos/i12591/Inner.scala index aae9bd5b9234..2f8018c4d824 100644 --- a/tests/neg/i12591/Inner.scala +++ b/tests/pos/i12591/Inner.scala @@ -9,5 +9,6 @@ object Foo: import Foo.TC //Adding import Foo.Bar resolves the issue -val badSummon = summon[TC[Bar]] // error here +val badSummon = summon[TC[Bar]] + // was an ambiguous error, now OK, since the two references are the same diff --git a/tests/neg/i12591/Outer.scala b/tests/pos/i12591/Outer.scala similarity index 100% rename from tests/neg/i12591/Outer.scala rename to tests/pos/i12591/Outer.scala From c5f2064b3683eeed629a64e34cc933db4f7d2618 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Mon, 6 May 2024 10:23:54 +0200 Subject: [PATCH 245/465] Don't show enum completions in new keyword context (#20304) Fixes https://github.com/scala/scala3/issues/19968 --- .../tools/dotc/interactive/Completion.scala | 31 ++++++++++++++----- .../tools/languageserver/CompletionTest.scala | 19 ++++++++++++ .../tools/pc/completions/Completions.scala | 7 ++--- .../pc/tests/completion/CompletionSuite.scala | 31 +++++++++++++++++++ 4 files changed, 76 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index f2b63cbec8d5..7882d635f84a 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -75,7 +75,7 @@ object Completion: customMatcher: Option[Name => Boolean] = None )(using Context): CompletionMap = val adjustedPath = typeCheckExtensionConstructPath(untpdPath, tpdPath, pos) - computeCompletions(pos, mode, rawPrefix, adjustedPath, customMatcher) + computeCompletions(pos, mode, rawPrefix, adjustedPath, untpdPath, customMatcher) /** * 
Inspect `path` to determine what kinds of symbols should be considered. @@ -199,12 +199,16 @@ object Completion: .flatten.getOrElse(tpdPath) private def computeCompletions( - pos: SourcePosition, mode: Mode, rawPrefix: String, adjustedPath: List[tpd.Tree], matches: Option[Name => Boolean] + pos: SourcePosition, + mode: Mode, rawPrefix: String, + adjustedPath: List[tpd.Tree], + untpdPath: List[untpd.Tree], + matches: Option[Name => Boolean] )(using Context): CompletionMap = val hasBackTick = rawPrefix.headOption.contains('`') val prefix = if hasBackTick then rawPrefix.drop(1) else rawPrefix val matches0 = matches.getOrElse(_.startsWith(prefix)) - val completer = new Completer(mode, pos, matches0) + val completer = new Completer(mode, pos, untpdPath, matches0) val result = adjustedPath match // Ignore synthetic select from `This` because in code it was `Ident` @@ -279,6 +283,12 @@ object Completion: if denot.isType then denot.symbol.showFullName else denot.info.widenTermRefExpr.show + + def isInNewContext(untpdPath: List[untpd.Tree]): Boolean = + untpdPath match + case _ :: untpd.New(selectOrIdent: (untpd.Select | untpd.Ident)) :: _ => true + case _ => false + /** Include in completion sets only symbols that * 1. is not absent (info is not NoType) * 2. are not a primary constructor, @@ -290,7 +300,11 @@ object Completion: * 8. symbol is not a constructor proxy module when in type completion mode * 9. 
have same term/type kind as name prefix given so far */ - def isValidCompletionSymbol(sym: Symbol, completionMode: Mode)(using Context): Boolean = + def isValidCompletionSymbol(sym: Symbol, completionMode: Mode, isNew: Boolean)(using Context): Boolean = + + lazy val isEnum = sym.is(Enum) || + (sym.companionClass.exists && sym.companionClass.is(Enum)) + sym.exists && !sym.isAbsent() && !sym.isPrimaryConstructor && @@ -300,6 +314,7 @@ object Completion: !sym.isPackageObject && !sym.is(Artifact) && !(completionMode.is(Mode.Type) && sym.isAllOf(ConstructorProxyModule)) && + !(isNew && isEnum) && ( (completionMode.is(Mode.Term) && (sym.isTerm || sym.is(ModuleClass)) || (completionMode.is(Mode.Type) && (sym.isType || sym.isStableMember))) @@ -323,7 +338,7 @@ object Completion: * For the results of all `xyzCompletions` methods term names and type names are always treated as different keys in the same map * and they never conflict with each other. */ - class Completer(val mode: Mode, pos: SourcePosition, matches: Name => Boolean): + class Completer(val mode: Mode, pos: SourcePosition, untpdPath: List[untpd.Tree], matches: Name => Boolean): /** Completions for terms and types that are currently in scope: * the members of the current class, local definitions and the symbols that have been imported, * recursively adding completions from outer scopes. @@ -530,7 +545,7 @@ object Completion: // There are four possible ways for an extension method to be applicable // 1. The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. 
- val termCompleter = new Completer(Mode.Term, pos, matches) + val termCompleter = new Completer(Mode.Term, pos, untpdPath, matches) val extMethodsInScope = termCompleter.scopeCompletions.toList.flatMap: case (name, denots) => denots.collect: case d: SymDenotation if d.isTerm && d.termRef.symbol.is(Extension) => (d.termRef, name.asTermName) @@ -557,6 +572,8 @@ object Completion: } extMethodsWithAppliedReceiver.groupByName + lazy val isNew: Boolean = isInNewContext(untpdPath) + /** Include in completion sets only symbols that * 1. match the filter method, * 2. satisfy [[Completion.isValidCompletionSymbol]] @@ -564,7 +581,7 @@ object Completion: private def include(denot: SingleDenotation, nameInScope: Name)(using Context): Boolean = matches(nameInScope) && completionsFilter(NoType, nameInScope) && - isValidCompletionSymbol(denot.symbol, mode) + isValidCompletionSymbol(denot.symbol, mode, isNew) private def extractRefinements(site: Type)(using Context): Seq[SingleDenotation] = site match diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index 6ef8bee8a5d2..d64bb44c1a5d 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -1704,4 +1704,23 @@ class CompletionTest { .completion(m1, Set( ("getOrElse", Method, "[V1 >: String](key: Int, default: => V1): V1"), )) + + @Test def noEnumCompletionInNewContext: Unit = + code"""|enum TestEnum: + | case TestCase + |object M: + | TestEnu$m1 + | TestEnum.TestCa$m2 + | val x: TestEnu$m3 + | val y: TestEnum.Tes$m4 + | new TestEnu$m5 + | new TestEnum.TestCas$m6 + |""" + .completion(m1, Set(("TestEnum", Module, "TestEnum"))) + .completion(m2, Set(("TestCase", Field, "TestEnum"))) + .completion(m3, Set(("TestEnum", Module, "TestEnum"), ("TestEnum", Class, "TestEnum"))) + .completion(m4, Set(("TestCase", Field, "TestEnum"))) + 
.completion(m5, Set()) + .completion(m6, Set()) + } diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index 052287193540..fb39102399ba 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -72,10 +72,7 @@ class Completions( case _ :: (_: (Import | Export)) :: _ => false case _ => true - private lazy val isNew: Boolean = - path match - case _ :: New(selectOrIdent: (Select | Ident)) :: _ => true - case _ => false + private lazy val isNew: Boolean = Completion.isInNewContext(adjustedPath) def includeSymbol(sym: Symbol)(using Context): Boolean = def hasSyntheticCursorSuffix: Boolean = @@ -537,7 +534,7 @@ class Completions( val query = completionPos.query if completionMode.is(Mode.Scope) && query.nonEmpty then val visitor = new CompilerSearchVisitor(sym => - if Completion.isValidCompletionSymbol(sym, completionMode) && + if Completion.isValidCompletionSymbol(sym, completionMode, isNew) && !(sym.is(Flags.ExtensionMethod) || (sym.maybeOwner.is(Flags.Implicit) && sym.maybeOwner.isClass)) then indexedContext.lookupSym(sym) match diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index f4f659db1541..b5db258601bc 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -1877,3 +1877,34 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin, topLines = Some(2) ) + + @Test def `no-enum-completions-in-new-context` = + check( + """enum TestEnum: + | case TestCase + |object M: + | new TestEnu@@ + |""".stripMargin, + "" + ) + + @Test def `no-enum-case-completions-in-new-context` = + 
check( + """enum TestEnum: + | case TestCase + |object M: + | new TestEnum.TestCas@@ + |""".stripMargin, + "" + ) + + @Test def `deduplicated-enum-completions` = + check( + """enum TestEnum: + | case TestCase + |object M: + | val x: TestEn@@ + |""".stripMargin, + """TestEnum test + |""".stripMargin, + ) From ce4d618f7e6c64f64b689dad9dbee42672f4885b Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Mon, 6 May 2024 13:30:14 +0200 Subject: [PATCH 246/465] Remove unnecessary implicit in test --- .../explicit-nulls/run/tasty-flexible-type/quoted_1.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala b/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala index 3ab5eac144f0..782eec53602a 100644 --- a/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala +++ b/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala @@ -2,10 +2,10 @@ import scala.quoted.* object Macros { - implicit inline def printTree[T](inline x: T): Unit = + inline def printTree[T](inline x: T): Unit = ${ impl('x) } - def impl[T](x: Expr[T])(using Quotes) : Expr[Unit] = { + def impl[T](x: Expr[T])(using Quotes): Expr[Unit] = { import quotes.reflect.* val tree = x.asTerm @@ -19,9 +19,9 @@ object Macros { } } - inline def theTestBlock : Unit = ${ theTestBlockImpl } + inline def theTestBlock: Unit = ${ theTestBlockImpl } - def theTestBlockImpl(using qctx : Quotes) : Expr[Unit] = { + def theTestBlockImpl(using Quotes): Expr[Unit] = { import quotes.reflect.* val ft1 = FlexibleType(TypeRepr.of[String]) From 69664f7c01e0103cc02ee5fae9d63ad04abe46d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Thu, 2 May 2024 13:47:40 +0200 Subject: [PATCH 247/465] Make `unusedDataApply` inline so that no closure is allocated. It is used for every single tree in `CheckUnused`, so this is worth it. 
--- compiler/src/dotty/tools/dotc/transform/CheckUnused.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index deb8446affbb..eece239e93f4 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -40,8 +40,10 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke import CheckUnused.* import UnusedData.* - private def unusedDataApply[U](f: UnusedData => U)(using Context): Context = - ctx.property(_key).foreach(f) + private inline def unusedDataApply[U](inline f: UnusedData => U)(using Context): Context = + ctx.property(_key) match + case Some(ud) => f(ud) + case None => () ctx override def phaseName: String = CheckUnused.phaseNamePrefix + suffix From 0bd4b156aed129c141627283edeaddc518989a31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Thu, 2 May 2024 13:49:16 +0200 Subject: [PATCH 248/465] Do not use `LazyList` in `CheckUnused`. It is not efficient when the results are always used exactly once. 
--- .../tools/dotc/transform/CheckUnused.scala | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index eece239e93f4..8dbedd5c69f6 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -1,5 +1,7 @@ package dotty.tools.dotc.transform +import scala.annotation.tailrec + import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.ast.tpd.{Inlined, TreeTraverser} @@ -88,11 +90,17 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke override def prepareForIdent(tree: tpd.Ident)(using Context): Context = if tree.symbol.exists then - val prefixes = LazyList.iterate(tree.typeOpt.normalizedPrefix)(_.normalizedPrefix).takeWhile(_ != NoType) - .take(10) // Failsafe for the odd case if there was an infinite cycle - for prefix <- prefixes do - unusedDataApply(_.registerUsed(prefix.classSymbol, None)) - unusedDataApply(_.registerUsed(tree.symbol, Some(tree.name))) + unusedDataApply { ud => + @tailrec + def loopOnNormalizedPrefixes(prefix: Type, depth: Int): Unit = + // limit to 10 as failsafe for the odd case where there is an infinite cycle + if depth < 10 && prefix.exists then + ud.registerUsed(prefix.classSymbol, None) + loopOnNormalizedPrefixes(prefix.normalizedPrefix, depth + 1) + + loopOnNormalizedPrefixes(tree.typeOpt.normalizedPrefix, depth = 0) + ud.registerUsed(tree.symbol, Some(tree.name)) + } else if tree.hasType then unusedDataApply(_.registerUsed(tree.tpe.classSymbol, Some(tree.name))) else From 6d79caa74fa2137261607cda802d75213c3e4370 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Thu, 2 May 2024 13:51:13 +0200 Subject: [PATCH 249/465] Do not mangle names only to test whether it starts with a given string. 
--- compiler/src/dotty/tools/dotc/transform/CheckUnused.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 8dbedd5c69f6..7741565d11a2 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -125,8 +125,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke traverseAnnotations(tree.symbol) if !tree.symbol.is(Module) then ud.registerDef(tree) - if tree.name.mangledString.startsWith(nme.derived.mangledString + "$") - && tree.typeOpt != NoType then + if tree.name.startsWith("derived$") && tree.typeOpt != NoType then ud.registerUsed(tree.typeOpt.typeSymbol, None, true) ud.addIgnoredUsage(tree.symbol) } From 3188177a494877241f973efe557bb6e6d6c6d358 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Thu, 2 May 2024 13:51:43 +0200 Subject: [PATCH 250/465] Remove dead code `newCtx` in `CheckUnused`. --- compiler/src/dotty/tools/dotc/transform/CheckUnused.scala | 3 --- 1 file changed, 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 7741565d11a2..31fcc14d646d 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -207,9 +207,6 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke } ctx - private def newCtx(tree: tpd.Tree)(using Context) = - if tree.symbol.exists then ctx.withOwner(tree.symbol) else ctx - /** * This traverse is the **main** component of this phase * From a55ee4d5f3b0db7dc4b95b2beb6d4306395db0e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Thu, 2 May 2024 13:54:29 +0200 Subject: [PATCH 251/465] Do not use Sets of Trees in `CheckUnused`. 
`Tree`s have structural equality. Even if `==` should be able to exit quickly either because of `eq` or an early difference, sets systematically call `hashCode`, which is going to recurse into the entire structure. --- .../tools/dotc/transform/CheckUnused.scala | 66 +++++++++++-------- 1 file changed, 38 insertions(+), 28 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 31fcc14d646d..12f927194300 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -29,6 +29,7 @@ import dotty.tools.dotc.core.Definitions import dotty.tools.dotc.core.NameKinds.WildcardParamName import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.StdNames.nme +import dotty.tools.dotc.util.Spans.Span import scala.math.Ordering @@ -365,16 +366,16 @@ object CheckUnused: * See the `isAccessibleAsIdent` extension method below in the file */ private val usedInScope = MutStack(MutSet[(Symbol,Boolean, Option[Name], Boolean)]()) - private val usedInPosition = MutSet[(SrcPos, Name)]() + private val usedInPosition = MutMap.empty[Name, MutSet[Symbol]] /* unused import collected during traversal */ - private val unusedImport = MutSet[ImportSelector]() + private val unusedImport = new java.util.IdentityHashMap[ImportSelector, Unit] /* LOCAL DEF OR VAL / Private Def or Val / Pattern variables */ - private val localDefInScope = MutSet[tpd.MemberDef]() - private val privateDefInScope = MutSet[tpd.MemberDef]() - private val explicitParamInScope = MutSet[tpd.MemberDef]() - private val implicitParamInScope = MutSet[tpd.MemberDef]() - private val patVarsInScope = MutSet[tpd.Bind]() + private val localDefInScope = MutList.empty[tpd.MemberDef] + private val privateDefInScope = MutList.empty[tpd.MemberDef] + private val explicitParamInScope = MutList.empty[tpd.MemberDef] + private val implicitParamInScope = 
MutList.empty[tpd.MemberDef] + private val patVarsInScope = MutList.empty[tpd.Bind] /** All variables sets*/ private val setVars = MutSet[Symbol]() @@ -416,7 +417,8 @@ object CheckUnused: usedInScope.top += ((sym.companionModule, sym.isAccessibleAsIdent, name, isDerived)) usedInScope.top += ((sym.companionClass, sym.isAccessibleAsIdent, name, isDerived)) if sym.sourcePos.exists then - name.map(n => usedInPosition += ((sym.sourcePos, n))) + for n <- name do + usedInPosition.getOrElseUpdate(n, MutSet.empty) += sym /** Register a symbol that should be ignored */ def addIgnoredUsage(sym: Symbol)(using Context): Unit = @@ -434,9 +436,9 @@ object CheckUnused: if !tpd.languageImport(imp.expr).nonEmpty && !imp.isGeneratedByEnum && !isTransparentAndInline(imp) then impInScope.top += imp if currScopeType.top != ScopeType.ReplWrapper then // #18383 Do not report top-level import's in the repl as unused - unusedImport ++= imp.selectors.filter { s => - !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) && !isImportIgnored(imp, s) - } + for s <- imp.selectors do + if !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) && !isImportIgnored(imp, s) then + unusedImport.put(s, ()) end registerImport /** Register (or not) some `val` or `def` according to the context, scope and flags */ @@ -491,11 +493,11 @@ object CheckUnused: // We keep wildcard symbol for the end as they have the least precedence false case Some(sel) => - unusedImport -= sel + unusedImport.remove(sel) true } if !matchedExplicitImport && selWildCard.isDefined then - unusedImport -= selWildCard.get + unusedImport.remove(selWildCard.get) true // a matching import exists so the symbol won't be kept for outer scope else matchedExplicitImport @@ -520,56 +522,64 @@ object CheckUnused: def getUnused(using Context): UnusedResult = popScope() + + def isUsedInPosition(name: Name, span: Span): Boolean = + usedInPosition.get(name) match + case Some(syms) => syms.exists(sym => span.contains(sym.span)) + case None 
=> false + val sortedImp = if ctx.settings.WunusedHas.imports || ctx.settings.WunusedHas.strictNoImplicitWarn then - unusedImport.map(d => UnusedSymbol(d.srcPos, d.name, WarnTypes.Imports)).toList + import scala.jdk.CollectionConverters.* + unusedImport.keySet().nn.iterator().nn.asScala + .map(d => UnusedSymbol(d.srcPos, d.name, WarnTypes.Imports)).toList else Nil // Partition to extract unset local variables from usedLocalDefs val (usedLocalDefs, unusedLocalDefs) = if ctx.settings.WunusedHas.locals then - localDefInScope.partition(d => d.symbol.usedDefContains) + localDefInScope.toList.partition(d => d.symbol.usedDefContains) else (Nil, Nil) val sortedLocalDefs = unusedLocalDefs - .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .filterNot(d => isUsedInPosition(d.symbol.name, d.span)) .filterNot(d => containsSyntheticSuffix(d.symbol)) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.LocalDefs)).toList + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.LocalDefs)) val unsetLocalDefs = usedLocalDefs.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetLocals)).toList val sortedExplicitParams = if ctx.settings.WunusedHas.explicits then - explicitParamInScope + explicitParamInScope.toList .filterNot(d => d.symbol.usedDefContains) - .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .filterNot(d => isUsedInPosition(d.symbol.name, d.span)) .filterNot(d => containsSyntheticSuffix(d.symbol)) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ExplicitParams)).toList + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ExplicitParams)) else Nil val sortedImplicitParams = if ctx.settings.WunusedHas.implicits then - implicitParamInScope + implicitParamInScope.toList .filterNot(d => d.symbol.usedDefContains) .filterNot(d => containsSyntheticSuffix(d.symbol)) - .map(d => UnusedSymbol(d.namePos, d.name, 
WarnTypes.ImplicitParams)).toList + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ImplicitParams)) else Nil // Partition to extract unset private variables from usedPrivates val (usedPrivates, unusedPrivates) = if ctx.settings.WunusedHas.privates then - privateDefInScope.partition(d => d.symbol.usedDefContains) + privateDefInScope.toList.partition(d => d.symbol.usedDefContains) else (Nil, Nil) - val sortedPrivateDefs = unusedPrivates.filterNot(d => containsSyntheticSuffix(d.symbol)).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PrivateMembers)).toList - val unsetPrivateDefs = usedPrivates.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetPrivates)).toList + val sortedPrivateDefs = unusedPrivates.filterNot(d => containsSyntheticSuffix(d.symbol)).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PrivateMembers)) + val unsetPrivateDefs = usedPrivates.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetPrivates)) val sortedPatVars = if ctx.settings.WunusedHas.patvars then - patVarsInScope + patVarsInScope.toList .filterNot(d => d.symbol.usedDefContains) .filterNot(d => containsSyntheticSuffix(d.symbol)) - .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PatVars)).toList + .filterNot(d => isUsedInPosition(d.symbol.name, d.span)) + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PatVars)) else Nil val warnings = From 701d69fc2a5b4294d73357461331aaf5c2b06d72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Thu, 2 May 2024 13:56:00 +0200 Subject: [PATCH 252/465] Remove a useless sort and otherwise sort by offset, not line. It is pointless to sort a list before converting it into a Set. 
--- .../tools/dotc/transform/CheckUnused.scala | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 12f927194300..d6c3cd394d5c 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -290,7 +290,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke /** Do the actual reporting given the result of the anaylsis */ private def reportUnused(res: UnusedData.UnusedResult)(using Context): Unit = - res.warnings.toList.sortBy(_.pos.line)(using Ordering[Int]).foreach { s => + res.warnings.toList.sortBy(_.pos.span.point)(using Ordering[Int]).foreach { s => s match case UnusedSymbol(t, _, WarnTypes.Imports) => report.warning(s"unused import", t) @@ -583,19 +583,14 @@ object CheckUnused: else Nil val warnings = - val unsorted = - sortedImp ::: - sortedLocalDefs ::: - sortedExplicitParams ::: - sortedImplicitParams ::: - sortedPrivateDefs ::: - sortedPatVars ::: - unsetLocalDefs ::: - unsetPrivateDefs - unsorted.sortBy { s => - val pos = s.pos.sourcePos - (pos.line, pos.column) - } + sortedImp ::: + sortedLocalDefs ::: + sortedExplicitParams ::: + sortedImplicitParams ::: + sortedPrivateDefs ::: + sortedPatVars ::: + unsetLocalDefs ::: + unsetPrivateDefs UnusedResult(warnings.toSet) end getUnused //============================ HELPERS ==================================== From 803dff70331589569f9707b01c78730ef16f8623 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 3 May 2024 09:29:03 +0200 Subject: [PATCH 253/465] Refactor unused imports to try and make sense of it. 
--- .../tools/dotc/transform/CheckUnused.scala | 76 +++++++++++-------- 1 file changed, 46 insertions(+), 30 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index d6c3cd394d5c..ee80bdac5017 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -10,6 +10,7 @@ import dotty.tools.dotc.ast.untpd.ImportSelector import dotty.tools.dotc.config.ScalaSettings import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.{em, i} +import dotty.tools.dotc.core.Denotations.SingleDenotation import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.StdNames @@ -409,13 +410,18 @@ object CheckUnused: * as the same element can be imported with different renaming */ def registerUsed(sym: Symbol, name: Option[Name], isDerived: Boolean = false)(using Context): Unit = - if !isConstructorOfSynth(sym) && !doNotRegister(sym) then - if sym.isConstructor && sym.exists then + if sym.exists && !isConstructorOfSynth(sym) && !doNotRegister(sym) then + if sym.isConstructor then registerUsed(sym.owner, None) // constructor are "implicitly" imported with the class else - usedInScope.top += ((sym, sym.isAccessibleAsIdent, name, isDerived)) - usedInScope.top += ((sym.companionModule, sym.isAccessibleAsIdent, name, isDerived)) - usedInScope.top += ((sym.companionClass, sym.isAccessibleAsIdent, name, isDerived)) + val accessibleAsIdent = sym.isAccessibleAsIdent + def addIfExists(sym: Symbol): Unit = + if sym.exists then + usedDef += sym + usedInScope.top += ((sym, accessibleAsIdent, name, isDerived)) + addIfExists(sym) + addIfExists(sym.companionModule) + addIfExists(sym.companionClass) if sym.sourcePos.exists then for n <- name do usedInPosition.getOrElseUpdate(n, MutSet.empty) += sym @@ -508,8 +514,6 @@ object CheckUnused: // we keep the symbols not 
referencing an import in this scope // as it can be the only reference to an outer import usedInScope.top ++= kept - // register usage in this scope for other warnings at the end of the phase - usedDef ++= used.map(_._1) // retrieve previous scope type currScopeType.pop end popScope @@ -685,42 +689,54 @@ object CheckUnused: extension (sym: Symbol) /** is accessible without import in current context */ private def isAccessibleAsIdent(using Context): Boolean = - sym.exists && - ctx.outersIterator.exists{ c => - c.owner == sym.owner - || sym.owner.isClass && c.owner.isClass - && c.owner.thisType.baseClasses.contains(sym.owner) - && c.owner.thisType.member(sym.name).alternatives.contains(sym) - } + ctx.outersIterator.exists{ c => + c.owner == sym.owner + || sym.owner.isClass && c.owner.isClass + && c.owner.thisType.baseClasses.contains(sym.owner) + && c.owner.thisType.member(sym.name).alternatives.contains(sym) + } /** Given an import and accessibility, return selector that matches import<->symbol */ private def isInImport(imp: tpd.Import, isAccessible: Boolean, altName: Option[Name], isDerived: Boolean)(using Context): Option[ImportSelector] = + assert(sym.exists) + val tpd.Import(qual, sels) = imp val qualTpe = qual.tpe val dealiasedSym = sym.dealias - val simpleSelections = qualTpe.member(sym.name).alternatives - val selectionsToDealias = sels.flatMap(sel => - qualTpe.member(sel.name.toTypeName).alternatives - ::: qualTpe.member(sel.name.toTermName).alternatives) - def qualHasSymbol = simpleSelections.map(_.symbol).contains(sym) || (simpleSelections ::: selectionsToDealias).map(_.symbol).map(_.dealias).contains(dealiasedSym) - def selector = sels.find(sel => (sel.name.toTermName == sym.name || sel.name.toTypeName == sym.name) && altName.map(n => n.toTermName == sel.rename).getOrElse(true)) - def dealiasedSelector = + + val selectionsToDealias: List[SingleDenotation] = + val typeSelections = sels.flatMap(n => qualTpe.member(n.name.toTypeName).alternatives) + val 
termSelections = sels.flatMap(n => qualTpe.member(n.name.toTermName).alternatives) + typeSelections ::: termSelections + + val qualHasSymbol: Boolean = + val simpleSelections = qualTpe.member(sym.name).alternatives + simpleSelections.exists(d => d.symbol == sym || d.symbol.dealias == dealiasedSym) + || selectionsToDealias.exists(d => d.symbol.dealias == dealiasedSym) + + def selector: Option[ImportSelector] = + sels.find(sel => sym.name.toTermName == sel.name && altName.forall(n => n.toTermName == sel.rename)) + + def dealiasedSelector: Option[ImportSelector] = if isDerived then - sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collect { + sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collectFirst { case (sel, sym) if sym.dealias == dealiasedSym => sel - }.headOption + } else None - def givenSelector = if sym.is(Given) || sym.is(Implicit) - then sels.filter(sel => sel.isGiven && !sel.bound.isEmpty).find(sel => sel.boundTpe =:= sym.info) + + def givenSelector: Option[ImportSelector] = + if sym.is(Given) || sym.is(Implicit) then + sels.filter(sel => sel.isGiven && !sel.bound.isEmpty).find(sel => sel.boundTpe =:= sym.info) else None - def wildcard = sels.find(sel => sel.isWildcard && ((sym.is(Given) == sel.isGiven && sel.bound.isEmpty) || sym.is(Implicit))) - if sym.exists && qualHasSymbol && (!isAccessible || sym.isRenamedSymbol(altName)) then + + def wildcard: Option[ImportSelector] = + sels.find(sel => sel.isWildcard && ((sym.is(Given) == sel.isGiven && sel.bound.isEmpty) || sym.is(Implicit))) + + if qualHasSymbol && (!isAccessible || altName.exists(_.toSimpleName != sym.name.toSimpleName)) then selector.orElse(dealiasedSelector).orElse(givenSelector).orElse(wildcard) // selector with name or wildcard (or given) else None - - private def isRenamedSymbol(symNameInScope: Option[Name])(using Context) = - sym.name != nme.NO_NAME && symNameInScope.exists(_.toSimpleName != sym.name.toSimpleName) + end isInImport private def 
dealias(using Context): Symbol = if sym.isType && sym.asType.denot.isAliasType then From 0c1f090ec78a1ab078250fcd473b2ec8e80de282 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 3 May 2024 09:50:03 +0200 Subject: [PATCH 254/465] Fix some indentation. --- .../tools/dotc/transform/CheckUnused.scala | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index ee80bdac5017..15a73f18f323 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -827,23 +827,24 @@ object CheckUnused: end UnusedData private object UnusedData: - enum ScopeType: - case Local - case Template - case ReplWrapper - case Other - - object ScopeType: - /** return the scope corresponding to the enclosing scope of the given tree */ - def fromTree(tree: tpd.Tree)(using Context): ScopeType = tree match - case tree: tpd.Template => if tree.symbol.name.isReplWrapperName then ReplWrapper else Template - case _:tpd.Block => Local - case _ => Other - - case class UnusedSymbol(pos: SrcPos, name: Name, warnType: WarnTypes) - /** A container for the results of the used elements analysis */ - case class UnusedResult(warnings: Set[UnusedSymbol]) - object UnusedResult: - val Empty = UnusedResult(Set.empty) + enum ScopeType: + case Local + case Template + case ReplWrapper + case Other + + object ScopeType: + /** return the scope corresponding to the enclosing scope of the given tree */ + def fromTree(tree: tpd.Tree)(using Context): ScopeType = tree match + case tree: tpd.Template => if tree.symbol.name.isReplWrapperName then ReplWrapper else Template + case _:tpd.Block => Local + case _ => Other + + case class UnusedSymbol(pos: SrcPos, name: Name, warnType: WarnTypes) + /** A container for the results of the used elements analysis */ + case class 
UnusedResult(warnings: Set[UnusedSymbol]) + object UnusedResult: + val Empty = UnusedResult(Set.empty) + end UnusedData end CheckUnused From 6d29951d667a0004ae5066952a8f42e85bc3b3ee Mon Sep 17 00:00:00 2001 From: Aviv Keller <38299977+RedYetiDev@users.noreply.github.com> Date: Mon, 6 May 2024 08:16:43 -0400 Subject: [PATCH 255/465] Optimize main.js (#20093) This Pull Request optimizes the functions used in `/docs/_spec/public/scripts/main.js`. ## Changes made: 1. `currentChapter()` function was simplified using `split()` and `pop()` instead of `lastIndexOf()` and `substring()`. 2. Used template literals for string interpolation. 3. Simplified the `heading` function by reducing repetitive code and improving readability. 4. Changed `.removeClass()` and `.addClass()` to `.toggleClass()` for toggling the class based on condition. 5. General cleanup and optimization for better readability and performance. --- docs/_spec/public/scripts/main.js | 33 ++++++++++++------------------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/docs/_spec/public/scripts/main.js b/docs/_spec/public/scripts/main.js index 9ade9c770f1e..c74c8d0ff9a1 100644 --- a/docs/_spec/public/scripts/main.js +++ b/docs/_spec/public/scripts/main.js @@ -1,34 +1,29 @@ function currentChapter() { - var path = document.location.pathname; - var idx = path.lastIndexOf("/") + 1; - var chap = path.substring(idx, idx + 2); - return parseInt(chap, 10); + return parseInt(document.location.pathname.split('/').pop().substr(0, 2), 10); } function heading(i, heading, $heading) { - var currentLevel = parseInt(heading.tagName.substring(1)); - var result = ""; + const currentLevel = parseInt(heading.tagName.substring(1)); + if (currentLevel === this.headerLevel) { - this.headerCounts[this.headerLevel] += 1; - return "" + this.headerCounts[this.headerLevel] + " " + $heading.text(); + this.headerCounts[this.headerLevel]++; } else if (currentLevel < this.headerLevel) { - while(currentLevel < this.headerLevel) { 
+ while (currentLevel < this.headerLevel) { this.headerCounts[this.headerLevel] = 1; - this.headerLevel -= 1; + this.headerLevel--; } - this.headerCounts[this.headerLevel] += 1; - return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); + this.headerCounts[this.headerLevel]++; } else { - while(currentLevel > this.headerLevel) { - this.headerLevel += 1; + while (currentLevel > this.headerLevel) { + this.headerLevel++; this.headerCounts[this.headerLevel] = 1; } - return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); } + return `${this.headerCounts[this.headerLevel]} ${$heading.text()}`; } // ignore when using wkhtmltopdf, or it won't work... -if(window.jekyllEnv !== 'spec-pdf') { +if (window.jekyllEnv !== 'spec-pdf') { $('#toc').toc( { 'selectors': 'h1,h2,h3', @@ -64,8 +59,6 @@ document.addEventListener("DOMContentLoaded", function() { }); $("#chapters a").each(function (index) { - if (document.location.pathname.endsWith($(this).attr("href"))) - $(this).addClass("chapter-active"); - else - $(this).removeClass("chapter-active"); + const href = $(this).attr("href"); + $(this).toggleClass("chapter-active", document.location.pathname.endsWith(href)); }); From 55025c22ab2145be6c883f085c47ebc0d3f8e417 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20Fornal?= <24961583+Florian3k@users.noreply.github.com> Date: Mon, 6 May 2024 16:22:05 +0200 Subject: [PATCH 256/465] Scaladoc: improve refined function types rendering (#20333) Fixes #19967 Improves rendering of refined function types - unneeded parameters names are omitted when possible --- .../src/tests/refinedFunctionTypes.scala | 37 ++++++++++++++++ scaladoc-testcases/src/tests/thisType.scala | 4 +- .../src/tests/typesSignatures.scala | 2 +- .../tools/scaladoc/tasty/TypesSupport.scala | 44 ++++++++++++++++--- .../TranslatableSignaturesTestCases.scala | 2 + 5 files changed, 80 insertions(+), 9 deletions(-) create mode 100644 scaladoc-testcases/src/tests/refinedFunctionTypes.scala diff 
--git a/scaladoc-testcases/src/tests/refinedFunctionTypes.scala b/scaladoc-testcases/src/tests/refinedFunctionTypes.scala new file mode 100644 index 000000000000..d978a0ea2264 --- /dev/null +++ b/scaladoc-testcases/src/tests/refinedFunctionTypes.scala @@ -0,0 +1,37 @@ +package tests +package refinedFunctionTypes + +import annotation.experimental + +@experimental +infix type $throws[R, +E <: Exception] = CanThrow[E] ?=> R + +@experimental +infix type $throws2[+E <: Exception] = (c: CanThrow[E]) ?=> c.type + +@experimental +infix type $throws3[+E <: Exception] = [T] => (c: CanThrow[E]) ?=> c.type + +@experimental +infix type $throws4[+E <: Exception] = [T] => (c: CanThrow[E]) ?=> T //expected: infix type $throws4[+E <: Exception] = [T] => CanThrow[E] ?=> T + +type TA1 = (a: Int, b: (Boolean, String)) => List[(a.type, b.type)] + +type TA2 = (a: Int, b: (Boolean, String)) ?=> List[Boolean] + +@experimental +type TB0 = [R, E <: Exception] =>> PolyFunction { def apply[T](c: CanThrow[E]): R; } //expected: type TB0[R, E <: Exception] = [T] => CanThrow[E] => R + +@experimental +type TB1 = [R, E <: Exception] =>> PolyFunction { def apply[T](c: CanThrow[E], y: c.type): R; } //expected: type TB1[R, E <: Exception] = [T] => (c: CanThrow[E], y: c.type) => R + +@experimental +type TB2 = [R, E <: Exception] =>> PolyFunction { def apply[T](using c: CanThrow[E]): c.type; } //expected: type TB2[R, E <: Exception] = [T] => (c: CanThrow[E]) ?=> c.type + +type TC1 = [T] => (a: T) => T //expected: type TC1 = [T] => T => T + +type TC2 = [T] => (a: T) ?=> T //expected: type TC2 = [T] => T ?=> T + +type TC3 = [T] => (a: T) => a.type + +type TC4 = [T] => (a: T) ?=> a.type diff --git a/scaladoc-testcases/src/tests/thisType.scala b/scaladoc-testcases/src/tests/thisType.scala index 942e50af86ec..28cb55fcc49e 100644 --- a/scaladoc-testcases/src/tests/thisType.scala +++ b/scaladoc-testcases/src/tests/thisType.scala @@ -4,5 +4,5 @@ package thisType // issue 16024 class X[Map[_, _[_]]]: - inline 
def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] = //expected: inline def map[F[_]](f: [t] => (x$1: t) => F[t]): Map[this.type, F] - ??? + inline def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] + = ??? diff --git a/scaladoc-testcases/src/tests/typesSignatures.scala b/scaladoc-testcases/src/tests/typesSignatures.scala index e7a29ad8c8e1..5b6281ec7cb5 100644 --- a/scaladoc-testcases/src/tests/typesSignatures.scala +++ b/scaladoc-testcases/src/tests/typesSignatures.scala @@ -28,7 +28,7 @@ class Base // Tests do not support multiline signatures type Elem[X] = X match { case String => Char case Array[t] => t case Iterable[t] => t } - type F = [X] => (x: X) => List[X] + type F = [X] => (x: X) => List[X] //expected: type F = [X] => X => List[X] type G = Int => Int diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala index 373a26dd0297..855678a091d2 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala @@ -4,6 +4,7 @@ package tasty import scala.jdk.CollectionConverters._ import scala.quoted._ +import scala.util.control.NonFatal import NameNormalizer._ import SyntheticsSupport._ @@ -124,6 +125,12 @@ trait TypesSupport: ++ keyword(" =>> ").l ++ inner(resType) + case Refinement(parent, "apply", mt : MethodType) if isPolyOrEreased(parent) => + val isCtx = isContextualMethod(mt) + val sym = defn.FunctionClass(mt.paramTypes.length, isCtx) + val at = sym.typeRef.appliedTo(mt.paramTypes :+ mt.resType) + inner(Refinement(at, "apply", mt)) + case r: Refinement => { //(parent, name, info) def getRefinementInformation(t: TypeRepr): List[TypeRepr] = t match { case r: Refinement => getRefinementInformation(r.parent) :+ r @@ -164,16 +171,22 @@ trait TypesSupport: case t: PolyType => val paramBounds = getParamBounds(t) val method = t.resType.asInstanceOf[MethodType] - val paramList = getParamList(method) - val resType 
= inner(method.resType) - plain("[").l ++ paramBounds ++ plain("]").l ++ keyword(" => ").l ++ paramList ++ keyword(" => ").l ++ resType + val rest = parseDependentFunctionType(method) + plain("[").l ++ paramBounds ++ plain("]").l ++ keyword(" => ").l ++ rest case other => noSupported(s"Not supported type in refinement $info") } def parseDependentFunctionType(info: TypeRepr): SSignature = info match { case m: MethodType => - val paramList = getParamList(m) - paramList ++ keyword(" => ").l ++ inner(m.resType) + val isCtx = isContextualMethod(m) + if isDependentMethod(m) then + val paramList = getParamList(m) + val arrow = keyword(if isCtx then " ?=> " else " => ").l + val resType = inner(m.resType) + paramList ++ arrow ++ resType + else + val sym = defn.FunctionClass(m.paramTypes.length, isCtx) + inner(sym.typeRef.appliedTo(m.paramTypes :+ m.resType)) case other => noSupported("Dependent function type without MethodType refinement") } @@ -213,8 +226,9 @@ trait TypesSupport: case Seq(rtpe) => plain("()").l ++ keyword(arrow).l ++ inner(rtpe) case Seq(arg, rtpe) => - val partOfSignature = arg match + val partOfSignature = stripAnnotated(arg) match case _: TermRef | _: TypeRef | _: ConstantType | _: ParamRef => inner(arg) + case at: AppliedType if !isInfix(at) && !at.isFunctionType && !at.isTupleN => inner(arg) case _ => inParens(inner(arg)) partOfSignature ++ keyword(arrow).l ++ inner(rtpe) case args => @@ -385,3 +399,21 @@ trait TypesSupport: case _ => false at.args.size == 2 && (!at.typeSymbol.name.forall(isIdentifierPart) || infixAnnot) + + private def isPolyOrEreased(using Quotes)(tr: reflect.TypeRepr) = + Set("scala.PolyFunction", "scala.runtime.ErasedFunction") + .contains(tr.typeSymbol.fullName) + + private def isContextualMethod(using Quotes)(mt: reflect.MethodType) = + mt.asInstanceOf[dotty.tools.dotc.core.Types.MethodType].isContextualMethod + + private def isDependentMethod(using Quotes)(mt: reflect.MethodType) = + val method = 
mt.asInstanceOf[dotty.tools.dotc.core.Types.MethodType] + try method.isParamDependent || method.isResultDependent + catch case NonFatal(_) => true + + private def stripAnnotated(using Quotes)(tr: reflect.TypeRepr): reflect.TypeRepr = + import reflect.* + tr match + case AnnotatedType(tr, _) => stripAnnotated(tr) + case other => other diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala index 2b654d186aef..d60a4d82ff44 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala @@ -120,3 +120,5 @@ class MatchTypeTuple extends SignatureTest("matchTypeTuple", SignatureTest.all) class InfixTypes extends SignatureTest("infixTypes", SignatureTest.all) class ExtendsCall extends SignatureTest("extendsCall", SignatureTest.all) + +class RefinedFunctionTypes extends SignatureTest("refinedFunctionTypes", SignatureTest.all) From 9d1f20d6f181f2c6a44aa227bb69fcbcb07c43bd Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Dec 2023 12:25:25 +0100 Subject: [PATCH 257/465] Improvements to tuples: Drop experimental --- library/src/scala/Tuple.scala | 20 +++++++++---------- library/src/scala/runtime/Tuples.scala | 1 - .../stdlibExperimentalDefinitions.scala | 6 ------ 3 files changed, 9 insertions(+), 18 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 3738bd05a19b..663d124b2df5 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -1,6 +1,6 @@ package scala -import annotation.{experimental, showAsInfix} +import annotation.showAsInfix import compiletime.* import compiletime.ops.int.* @@ -65,7 +65,6 @@ sealed trait Tuple extends Product { inline def take[This >: this.type <: Tuple](n: Int): Take[This, n.type] = runtime.Tuples.take(this, 
n).asInstanceOf[Take[This, n.type]] - /** Given a tuple `(a1, ..., am)`, returns the tuple `(an+1, ..., am)` consisting * all its elements except the first n ones. */ @@ -82,7 +81,6 @@ sealed trait Tuple extends Product { /** Given a tuple `(a1, ..., am)`, returns the reversed tuple `(am, ..., a1)` * consisting all its elements. */ - @experimental inline def reverse[This >: this.type <: Tuple]: Reverse[This] = runtime.Tuples.reverse(this).asInstanceOf[Reverse[This]] } @@ -201,14 +199,14 @@ object Tuple { type IsMappedBy[F[_]] = [X <: Tuple] =>> X =:= Map[InverseMap[X, F], F] /** Type of the reversed tuple */ - @experimental - type Reverse[X <: Tuple] = ReverseOnto[X, EmptyTuple] - - /** Prepends all elements of a tuple in reverse order onto the other tuple */ - @experimental - type ReverseOnto[From <: Tuple, +To <: Tuple] <: Tuple = From match - case x *: xs => ReverseOnto[xs, x *: To] - case EmptyTuple => To + type Reverse[X <: Tuple] = Helpers.ReverseImpl[EmptyTuple, X] + + object Helpers: + + /** Type of the reversed tuple */ + type ReverseImpl[Acc <: Tuple, X <: Tuple] <: Tuple = X match + case x *: xs => ReverseImpl[x *: Acc, xs] + case EmptyTuple => Acc /** Transforms a tuple `(T1, ..., Tn)` into `(T1, ..., Ti)`. 
*/ type Take[T <: Tuple, N <: Int] <: Tuple = N match { diff --git a/library/src/scala/runtime/Tuples.scala b/library/src/scala/runtime/Tuples.scala index 41425e8559ba..81dca31e355e 100644 --- a/library/src/scala/runtime/Tuples.scala +++ b/library/src/scala/runtime/Tuples.scala @@ -505,7 +505,6 @@ object Tuples { } } - @experimental def reverse(self: Tuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlReverse(xxl) case _ => specialCaseReverse(self) diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 76c08fa24213..b19ce1c2ebef 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -71,12 +71,6 @@ val experimentalDefinitionInLibrary = Set( "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.erasedArgs", "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.hasErasedArgs", - // New feature: reverse method on Tuple - "scala.Tuple.reverse", // can be stabilized in 3.5 - "scala.Tuple$.Reverse", // can be stabilized in 3.5 - "scala.Tuple$.ReverseOnto", // can be stabilized in 3.5 - "scala.runtime.Tuples$.reverse", // can be stabilized in 3.5 - // New feature: fromNullable for explicit nulls "scala.Predef$.fromNullable", ) From 2395ece1106a62f7841efc850415f6be11d55378 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Dec 2023 14:54:23 +0100 Subject: [PATCH 258/465] Improvements to Tuples: New methods New methods: filter, indicesWhere, reverseOnto --- library/src/scala/Tuple.scala | 80 +++++++++++++++++++++++++---------- 1 file changed, 58 insertions(+), 22 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 663d124b2df5..471bb228b4af 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -5,7 +5,7 @@ import compiletime.* import compiletime.ops.int.* /** Tuple of arbitrary arity */ -sealed trait Tuple 
extends Product { +sealed trait Tuple extends Product: import Tuple.* /** Create a copy of this tuple as an Array */ @@ -83,9 +83,23 @@ sealed trait Tuple extends Product { */ inline def reverse[This >: this.type <: Tuple]: Reverse[This] = runtime.Tuples.reverse(this).asInstanceOf[Reverse[This]] -} -object Tuple { + /** A tuple with the elements of this tuple in reversed order added in front of `acc` */ + inline def reverseOnto[This >: this.type <: Tuple, Acc <: Tuple](acc: Acc): ReverseOnto[This, Acc] = + (this.reverse ++ acc).asInstanceOf[ReverseOnto[This, Acc]] + + /** A tuple consisting of all elements of this tuple that have types + * for which the given type level predicate `P` reduces to the literal + * constant `true`. + */ + inline def filter[This >: this.type <: Tuple, P[_] <: Boolean]: Filter[This, P] = + val toInclude = constValueTuple[IndicesWhere[This, P]].toArray + val arr = new Array[Object](toInclude.length) + for i <- 0 until toInclude.length do + arr(i) = this.productElement(toInclude(i).asInstanceOf[Int]).asInstanceOf[Object] + Tuple.fromArray(arr).asInstanceOf[Filter[This, P]] + +object Tuple: /** Type of a tuple with an element appended */ type Append[X <: Tuple, Y] <: NonEmptyTuple = X match { @@ -165,25 +179,38 @@ object Tuple { * ``` * @syntax markdown */ - type Filter[Tup <: Tuple, P[_] <: Boolean] <: Tuple = Tup match { + type Filter[X <: Tuple, P[_] <: Boolean] <: Tuple = X match case EmptyTuple => EmptyTuple - case h *: t => P[h] match { + case h *: t => P[h] match case true => h *: Filter[t, P] case false => Filter[t, P] - } - } - /** Given two tuples, `A1 *: ... *: An * At` and `B1 *: ... *: Bn *: Bt` - * where at least one of `At` or `Bt` is `EmptyTuple` or `Tuple`, - * returns the tuple type `(A1, B1) *: ... *: (An, Bn) *: Ct` - * where `Ct` is `EmptyTuple` if `At` or `Bt` is `EmptyTuple`, otherwise `Ct` is `Tuple`. + /** A tuple consisting of those indices `N` of tuple `X` where the predicate `P` + * is true for `Elem[X, N]`. 
Indices are type level values <: Int. */ - type Zip[T1 <: Tuple, T2 <: Tuple] <: Tuple = (T1, T2) match { + type IndicesWhere[X <: Tuple, P[_] <: Boolean] = + helpers.IndicesWhereHelper[X, P, 0] + + /** The type of the tuple consisting of all element values of + * tuple `X` zipped with corresponding elements of tuple `Y`. + * If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * For example, if + * ``` + * X = (S1, ..., Si) + * Y = (T1, ..., Tj) where j >= i + * ``` + * then + * ``` + * Zip[X, Y] = ((S1, T1), ..., (Si, Ti)) + * ``` + * @syntax markdown + */ + type Zip[T1 <: Tuple, T2 <: Tuple] <: Tuple = (T1, T2) match case (h1 *: t1, h2 *: t2) => (h1, h2) *: Zip[t1, t2] case (EmptyTuple, _) => EmptyTuple case (_, EmptyTuple) => EmptyTuple case _ => Tuple - } /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... Tn)` */ type InverseMap[X <: Tuple, F[_]] <: Tuple = X match { @@ -198,15 +225,13 @@ object Tuple { */ type IsMappedBy[F[_]] = [X <: Tuple] =>> X =:= Map[InverseMap[X, F], F] - /** Type of the reversed tuple */ - type Reverse[X <: Tuple] = Helpers.ReverseImpl[EmptyTuple, X] - - object Helpers: + /** A tuple with the elements of tuple `X` in reversed order */ + type Reverse[X <: Tuple] = ReverseOnto[X, EmptyTuple] - /** Type of the reversed tuple */ - type ReverseImpl[Acc <: Tuple, X <: Tuple] <: Tuple = X match - case x *: xs => ReverseImpl[x *: Acc, xs] - case EmptyTuple => Acc + /** A tuple with the elements of tuple `X` in reversed order added in front of `Acc` */ + type ReverseOnto[X <: Tuple, Acc <: Tuple] <: Tuple = X match + case x *: xs => ReverseOnto[xs, x *: Acc] + case EmptyTuple => Acc /** Transforms a tuple `(T1, ..., Tn)` into `(T1, ..., Ti)`. 
*/ type Take[T <: Tuple, N <: Int] <: Tuple = N match { @@ -275,7 +300,18 @@ object Tuple { given canEqualTuple[H1, T1 <: Tuple, H2, T2 <: Tuple]( using eqHead: CanEqual[H1, H2], eqTail: CanEqual[T1, T2] ): CanEqual[H1 *: T1, H2 *: T2] = CanEqual.derived -} + + object helpers: + + /** Used to implement IndicesWhere */ + type IndicesWhereHelper[X <: Tuple, P[_] <: Boolean, N <: Int] <: Tuple = X match + case EmptyTuple => EmptyTuple + case h *: t => P[h] match + case true => N *: IndicesWhereHelper[t, P, S[N]] + case false => IndicesWhereHelper[t, P, S[N]] + + end helpers +end Tuple /** A tuple of 0 elements */ type EmptyTuple = EmptyTuple.type From 64a79c914ce0dc71306b9cb57546923b78191cb3 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Dec 2023 15:00:11 +0100 Subject: [PATCH 259/465] Improvements to tuples: Allow prefix slice in fromArray --- library/src/scala/Tuple.scala | 15 +++++++++++---- library/src/scala/runtime/Tuples.scala | 11 ++++++++--- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 471bb228b4af..a5967686ad3e 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -271,22 +271,29 @@ object Tuple: def unapply(x: EmptyTuple): true = true /** Convert an array into a tuple of unknown arity and types */ - def fromArray[T](xs: Array[T]): Tuple = { + def fromArray[T](xs: Array[T]): Tuple = + fromArray(xs, xs.length) + + /** Convert the first `n` elements of an array into a tuple of unknown arity and types */ + def fromArray[T](xs: Array[T], n: Int): Tuple = { val xs2 = xs match { case xs: Array[Object] => xs case xs => xs.map(_.asInstanceOf[Object]) } - runtime.Tuples.fromArray(xs2) + runtime.Tuples.fromArray(xs2, n) } /** Convert an immutable array into a tuple of unknown arity and types */ - def fromIArray[T](xs: IArray[T]): Tuple = { + def fromIArray[T](xs: IArray[T]): Tuple = fromIArray(xs, xs.length) + + /** Convert the first `n` elements of 
an immutable array into a tuple of unknown arity and types */ + def fromIArray[T](xs: IArray[T], n: Int): Tuple = { val xs2: IArray[Object] = xs match { case xs: IArray[Object] @unchecked => xs case _ => xs.map(_.asInstanceOf[Object]) } - runtime.Tuples.fromIArray(xs2) + runtime.Tuples.fromIArray(xs2, n) } /** Convert a Product into a tuple of unknown arity and types */ diff --git a/library/src/scala/runtime/Tuples.scala b/library/src/scala/runtime/Tuples.scala index 81dca31e355e..be6904b9d1d0 100644 --- a/library/src/scala/runtime/Tuples.scala +++ b/library/src/scala/runtime/Tuples.scala @@ -28,7 +28,7 @@ object Tuples { arr } - def fromArray(xs: Array[Object]): Tuple = xs.length match { + def fromArray(xs: Array[Object], n: Int): Tuple = n match { case 0 => EmptyTuple case 1 => Tuple1(xs(0)) case 2 => Tuple2(xs(0), xs(1)) @@ -55,10 +55,15 @@ object Tuples { case _ => TupleXXL.fromIArray(xs.clone().asInstanceOf[IArray[Object]]).asInstanceOf[Tuple] } - def fromIArray(xs: IArray[Object]): Tuple = - if (xs.length <= 22) fromArray(xs.asInstanceOf[Array[Object]]) + def fromArray(xs: Array[Object]): Tuple = fromArray(xs, xs.length) + + def fromIArray(xs: IArray[Object], n: Int): Tuple = + if n <= 22 || n != xs.length + then fromArray(xs.asInstanceOf[Array[Object]], n) else TupleXXL.fromIArray(xs).asInstanceOf[Tuple] + def fromIArray(xs: IArray[Object]): Tuple = fromIArray(xs, xs.length) + def fromProduct(xs: Product): Tuple = (xs.productArity match { case 0 => EmptyTuple case 1 => From cb952651f0d417a4e05c383f85ad18e3b6362dfb Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Dec 2023 17:52:24 +0100 Subject: [PATCH 260/465] Improvements to tuples: Rearrange types into a more logical order --- library/src/scala/Tuple.scala | 131 +++++++++++++++++----------------- 1 file changed, 64 insertions(+), 67 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index a5967686ad3e..2c9b22f0b761 100644 --- a/library/src/scala/Tuple.scala +++ 
b/library/src/scala/Tuple.scala @@ -101,75 +101,95 @@ sealed trait Tuple extends Product: object Tuple: - /** Type of a tuple with an element appended */ - type Append[X <: Tuple, Y] <: NonEmptyTuple = X match { - case EmptyTuple => Y *: EmptyTuple - case x *: xs => x *: Append[xs, Y] - } + /** The size of a tuple, represented as a literal constant subtype of Int */ + type Size[X <: Tuple] <: Int = X match + case EmptyTuple => 0 + case x *: xs => S[Size[xs]] + + /** The type of the element at position N in the tuple X */ + type Elem[X <: Tuple, N <: Int] = X match + case x *: xs => + N match + case 0 => x + case S[n1] => Elem[xs, n1] - /** Type of the head of a tuple */ - type Head[X <: Tuple] = X match { + /** The type of the first element of a tuple */ + type Head[X <: Tuple] = X match case x *: _ => x - } - /** Type of the initial part of the tuple without its last element */ - type Init[X <: Tuple] <: Tuple = X match { + /** The type of the last element of a tuple */ + type Last[X <: Tuple] = X match + case x *: EmptyTuple => x + case _ *: xs => Last[xs] + + /** The type of a tuple consisting of all elements of tuple X except the first one */ + type Tail[X <: Tuple] <: Tuple = X match + case _ *: xs => xs + + /** The type of the initial part of a tuple without its last element */ + type Init[X <: Tuple] <: Tuple = X match case _ *: EmptyTuple => EmptyTuple case x *: xs => x *: Init[xs] - } - /** Type of the tail of a tuple */ - type Tail[X <: Tuple] <: Tuple = X match { - case _ *: xs => xs - } + /** The type of the tuple consisting of the first `N` elements of `X`, + * or all elements if `N` exceeds `Size[X]`. 
+ */ + type Take[X <: Tuple, N <: Int] <: Tuple = N match + case 0 => EmptyTuple + case S[n1] => X match + case EmptyTuple => EmptyTuple + case x *: xs => x *: Take[xs, n1] - /** Type of the last element of a tuple */ - type Last[X <: Tuple] = X match { - case x *: EmptyTuple => x - case _ *: xs => Last[xs] + /** The type of the tuple consisting of all elements of `X` except the first `N` ones, + * or no elements if `N` exceeds `Size[X]`. + */ + type Drop[X <: Tuple, N <: Int] <: Tuple = N match { + case 0 => X + case S[n1] => X match { + case EmptyTuple => EmptyTuple + case x *: xs => Drop[xs, n1] + } } - /** Type of the concatenation of two tuples */ - type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match { - case EmptyTuple => Y - case x1 *: xs1 => x1 *: Concat[xs1, Y] - } + /** The pair type `(Take(X, N), Drop[X, N]). */ + type Split[X <: Tuple, N <: Int] = (Take[X, N], Drop[X, N]) - /** Type of the element at position N in the tuple X */ - type Elem[X <: Tuple, N <: Int] = X match { - case x *: xs => - N match { - case 0 => x - case S[n1] => Elem[xs, n1] - } + /** Type of a tuple with an element appended */ + type Append[X <: Tuple, Y] <: NonEmptyTuple = X match { + case EmptyTuple => Y *: EmptyTuple + case x *: xs => x *: Append[xs, Y] } - /** Literal constant Int size of a tuple */ - type Size[X <: Tuple] <: Int = X match { - case EmptyTuple => 0 - case x *: xs => S[Size[xs]] - } + /** Type of the concatenation of two tuples `X` and `Y` */ + type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match + case EmptyTuple => Y + case x1 *: xs1 => x1 *: Concat[xs1, Y] /** Fold a tuple `(T1, ..., Tn)` into `F[T1, F[... F[Tn, Z]...]]]` */ type Fold[Tup <: Tuple, Z, F[_, _]] = Tup match case EmptyTuple => Z case h *: t => F[h, Fold[t, Z, F]] - /** Converts a tuple `(T1, ..., Tn)` to `(F[T1], ..., F[Tn])` */ - type Map[Tup <: Tuple, F[_ <: Union[Tup]]] <: Tuple = Tup match { + /** The type of tuple `X` mapped with the type-level function `F`. 
+ * If `X = (T1, ..., Ti)` then `Map[X, F] = `(F[T1], ..., F[Ti])`. + */ + type Map[Tup <: Tuple, F[_ <: Union[Tup]]] <: Tuple = Tup match case EmptyTuple => EmptyTuple case h *: t => F[h] *: Map[t, F] - } - /** Converts a tuple `(T1, ..., Tn)` to a flattened `(..F[T1], ..., ..F[Tn])` */ - type FlatMap[Tup <: Tuple, F[_ <: Union[Tup]] <: Tuple] <: Tuple = Tup match { + /** The type of tuple `X` flat-mapped with the type-level function `F`. + * If `X = (T1, ..., Ti)` then `FlatMap[X, F] = `F[T1] ++ ... ++ F[Ti]` + */ + type FlatMap[Tup <: Tuple, F[_ <: Union[Tup]] <: Tuple] <: Tuple = Tup match case EmptyTuple => EmptyTuple case h *: t => Concat[F[h], FlatMap[t, F]] - } + // TODO: implement term level analogue - /** Filters out those members of the tuple for which the predicate `P` returns `false`. - * A predicate `P[X]` is a type that can be either `true` or `false`. For example: + /** The type of the tuple consisting of all elements of tuple `X` that have types + * for which the given type level predicate `P` reduces to the literal + * constant `true`. A predicate `P[X]` is a type that can be either `true` + * or `false`. For example: * ```scala * type IsString[x] <: Boolean = x match { * case String => true @@ -233,29 +253,6 @@ object Tuple: case x *: xs => ReverseOnto[xs, x *: Acc] case EmptyTuple => Acc - /** Transforms a tuple `(T1, ..., Tn)` into `(T1, ..., Ti)`. */ - type Take[T <: Tuple, N <: Int] <: Tuple = N match { - case 0 => EmptyTuple - case S[n1] => T match { - case EmptyTuple => EmptyTuple - case x *: xs => x *: Take[xs, n1] - } - } - - /** Transforms a tuple `(T1, ..., Tn)` into `(Ti+1, ..., Tn)`. */ - type Drop[T <: Tuple, N <: Int] <: Tuple = N match { - case 0 => T - case S[n1] => T match { - case EmptyTuple => EmptyTuple - case x *: xs => Drop[xs, n1] - } - } - - /** Splits a tuple (T1, ..., Tn) into a pair of two tuples `(T1, ..., Ti)` and - * `(Ti+1, ..., Tn)`. 
- */ - type Split[T <: Tuple, N <: Int] = (Take[T, N], Drop[T, N]) - /** Given a tuple `(T1, ..., Tn)`, returns a union of its * member types: `T1 | ... | Tn`. Returns `Nothing` if the tuple is empty. */ From 6ae8252653f5a03b6ba9f8f430537bca1b5b33b4 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Dec 2023 17:53:15 +0100 Subject: [PATCH 261/465] Improvements to tuples: more new types and methods --- library/src/scala/Tuple.scala | 72 +++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 2c9b22f0b761..e84e1fe562c3 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -166,6 +166,17 @@ object Tuple: case EmptyTuple => Y case x1 *: xs1 => x1 *: Concat[xs1, Y] + /** An infix shorthand for `Concat[X, Y]` */ + infix type ++[X <: Tuple, +Y <: Tuple] = Concat[X, Y] + + /** The index of `Y` in tuple `X` as a literal constant Int, + * or `Size[X]` if `Y` does not occur in `X` + */ + type IndexOf[X <: Tuple, Y] <: Int = X match + case Y *: _ => 0 + case x *: xs => S[IndexOf[xs, Y]] + case EmptyTuple => 0 + /** Fold a tuple `(T1, ..., Tn)` into `F[T1, F[... F[Tn, Z]...]]]` */ type Fold[Tup <: Tuple, Z, F[_, _]] = Tup match case EmptyTuple => Z @@ -258,6 +269,42 @@ object Tuple: */ type Union[T <: Tuple] = Fold[T, Nothing, [x, y] =>> x | y] + /** A type level Boolean indicating whether the tuple `X` conforms + * to the tuple `Y`. This means: + * - the two tuples have the same number of elements + * - for corresponding elements `x` in `X` and `y` in `Y`, `x` matches `y`. + * @pre The elements of `X` are assumed to be singleton types + */ + type Conforms[X <: Tuple, Y <: Tuple] <: Boolean = Y match + case EmptyTuple => + X match + case EmptyTuple => true + case _ => false + case y *: ys => + X match + case `y` *: xs => Conforms[xs, ys] + case _ => false + + /** A type level Boolean indicating whether the tuple `X` has an element + * that matches `Y`. 
+ * @pre The elements of `X` are assumed to be singleton types + */ + type Contains[X <: Tuple, Y] <: Boolean = X match + case Y *: _ => true + case x *: xs => Contains[xs, Y] + case EmptyTuple => false + + /** A type level Boolean indicating whether the type `Y` contains + * none of the elements of `X`. + * @pre The elements of `X` and `Y` are assumed to be singleton types + */ + type Disjoint[X <: Tuple, Y <: Tuple] <: Boolean = X match + case x *: xs => + Contains[Y, x] match + case true => false + case false => Disjoint[xs, Y] + case EmptyTuple => true + /** Empty tuple */ def apply(): EmptyTuple = EmptyTuple @@ -297,6 +344,31 @@ object Tuple: def fromProduct(product: Product): Tuple = runtime.Tuples.fromProduct(product) + extension [X <: Tuple](inline x: X) + + /** The index (starting at 0) of the first element in the type `X` of `x` + * that matches type `Y`. + */ + inline def indexOfType[Y] = constValue[IndexOf[X, Y]] + + /** A boolean indicating whether there is an element in the type `X` of `x` + * that matches type `Y`. + */ + + inline def containsType[Y] = constValue[Contains[X, Y]] + + /* Note: It would be nice to add the following two extension methods: + + inline def indexOf[Y: Precise](y: Y) = constValue[IndexOf[X, Y]] + inline def containsType[Y: Precise](y: Y) = constValue[Contains[X, Y]] + + because we could then move indexOf/contains completely to the value level. + But this requires `Y` to be inferred precisely, and therefore a mechanism + like the `Precise` context bound used above, which does not yet exist. 
+ */ + + end extension + def fromProductTyped[P <: Product](p: P)(using m: scala.deriving.Mirror.ProductOf[P]): m.MirroredElemTypes = runtime.Tuples.fromProduct(p).asInstanceOf[m.MirroredElemTypes] From 1f79b87803b2aa55b0b2dfd0ed260337e1d77dd2 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 2 Dec 2023 19:41:52 +0100 Subject: [PATCH 262/465] Add NamedTuple object to library --- library/src/scala/NamedTuple.scala | 112 ++++++++++++++++++ .../stdlibExperimentalDefinitions.scala | 4 + 2 files changed, 116 insertions(+) create mode 100644 library/src/scala/NamedTuple.scala diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala new file mode 100644 index 000000000000..9970adfccce7 --- /dev/null +++ b/library/src/scala/NamedTuple.scala @@ -0,0 +1,112 @@ +package scala +import annotation.experimental +import compiletime.ops.boolean.* + +@experimental +object NamedTuple: + + opaque type AnyNamedTuple = Any + opaque type NamedTuple[N <: Tuple, V <: Tuple] >: V <: AnyNamedTuple = V + + def apply[N <: Tuple, V <: Tuple](x: V) = x + + def unapply[N <: Tuple, V <: Tuple](x: NamedTuple[N, V]): Some[V] = Some(x) + + extension [V <: Tuple](x: V) + inline def withNames[N <: Tuple]: NamedTuple[N, V] = x + + extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) + + inline def values: V = x + + inline def size: Tuple.Size[V] = values.size + + // This intentionally works for empty named tuples as well. I think NnEmptyTuple is a dead end + // and should be reverted, justy like NonEmptyList is also appealing at first, but a bad idea + // in the end. 
+ inline def apply(n: Int): Tuple.Elem[V, n.type] = + inline values match + case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] + case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] + + inline def head: Tuple.Elem[V, 0] = apply(0) + inline def tail: Tuple.Drop[V, 1] = values.drop(1) + + inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] + inline def init: Tuple.Init[V] = values.take(size - 1).asInstanceOf[Tuple.Init[V]] + + inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = + values.take(n) + + inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = + values.drop(n) + + inline def splitAt(n: Int): NamedTuple[Tuple.Split[N, n.type], Tuple.Split[V, n.type]] = + values.splitAt(n) + + inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) + : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] + = values ++ that.values + + // inline def :* [L] (x: L): NamedTuple[Append[N, ???], Append[V, L] = ??? + // inline def *: [H] (x: H): NamedTuple[??? *: N], H *: V] = ??? 
+ + inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = + values.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] + + inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = + values.reverse + + inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = + values.zip(that.values) + + inline def toList: List[Tuple.Union[V]] = values.toList.asInstanceOf[List[Tuple.Union[V]]] + inline def toArray: Array[Object] = values.toArray + inline def toIArray: IArray[Object] = values.toIArray + + end extension + + /** The names of the named tuple type `NT` */ + type Names[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[n, _] => n + + /** The value types of the named tuple type `NT` */ + type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[_, x] => x + + type Size[X <: AnyNamedTuple] = Tuple.Size[DropNames[X]] + + type Elem[X <: AnyNamedTuple, N <: Int] = Tuple.Elem[DropNames[X], N] + + type Head[X <: AnyNamedTuple] = Elem[X, 0] + + type Last[X <: AnyNamedTuple] = Tuple.Last[DropNames[X]] + + type Init[X <: AnyNamedTuple] = + NamedTuple[Tuple.Init[Names[X]], Tuple.Init[DropNames[X]]] + + type Tail[X <: AnyNamedTuple] = Drop[X, 1] + + type Take[X <: AnyNamedTuple, N <: Int] = + NamedTuple[Tuple.Take[Names[X], N], Tuple.Take[DropNames[X], N]] + + type Drop[X <: AnyNamedTuple, N <: Int] = + NamedTuple[Tuple.Drop[Names[X], N], Tuple.Drop[DropNames[X], N]] + + type Split[X <: AnyNamedTuple, N <: Int] = (Take[X, N], Drop[X, N]) + + type Concat[X <: AnyNamedTuple, Y <: AnyNamedTuple] = + NamedTuple[Tuple.Concat[Names[X], Names[Y]], Tuple.Concat[DropNames[X], DropNames[Y]]] + + type Map[X <: AnyNamedTuple, F[_ <: Tuple.Union[DropNames[X]]]] = + NamedTuple[Names[X], Tuple.Map[DropNames[X], F]] + + type Reverse[X <: AnyNamedTuple] = + NamedTuple[Tuple.Reverse[Names[X]], Tuple.Reverse[DropNames[X]]] + + type Zip[X <: AnyNamedTuple, Y <: AnyNamedTuple] = + Tuple.Conforms[Names[X], Names[Y]] 
match + case true => + NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] + +end NamedTuple diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index b19ce1c2ebef..26cad0668b37 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -73,6 +73,10 @@ val experimentalDefinitionInLibrary = Set( // New feature: fromNullable for explicit nulls "scala.Predef$.fromNullable", + + // New feature: named tuples + "scala.NamedTuple", + "scala.NamedTuple$", ) From b9899b7718284280b6d1762e5ce3119160e6d8f4 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 3 Dec 2023 13:15:24 +0100 Subject: [PATCH 263/465] Support for named tuples with new representation --- .../src/dotty/tools/dotc/ast/Desugar.scala | 107 ++++++++-- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 4 + compiler/src/dotty/tools/dotc/ast/untpd.scala | 8 +- .../src/dotty/tools/dotc/config/Feature.scala | 1 + .../dotty/tools/dotc/core/Definitions.scala | 11 ++ .../src/dotty/tools/dotc/core/StdNames.scala | 2 + .../src/dotty/tools/dotc/core/TypeUtils.scala | 36 +++- .../dotty/tools/dotc/parsing/Parsers.scala | 80 +++++--- .../tools/dotc/printing/PlainPrinter.scala | 14 +- .../tools/dotc/printing/RefinedPrinter.scala | 17 +- .../tools/dotc/transform/PatternMatcher.scala | 24 ++- .../dotty/tools/dotc/typer/Applications.scala | 12 +- .../src/dotty/tools/dotc/typer/Typer.scala | 140 ++++++------- .../test/dotc/pos-test-pickling.blacklist | 2 + docs/_docs/internals/syntax.md | 9 +- docs/_docs/reference/syntax.md | 8 +- docs/sidebar.yml | 1 + library/src/scala/NamedTuple.scala | 114 +++++++++-- library/src/scala/runtime/LazyVals.scala | 2 +- .../runtime/stdLibPatches/language.scala | 7 + tests/neg/depfuns.scala | 4 +- tests/neg/i7247.scala | 2 +- tests/neg/i7751.scala | 2 +- tests/neg/named-tuples-2.check | 8 + tests/neg/named-tuples-2.scala | 6 
+ tests/neg/named-tuples.check | 105 ++++++++++ tests/neg/named-tuples.scala | 51 +++++ tests/neg/namedTypeParams.check | 16 +- tests/new/test.scala | 11 +- tests/pos/named-tuples-strawman-2.scala | 185 ++++++++++++++++++ tests/pos/named-tuples-strawman.scala | 48 +++++ tests/pos/named-tuples.check | 10 + tests/pos/named-tuples1.scala | 13 ++ tests/pos/tuple-ops.scala | 36 ++++ tests/run/named-patterns.check | 10 + tests/run/named-patterns.scala | 43 ++++ tests/run/named-tuples-xxl.check | 6 + tests/run/named-tuples-xxl.scala | 91 +++++++++ tests/run/named-tuples.check | 9 + tests/run/named-tuples.scala | 99 ++++++++++ 40 files changed, 1175 insertions(+), 179 deletions(-) create mode 100644 tests/neg/named-tuples-2.check create mode 100644 tests/neg/named-tuples-2.scala create mode 100644 tests/neg/named-tuples.check create mode 100644 tests/neg/named-tuples.scala create mode 100644 tests/pos/named-tuples-strawman-2.scala create mode 100644 tests/pos/named-tuples-strawman.scala create mode 100644 tests/pos/named-tuples.check create mode 100644 tests/pos/named-tuples1.scala create mode 100644 tests/pos/tuple-ops.scala create mode 100644 tests/run/named-patterns.check create mode 100644 tests/run/named-patterns.scala create mode 100644 tests/run/named-tuples-xxl.check create mode 100644 tests/run/named-tuples-xxl.scala create mode 100644 tests/run/named-tuples.check create mode 100644 tests/run/named-tuples.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 03505075121a..39a30a185c72 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -9,10 +9,10 @@ import Decorators.* import Annotations.Annotation import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} -import util.{Property, SourceFile, SourcePosition, Chars} +import util.{Property, 
SourceFile, SourcePosition, SrcPos, Chars} import config.Feature.{sourceVersion, migrateTo3, enabled} import config.SourceVersion.* -import collection.mutable.ListBuffer +import collection.mutable import reporting.* import annotation.constructorOnly import printing.Formatting.hl @@ -234,7 +234,7 @@ object desugar { private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = val DefDef(_, paramss, tpt, rhs) = meth - val evidenceParamBuf = ListBuffer[ValDef]() + val evidenceParamBuf = mutable.ListBuffer[ValDef]() var seenContextBounds: Int = 0 def desugarContextBounds(rhs: Tree): Tree = rhs match @@ -1441,22 +1441,101 @@ object desugar { AppliedTypeTree( TypeTree(defn.throwsAlias.typeRef).withSpan(op.span), tpt :: excepts :: Nil) + private def checkWellFormedTupleElems(elems: List[Tree])(using Context): List[Tree] = + val seen = mutable.Set[Name]() + for case arg @ NamedArg(name, _) <- elems do + if seen.contains(name) then + report.error(em"Duplicate tuple element name", arg.srcPos) + seen += name + if name.startsWith("_") && name.toString.tail.toIntOption.isDefined then + report.error( + em"$name cannot be used as the name of a tuple element because it is a regular tuple selector", + arg.srcPos) + + elems match + case elem :: elems1 => + val mismatchOpt = + if elem.isInstanceOf[NamedArg] + then elems1.find(!_.isInstanceOf[NamedArg]) + else elems1.find(_.isInstanceOf[NamedArg]) + mismatchOpt match + case Some(misMatch) => + report.error(em"Illegal combination of named and unnamed tuple elements", misMatch.srcPos) + elems.mapConserve(dropNamedArg) + case None => elems + case _ => elems + end checkWellFormedTupleElems + /** Translate tuple expressions of arity <= 22 * * () ==> () * (t) ==> t * (t1, ..., tN) ==> TupleN(t1, ..., tN) */ - def smallTuple(tree: Tuple)(using Context): Tree = { - val ts = tree.trees - val arity = ts.length - assert(arity <= Definitions.MaxTupleArity) - def tupleTypeRef = defn.TupleType(arity).nn - if 
(arity == 0) - if (ctx.mode is Mode.Type) TypeTree(defn.UnitType) else unitLiteral - else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts) - else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts) - } + def tuple(tree: Tuple, pt: Type)(using Context): Tree = + var elems = checkWellFormedTupleElems(tree.trees) + if ctx.mode.is(Mode.Pattern) then elems = adaptPatternArgs(elems, pt) + val elemValues = elems.mapConserve(dropNamedArg) + val tup = + val arity = elems.length + if arity <= Definitions.MaxTupleArity then + def tupleTypeRef = defn.TupleType(arity).nn + val tree1 = + if arity == 0 then + if ctx.mode is Mode.Type then TypeTree(defn.UnitType) else unitLiteral + else if ctx.mode is Mode.Type then AppliedTypeTree(ref(tupleTypeRef), elemValues) + else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), elemValues) + tree1.withSpan(tree.span) + else + cpy.Tuple(tree)(elemValues) + val names = elems.collect: + case NamedArg(name, arg) => name + if names.isEmpty || ctx.mode.is(Mode.Pattern) then + tup + else + def namesTuple = inMode(ctx.mode &~ Mode.Pattern | Mode.Type): + tuple(Tuple( + names.map: name => + SingletonTypeTree(Literal(Constant(name.toString))).withSpan(tree.span)), + WildcardType) + if ctx.mode.is(Mode.Type) then + AppliedTypeTree(ref(defn.NamedTupleTypeRef), namesTuple :: tup :: Nil) + else + TypeApply( + Apply(Select(ref(defn.NamedTupleModule), nme.withNames), tup), + namesTuple :: Nil) + + /** When desugaring a list pattern arguments `elems` adapt them and the + * expected type `pt` to each other. This means: + * - If `elems` are named pattern elements, rearrange them to match `pt`. + * This requires all names in `elems` to be also present in `pt`. + * - If `elems` are unnamed elements, and `pt` is a named tuple, drop all + * tuple element names from `pt`. 
+ */ + def adaptPatternArgs(elems: List[Tree], pt: Type)(using Context): List[Tree] = + + def reorderedNamedArgs(wildcardSpan: Span): List[untpd.Tree] = + var selNames = pt.namedTupleElementTypes.map(_(0)) + if selNames.isEmpty && pt.classSymbol.is(CaseClass) then + selNames = pt.classSymbol.caseAccessors.map(_.name.asTermName) + val nameToIdx = selNames.zipWithIndex.toMap + val reordered = Array.fill[untpd.Tree](selNames.length): + untpd.Ident(nme.WILDCARD).withSpan(wildcardSpan) + for case arg @ NamedArg(name: TermName, _) <- elems do + nameToIdx.get(name) match + case Some(idx) => + if reordered(idx).isInstanceOf[Ident] then + reordered(idx) = arg + else + report.error(em"Duplicate named pattern", arg.srcPos) + case _ => + report.error(em"No element named `$name` is defined in selector type $pt", arg.srcPos) + reordered.toList + + elems match + case (first @ NamedArg(_, _)) :: _ => reorderedNamedArgs(first.span.startPos) + case _ => elems + end adaptPatternArgs private def isTopLevelDef(stat: Tree)(using Context): Boolean = stat match case _: ValDef | _: PatDef | _: DefDef | _: Export | _: ExtMethods => true @@ -1990,7 +2069,7 @@ object desugar { * without duplicates */ private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { - val buf = ListBuffer[VarInfo]() + val buf = mutable.ListBuffer[VarInfo]() def seenName(name: Name) = buf exists (_._1.name == name) def add(named: NameTree, t: Tree): Unit = if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 34c87eedb081..fbf3cfe163b2 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -244,6 +244,10 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg val isNamedArg: Any => Boolean = 
(arg: Any) => arg.isInstanceOf[Trees.NamedArg[?]] + def dropNamedArg(arg: Tree) = arg match + case NamedArg(_, arg1) => arg1 + case arg => arg + /** Is this pattern node a catch-all (wildcard or variable) pattern? */ def isDefaultCase(cdef: CaseDef): Boolean = cdef match { case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 08f3db4981ff..fa13bd6610ba 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -528,15 +528,15 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def makeSelfDef(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal) - def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match { + def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match + case (t: NamedArg) :: Nil => Tuple(t :: Nil) case t :: Nil => Parens(t) case _ => Tuple(ts) - } - def makeTuple(ts: List[Tree])(using Context): Tree = ts match { + def makeTuple(ts: List[Tree])(using Context): Tree = ts match + case (t: NamedArg) :: Nil => Tuple(t :: Nil) case t :: Nil => t case _ => Tuple(ts) - } def makeAndType(left: Tree, right: Tree)(using Context): AppliedTypeTree = AppliedTypeTree(ref(defn.andType.typeRef), left :: right :: Nil) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 4852eaba9334..1fe9cae936c9 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -33,6 +33,7 @@ object Feature: val pureFunctions = experimental("pureFunctions") val captureChecking = experimental("captureChecking") val into = experimental("into") + val namedTuples = experimental("namedTuples") def experimentalAutoEnableFeatures(using Context): List[TermName] = defn.languageExperimentalFeatures diff --git 
a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 675084ec230b..5868da397fc3 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -949,6 +949,9 @@ class Definitions { def TupleXXL_fromIterator(using Context): Symbol = TupleXXLModule.requiredMethod("fromIterator") def TupleXXL_unapplySeq(using Context): Symbol = TupleXXLModule.requiredMethod(nme.unapplySeq) + @tu lazy val NamedTupleModule = requiredModule("scala.NamedTuple") + @tu lazy val NamedTupleTypeRef: TypeRef = NamedTupleModule.termRef.select(tpnme.NamedTuple).asInstanceOf + @tu lazy val RuntimeTupleMirrorTypeRef: TypeRef = requiredClassRef("scala.runtime.TupleMirror") @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") @@ -1304,6 +1307,14 @@ class Definitions { case ByNameFunction(_) => true case _ => false + object NamedTuple: + def apply(nmes: Type, vals: Type)(using Context): Type = + AppliedType(NamedTupleTypeRef, nmes :: vals :: Nil) + def unapply(t: Type)(using Context): Option[(Type, Type)] = t match + case AppliedType(tycon, nmes :: vals :: Nil) if tycon.typeSymbol == NamedTupleTypeRef.symbol => + Some((nmes, vals)) + case _ => None + final def isCompiletime_S(sym: Symbol)(using Context): Boolean = sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 9772199678d7..ebd246d20575 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -374,6 +374,7 @@ object StdNames { val MirroredMonoType: N = "MirroredMonoType" val MirroredType: N = "MirroredType" val Modifiers: N = "Modifiers" + val NamedTuple: N = "NamedTuple" val NestedAnnotArg: N = "NestedAnnotArg" val NoFlags: N = "NoFlags" val NoPrefix: N = "NoPrefix" @@ -649,6 +650,7 @@ object 
StdNames { val wildcardType: N = "wildcardType" val withFilter: N = "withFilter" val withFilterIfRefutable: N = "withFilterIfRefutable$" + val withNames: N = "withNames" val WorksheetWrapper: N = "WorksheetWrapper" val wrap: N = "wrap" val writeReplace: N = "writeReplace" diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index a3d6ab065a77..7ac0df05b268 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -4,7 +4,8 @@ package core import TypeErasure.ErasedValueType import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* -import Names.Name +import Names.{Name, TermName} +import Constants.Constant class TypeUtils { /** A decorator that provides methods on types @@ -65,8 +66,12 @@ class TypeUtils { case tp: AppliedType if defn.isTupleNType(tp) && normalize => Some(tp.args) // if normalize is set, use the dealiased tuple // otherwise rely on the default case below to print unaliased tuples. 
+ case tp: SkolemType => + recur(tp.underlying, bound) case tp: SingletonType => - if tp.termSymbol == defn.EmptyTupleModule then Some(Nil) else None + if tp.termSymbol == defn.EmptyTupleModule then Some(Nil) + else if normalize then recur(tp.widen, bound) + else None case _ => if defn.isTupleClass(tp.typeSymbol) && !normalize then Some(tp.dealias.argInfos) else None @@ -114,6 +119,33 @@ class TypeUtils { case Some(types) => TypeOps.nestedPairs(types) case None => throw new AssertionError("not a tuple") + def namedTupleElementTypesUpTo(bound: Int, normalize: Boolean = true)(using Context): List[(TermName, Type)] = + (if normalize then self.normalized else self).dealias match + case defn.NamedTuple(nmes, vals) => + val names = nmes.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil).map: + case ConstantType(Constant(str: String)) => str.toTermName + val values = vals.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil) + names.zip(values) + case t => + Nil + + def namedTupleElementTypes(using Context): List[(TermName, Type)] = + namedTupleElementTypesUpTo(Int.MaxValue) + + def isNamedTupleType(using Context): Boolean = self match + case defn.NamedTuple(_, _) => true + case _ => false + + /** Drop all named elements in tuple type */ + def stripNamedTuple(using Context): Type = self.normalized.dealias match + case defn.NamedTuple(_, vals) => + vals + case self @ AnnotatedType(tp, annot) => + val tp1 = tp.stripNamedTuple + if tp1 ne tp then AnnotatedType(tp1, annot) else self + case _ => + self + def refinedWith(name: Name, info: Type)(using Context) = RefinedType(self, name, info) /** Is this type a methodic type that takes at least one parameter? 
*/ diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 2f3daa79fb07..d24dd2882ad6 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -646,6 +646,14 @@ object Parsers { ts.toList else leading :: Nil + def maybeNamed(op: () => Tree): () => Tree = () => + if isIdent && in.lookahead.token == EQUALS && in.featureEnabled(Feature.namedTuples) then + atSpan(in.offset): + val name = ident() + in.nextToken() + NamedArg(name, op()) + else op() + def inSepRegion[T](f: Region => Region)(op: => T): T = val cur = in.currentRegion in.currentRegion = f(cur) @@ -1644,6 +1652,14 @@ object Parsers { && in.featureEnabled(Feature.into) && canStartTypeTokens.contains(in.lookahead.token) + def convertToElem(t: Tree): Tree = t match + case ByNameTypeTree(t1) => + syntaxError(ByNameParameterNotSupported(t), t.span) + t1 + case ValDef(name, tpt, _) => + NamedArg(name, convertToElem(tpt)).withSpan(t.span) + case _ => t + var isValParamList = false if in.token == LPAREN then in.nextToken() @@ -1703,7 +1719,8 @@ object Parsers { if isValParamList || in.isArrow || isPureArrow then functionRest(args) else - val tuple = atSpan(start)(makeTupleOrParens(args1)) + val tuple = atSpan(start): + makeTupleOrParens(args.mapConserve(convertToElem)) typeRest: infixTypeRest: refinedTypeRest: @@ -1979,6 +1996,7 @@ object Parsers { * | Singleton `.' id * | Singleton `.' 
type * | ‘(’ ArgTypes ‘)’ + * | ‘(’ NamesAndTypes ‘)’ * | Refinement * | TypeSplice -- deprecated syntax (since 3.0.0) * | SimpleType1 TypeArgs @@ -1987,7 +2005,7 @@ object Parsers { def simpleType1() = simpleTypeRest { if in.token == LPAREN then atSpan(in.offset) { - makeTupleOrParens(inParensWithCommas(argTypes(namedOK = false, wildOK = true))) + makeTupleOrParens(inParensWithCommas(argTypes(namedOK = false, wildOK = true, tupleOK = true))) } else if in.token == LBRACE then atSpan(in.offset) { RefinedTypeTree(EmptyTree, refinement(indentOK = false)) } @@ -2070,32 +2088,33 @@ object Parsers { /** ArgTypes ::= Type {`,' Type} * | NamedTypeArg {`,' NamedTypeArg} * NamedTypeArg ::= id `=' Type + * NamesAndTypes ::= NameAndType {‘,’ NameAndType} + * NameAndType ::= id ':' Type */ - def argTypes(namedOK: Boolean, wildOK: Boolean): List[Tree] = { - - def argType() = { + def argTypes(namedOK: Boolean, wildOK: Boolean, tupleOK: Boolean): List[Tree] = + def argType() = val t = typ() - if (wildOK) t else rejectWildcardType(t) - } + if wildOK then t else rejectWildcardType(t) - def namedTypeArg() = { - val name = ident() - accept(EQUALS) - NamedArg(name.toTypeName, argType()) - } + def namedArgType() = + atSpan(in.offset): + val name = ident() + accept(EQUALS) + NamedArg(name.toTypeName, argType()) - if (namedOK && in.token == IDENTIFIER) - in.currentRegion.withCommasExpected { - argType() match { - case Ident(name) if in.token == EQUALS => - in.nextToken() - commaSeparatedRest(NamedArg(name, argType()), () => namedTypeArg()) - case firstArg => - commaSeparatedRest(firstArg, () => argType()) - } - } - else commaSeparated(() => argType()) - } + def namedElem() = + atSpan(in.offset): + val name = ident() + acceptColon() + NamedArg(name, argType()) + + if namedOK && isIdent && in.lookahead.token == EQUALS then + commaSeparated(() => namedArgType()) + else if tupleOK && isIdent && in.lookahead.isColon && in.featureEnabled(Feature.namedTuples) then + commaSeparated(() => 
namedElem()) + else + commaSeparated(() => argType()) + end argTypes def paramTypeOf(core: () => Tree): Tree = if in.token == ARROW || isPureArrow(nme.PUREARROW) then @@ -2142,7 +2161,7 @@ object Parsers { * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]' */ def typeArgs(namedOK: Boolean, wildOK: Boolean): List[Tree] = - inBracketsWithCommas(argTypes(namedOK, wildOK)) + inBracketsWithCommas(argTypes(namedOK, wildOK, tupleOK = false)) /** Refinement ::= `{' RefineStatSeq `}' */ @@ -2719,7 +2738,9 @@ object Parsers { } /** ExprsInParens ::= ExprInParens {`,' ExprInParens} + * | NamedExprInParens {‘,’ NamedExprInParens} * Bindings ::= Binding {`,' Binding} + * NamedExprInParens ::= id '=' ExprInParens */ def exprsInParensOrBindings(): List[Tree] = if in.token == RPAREN then Nil @@ -2729,7 +2750,7 @@ object Parsers { if isErasedKw then isFormalParams = true if isFormalParams then binding(Modifiers()) else - val t = exprInParens() + val t = maybeNamed(exprInParens)() if t.isInstanceOf[ValDef] then isFormalParams = true t commaSeparatedRest(exprOrBinding(), exprOrBinding) @@ -3083,7 +3104,7 @@ object Parsers { * | Literal * | Quoted * | XmlPattern - * | `(' [Patterns] `)' + * | `(' [Patterns | NamedPatterns] `)' * | SimplePattern1 [TypeArgs] [ArgumentPatterns] * | ‘given’ RefinedType * SimplePattern1 ::= SimpleRef @@ -3134,9 +3155,12 @@ object Parsers { p /** Patterns ::= Pattern [`,' Pattern] + * | NamedPattern {‘,’ NamedPattern} + * NamedPattern ::= id '=' Pattern */ def patterns(location: Location = Location.InPattern): List[Tree] = - commaSeparated(() => pattern(location)) + commaSeparated(maybeNamed(() => pattern(location))) + // check that patterns are all named or all unnamed is done at desugaring def patternsOpt(location: Location = Location.InPattern): List[Tree] = if (in.token == RPAREN) Nil else patterns(location) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 
241bfb4f7c7b..87f7c88e0407 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -69,7 +69,8 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp.ref) case tp @ AppliedType(tycon, args) => if (defn.isCompiletimeAppliedType(tycon.typeSymbol)) tp.tryCompiletimeConstantFold - else tycon.dealias.appliedTo(args) + else if !tycon.typeSymbol.isOpaqueAlias then tycon.dealias.appliedTo(args) + else tp case tp: NamedType => tp.reduceProjection case _ => @@ -121,16 +122,17 @@ class PlainPrinter(_ctx: Context) extends Printer { } (keyword ~ refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close - protected def argText(arg: Type, isErased: Boolean = false): Text = keywordText("erased ").provided(isErased) ~ (homogenizeArg(arg) match { - case arg: TypeBounds => "?" ~ toText(arg) - case arg => toText(arg) - }) + protected def argText(arg: Type, isErased: Boolean = false): Text = + keywordText("erased ").provided(isErased) + ~ homogenizeArg(arg).match + case arg: TypeBounds => "?" ~ toText(arg) + case arg => toText(arg) /** Pretty-print comma-separated type arguments for a constructor to be inserted among parentheses or brackets * (hence with `GlobalPrec` precedence). */ protected def argsText(args: List[Type]): Text = - atPrec(GlobalPrec) { Text(args.map(arg => argText(arg) ), ", ") } + atPrec(GlobalPrec) { Text(args.map(argText(_)), ", ") } /** The longest sequence of refinement types, starting at given type * and following parents. 
diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index e84cbc7c50d5..2873325aecb6 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -205,6 +205,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextTuple(args: List[Type]): Text = "(" ~ argsText(args) ~ ")" + def toTextNamedTuple(elems: List[(TermName, Type)]): Text = + val elemsText = atPrec(GlobalPrec): + Text(elems.map((name, tp) => toText(name) ~ " : " ~ toText(tp)), ", ") + "(" ~ elemsText ~ ")" + def isInfixType(tp: Type): Boolean = tp match case AppliedType(tycon, args) => args.length == 2 @@ -239,8 +244,14 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def appliedText(tp: Type): Text = tp match case tp @ AppliedType(tycon, args) => - tp.tupleElementTypesUpTo(200, normalize = false) match - case Some(types) if types.size >= 2 && !printDebug => toTextTuple(types) + val namedElems = tp.namedTupleElementTypesUpTo(200, normalize = false) + if namedElems.nonEmpty then + toTextNamedTuple(namedElems) + else tp.tupleElementTypesUpTo(200, normalize = false) match + //case Some(types @ (defn.NamedTupleElem(_, _) :: _)) if !printDebug => + // toTextTuple(types) + case Some(types) if types.size >= 2 && !printDebug => + toTextTuple(types) case _ => val tsym = tycon.typeSymbol if tycon.isRepeatedParam then toTextLocal(args.head) ~ "*" @@ -490,7 +501,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { exprText ~ colon ~ toText(tpt) } case NamedArg(name, arg) => - toText(name) ~ " = " ~ toText(arg) + toText(name) ~ (if name.isTermName && arg.isType then " : " else " = ") ~ toText(arg) case Assign(lhs, rhs) => changePrec(GlobalPrec) { toTextLocal(lhs) ~ " = " ~ toText(rhs) } case block: Block => diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala 
b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index bed29a122399..a7f987b8b2f3 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -112,8 +112,13 @@ object PatternMatcher { sanitize(tpe), coord = rhs.span) // TODO: Drop Case once we use everywhere else `isPatmatGenerated`. + private def dropNamedTuple(tree: Tree): Tree = + val tpe = tree.tpe.widen + if tpe.isNamedTupleType then tree.cast(tpe.stripNamedTuple) else tree + /** The plan `let x = rhs in body(x)` where `x` is a fresh variable */ - private def letAbstract(rhs: Tree, tpe: Type = NoType)(body: Symbol => Plan): Plan = { + private def letAbstract(rhs0: Tree, tpe: Type = NoType)(body: Symbol => Plan): Plan = { + val rhs = dropNamedTuple(rhs0) val declTpe = if tpe.exists then tpe else rhs.tpe val vble = newVar(rhs, EmptyFlags, declTpe) initializer(vble) = rhs @@ -334,6 +339,7 @@ object PatternMatcher { def unapplyPlan(unapp: Tree, args: List[Tree]): Plan = { def caseClass = unapp.symbol.owner.linkedClass lazy val caseAccessors = caseClass.caseAccessors + val unappType = unapp.tpe.widen.stripNamedTuple def isSyntheticScala2Unapply(sym: Symbol) = sym.is(Synthetic) && sym.owner.is(Scala2x) @@ -349,28 +355,26 @@ object PatternMatcher { !defn.isTupleNType(tree.tpe match { case tp: OrType => tp.join case tp => tp }) // widen even hard unions, to see if it's a union of tuples val components = if isGenericTuple then caseAccessors.indices.toList.map(tupleApp(_, ref(scrutinee))) else caseAccessors.map(tupleSel) matchArgsPlan(components, args, onSuccess) - else if (unapp.tpe <:< (defn.BooleanType)) + else if unappType.isRef(defn.BooleanClass) then TestPlan(GuardTest, unapp, unapp.span, onSuccess) else letAbstract(unapp) { unappResult => val isUnapplySeq = unapp.symbol.name == nme.unapplySeq - if (isProductMatch(unapp.tpe.widen, args.length) && !isUnapplySeq) { - val selectors = 
productSelectors(unapp.tpe).take(args.length) + if isProductMatch(unappType, args.length) && !isUnapplySeq then + val selectors = productSelectors(unappType).take(args.length) .map(ref(unappResult).select(_)) matchArgsPlan(selectors, args, onSuccess) } - else if (isUnapplySeq && unapplySeqTypeElemTp(unapp.tpe.widen.finalResultType).exists) { + else if isUnapplySeq && unapplySeqTypeElemTp(unappType.finalResultType).exists then unapplySeqPlan(unappResult, args) - } - else if (isUnapplySeq && isProductSeqMatch(unapp.tpe.widen, args.length, unapp.srcPos)) { - val arity = productArity(unapp.tpe.widen, unapp.srcPos) + else if isUnapplySeq && isProductSeqMatch(unappType, args.length, unapp.srcPos) then + val arity = productArity(unappType, unapp.srcPos) unapplyProductSeqPlan(unappResult, args, arity) - } else if unappResult.info <:< defn.NonEmptyTupleTypeRef then val components = (0 until foldApplyTupleType(unappResult.denot.info).length).toList.map(tupleApp(_, ref(unappResult))) matchArgsPlan(components, args, onSuccess) else { - assert(isGetMatch(unapp.tpe)) + assert(isGetMatch(unappType)) val argsPlan = { val get = ref(unappResult).select(nme.get, _.info.isParameterless) val arity = productArity(get.tpe, unapp.srcPos) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index d91c4592a77b..082c8bf3d1db 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1343,9 +1343,10 @@ trait Applications extends Compatibility { case _ => false case _ => false - def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = { + def typedUnApply(tree: untpd.Apply, selType0: Type)(using Context): Tree = { record("typedUnApply") - val Apply(qual, args) = tree + val Apply(qual, unadaptedArgs) = tree + val selType = selType0.stripNamedTuple def notAnExtractor(tree: Tree): Tree = // prefer inner errors @@ -1562,7 +1563,10 @@ 
trait Applications extends Compatibility { for (argType <- argTypes) assert(!isBounds(argType), unapplyApp.tpe.show) val bunchedArgs = argTypes match { case argType :: Nil => - if (args.lengthCompare(1) > 0 && Feature.autoTuplingEnabled && defn.isTupleNType(argType)) untpd.Tuple(args) :: Nil + if args.lengthCompare(1) > 0 + && Feature.autoTuplingEnabled + && defn.isTupleNType(argType) + then untpd.Tuple(args) :: Nil else args case _ => args } @@ -1578,7 +1582,7 @@ trait Applications extends Compatibility { else tryWithTypeTest(Typed(result, TypeTree(ownType)), selType) case tp => val unapplyErr = if (tp.isError) unapplyFn else notAnExtractor(unapplyFn) - val typedArgsErr = args mapconserve (typed(_, defn.AnyType)) + val typedArgsErr = unadaptedArgs.mapconserve(typed(_, defn.AnyType)) cpy.UnApply(tree)(unapplyErr, Nil, typedArgsErr) withType unapplyErr.tpe } } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 68a14603cb7a..a510a6eaa578 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -720,56 +720,65 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, // but that is done only after we search for extension methods or conversions. 
typedSelect(tree, pt, qual) - else if qual.tpe.isSmallGenericTuple then - val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) - typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) else - val tree1 = { - if selName.isTypeName then EmptyTree - else tryExtensionOrConversion( - tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) - }.orElse { - if ctx.gadt.isNarrowing then - // try GADT approximation if we're trying to select a member - // Member lookup cannot take GADTs into account b/c of cache, so we - // approximate types based on GADT constraints instead. For an example, - // see MemberHealing in gadt-approximation-interaction.scala. - val wtp = qual.tpe.widen - gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") - val gadtApprox = Inferencing.approximateGADT(wtp) - gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") - val qual1 = qual.cast(gadtApprox) - val tree1 = cpy.Select(tree0)(qual1, selName) - val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) - if checkedType1.exists then - gadts.println(i"Member selection healed by GADT approximation") - finish(tree1, qual1, checkedType1) - else if qual1.tpe.isSmallGenericTuple then - gadts.println(i"Tuple member selection healed by GADT approximation") - typedSelect(tree, pt, qual1) - else - tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) - else EmptyTree - } - if !tree1.isEmpty then - tree1 - else if canDefineFurther(qual.tpe.widen) then - typedSelect(tree, pt, qual) - else if qual.tpe.derivesFrom(defn.DynamicClass) - && selName.isTermName && !isDynamicExpansion(tree) - then - val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) - if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then - assignType(tree2, TryDynamicCallType) - else - typedDynamicSelect(tree2, Nil, pt) + val namedTupleElems = qual.tpe.widen.namedTupleElementTypes + 
val nameIdx = namedTupleElems.indexWhere(_._1 == selName) + if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then + typed( + untpd.Apply( + untpd.Select(untpd.TypedSplice(qual), nme.apply), + untpd.Literal(Constant(nameIdx))), + pt) + else if qual.tpe.isSmallGenericTuple then + val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) + typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) else - assignType(tree, - rawType match - case rawType: NamedType => - inaccessibleErrorType(rawType, superAccess, tree.srcPos) - case _ => - notAMemberErrorType(tree, qual, pt)) + val tree1 = + if selName.isTypeName then EmptyTree + else tryExtensionOrConversion( + tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) + .orElse { + if ctx.gadt.isNarrowing then + // try GADT approximation if we're trying to select a member + // Member lookup cannot take GADTs into account b/c of cache, so we + // approximate types based on GADT constraints instead. For an example, + // see MemberHealing in gadt-approximation-interaction.scala. 
+ val wtp = qual.tpe.widen + gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") + val gadtApprox = Inferencing.approximateGADT(wtp) + gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") + val qual1 = qual.cast(gadtApprox) + val tree1 = cpy.Select(tree0)(qual1, selName) + val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) + if checkedType1.exists then + gadts.println(i"Member selection healed by GADT approximation") + finish(tree1, qual1, checkedType1) + else if qual1.tpe.isSmallGenericTuple then + gadts.println(i"Tuple member selection healed by GADT approximation") + typedSelect(tree, pt, qual1) + else + tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) + else EmptyTree + } + if !tree1.isEmpty then + tree1 + else if canDefineFurther(qual.tpe.widen) then + typedSelect(tree, pt, qual) + else if qual.tpe.derivesFrom(defn.DynamicClass) + && selName.isTermName && !isDynamicExpansion(tree) + then + val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) + if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then + assignType(tree2, TryDynamicCallType) + else + typedDynamicSelect(tree2, Nil, pt) + else + assignType(tree, + rawType match + case rawType: NamedType => + inaccessibleErrorType(rawType, superAccess, tree.srcPos) + case _ => + notAMemberErrorType(tree, qual, pt)) end typedSelect def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { @@ -2450,7 +2459,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer body1.isInstanceOf[RefTree] && !isWildcardArg(body1) || body1.isInstanceOf[Literal] val symTp = - if isStableIdentifierOrLiteral then pt + if isStableIdentifierOrLiteral || pt.isNamedTupleType then pt else if isWildcardStarArg(body1) || pt == defn.ImplicitScrutineeTypeRef || body1.tpe <:< pt // There is some strange interaction with gadt matching. 
@@ -3050,37 +3059,32 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } /** Translate tuples of all arities */ - def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = { - val arity = tree.trees.length - if (arity <= Definitions.MaxTupleArity) - typed(desugar.smallTuple(tree).withSpan(tree.span), pt) - else { - val pts = - pt.tupleElementTypes match - case Some(types) if types.size == arity => types - case _ => List.fill(arity)(defn.AnyType) - val elems = tree.trees.lazyZip(pts).map( + def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = + val tree1 = desugar.tuple(tree, pt) + if tree1 ne tree then typed(tree1, pt) + else + val arity = tree.trees.length + val pts = pt.stripNamedTuple.tupleElementTypes match + case Some(types) if types.size == arity => types + case _ => List.fill(arity)(defn.AnyType) + val elems = tree.trees.lazyZip(pts).map: if ctx.mode.is(Mode.Type) then typedType(_, _, mapPatternBounds = true) - else typed(_, _)) - if (ctx.mode.is(Mode.Type)) + else typed(_, _) + if ctx.mode.is(Mode.Type) then elems.foldRight(TypeTree(defn.EmptyTupleModule.termRef): Tree)((elemTpt, elemTpts) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), List(elemTpt, elemTpts))) .withSpan(tree.span) - else { + else val tupleXXLobj = untpd.ref(defn.TupleXXLModule.termRef) val app = untpd.cpy.Apply(tree)(tupleXXLobj, elems.map(untpd.TypedSplice(_))) .withSpan(tree.span) val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) - if (ctx.mode.is(Mode.Pattern)) app1 - else { + if ctx.mode.is(Mode.Pattern) then app1 + else val elemTpes = elems.lazyZip(pts).map((elem, pt) => TypeComparer.widenInferred(elem.tpe, pt, widenUnions = true)) val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) - } - } - } - } /** Retrieve symbol attached to given tree */ protected def retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.removeAttachment(SymOfTree) match { diff --git 
a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 638455e7f2de..94e510e04396 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -66,6 +66,8 @@ tuple-fold.scala mt-redux-norm.perspective.scala i18211.scala 10867.scala +named-tuples1.scala +named-tuples-strawman-2.scala # Opaque type i5720.scala diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 4207a13ea66d..8cc070d5dbc5 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -198,7 +198,7 @@ SimpleType ::= SimpleLiteral SimpleType1 ::= id Ident(name) | Singleton ‘.’ id Select(t, name) | Singleton ‘.’ ‘type’ SingletonTypeTree(p) - | ‘(’ Types ‘)’ Tuple(ts) + | ‘(’ [Types | NamesAndTypes] ‘)’ Tuple(ts) | Refinement RefinedTypeTree(EmptyTree, refinement) | TypeSplice -- deprecated syntax | SimpleType1 TypeArgs AppliedTypeTree(t, args) @@ -222,6 +222,8 @@ Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) Types ::= Type {‘,’ Type} +NamesAndTypes ::= NameAndType {‘,’ NameAndType} +NameAndType ::= id ':' Type ``` ### Expressions @@ -290,8 +292,10 @@ TypeSplice ::= spliceId | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted type pattern -- deprecated syntax | ‘$’ ‘{’ Pattern ‘}’ -- when inside quoted type pattern -- deprecated syntax ExprsInParens ::= ExprInParens {‘,’ ExprInParens} + | NamedExprInParens {‘,’ NamedExprInParens} ExprInParens ::= PostfixExpr ‘:’ Type -- normal Expr allows only RefinedType here | Expr +NamedExprInParens ::= id '=' ExprInParens ParArgumentExprs ::= ‘(’ [ExprsInParens] ‘)’ exprs | ‘(’ ‘using’ ExprsInParens ‘)’ | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ exprs :+ Typed(expr, Ident(wildcardStar)) @@ -343,6 +347,9 @@ SimplePattern1 ::= SimpleRef PatVar ::= varid | ‘_’ Patterns ::= Pattern 
{‘,’ Pattern} + | NamedPattern {‘,’ NamedPattern} +NamedPattern ::= id '=' Pattern + ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ Apply(fn, pats) | ‘(’ [Patterns ‘,’] PatVar ‘*’ ‘)’ ``` diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index f8e7ba6a5cbc..ae541b65d8c4 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -198,7 +198,7 @@ SimpleType ::= SimpleLiteral | id | Singleton ‘.’ id | Singleton ‘.’ ‘type’ - | ‘(’ Types ‘)’ + | ‘(’ [Types] ‘)’ | Refinement | SimpleType1 TypeArgs | SimpleType1 ‘#’ id @@ -263,7 +263,7 @@ SimpleExpr ::= SimpleRef | quoteId -- only inside splices | ‘new’ ConstrApp {‘with’ ConstrApp} [TemplateBody] | ‘new’ TemplateBody - | ‘(’ ExprsInParens ‘)’ + | ‘(’ [ExprsInParens] ‘)’ | SimpleExpr ‘.’ id | SimpleExpr ‘.’ MatchClause | SimpleExpr TypeArgs @@ -279,8 +279,7 @@ ExprSplice ::= spliceId | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern | ‘$’ ‘{’ Pattern ‘}’ -- when inside quoted pattern ExprsInParens ::= ExprInParens {‘,’ ExprInParens} -ExprInParens ::= PostfixExpr ‘:’ Type - | Expr +ExprInParens ::= PostfixExpr ‘:’ Type | Expr ParArgumentExprs ::= ‘(’ [ExprsInParens] ‘)’ | ‘(’ ‘using’ ExprsInParens ‘)’ | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ @@ -331,6 +330,7 @@ SimplePattern1 ::= SimpleRef PatVar ::= varid | ‘_’ Patterns ::= Pattern {‘,’ Pattern} + ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ | ‘(’ [Patterns ‘,’] PatVar ‘*’ ‘)’ ``` diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 5d72f15838cd..b38e057f06b1 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -154,6 +154,7 @@ subsection: - page: reference/experimental/cc.md - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md + - page: reference/experimental/named-tuples.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index 
9970adfccce7..c65a760ee22c 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -17,93 +17,165 @@ object NamedTuple: extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) - inline def values: V = x + /** The underlying tuple without the names */ + inline def toTuple: V = x - inline def size: Tuple.Size[V] = values.size + /** The number of elements in this tuple */ + inline def size: Tuple.Size[V] = toTuple.size // This intentionally works for empty named tuples as well. I think NnEmptyTuple is a dead end // and should be reverted, justy like NonEmptyList is also appealing at first, but a bad idea // in the end. + + /** The value (without the name) at index `n` of this tuple */ inline def apply(n: Int): Tuple.Elem[V, n.type] = - inline values match + inline toTuple match case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] + /** The first element value of this tuple */ inline def head: Tuple.Elem[V, 0] = apply(0) - inline def tail: Tuple.Drop[V, 1] = values.drop(1) + /** The tuple consisting of all elements of this tuple except the first one */ + inline def tail: Tuple.Drop[V, 1] = toTuple.drop(1) + + /** The last element value of this tuple */ inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] - inline def init: Tuple.Init[V] = values.take(size - 1).asInstanceOf[Tuple.Init[V]] + /** The tuple consisting of all elements of this tuple except the last one */ + inline def init: Tuple.Init[V] = toTuple.take(size - 1).asInstanceOf[Tuple.Init[V]] + + /** The tuple consisting of the first `n` elements of this tuple, or all + * elements if `n` exceeds `size`. + */ inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = - values.take(n) + toTuple.take(n) + /** The tuple consisting of all elements of this tuple except the first `n` ones, + * or no elements if `n` exceeds `size`. 
+ */ inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = - values.drop(n) + toTuple.drop(n) + /** The tuple `(x.take(n), x.drop(n))` */ inline def splitAt(n: Int): NamedTuple[Tuple.Split[N, n.type], Tuple.Split[V, n.type]] = - values.splitAt(n) + toTuple.splitAt(n) + /** The tuple consisting of all elements of this tuple followed by all elements + * of tuple `that`. The names of the two tuples must be disjoint. + */ inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] - = values ++ that.values + = toTuple ++ that.toTuple // inline def :* [L] (x: L): NamedTuple[Append[N, ???], Append[V, L] = ??? // inline def *: [H] (x: H): NamedTuple[??? *: N], H *: V] = ??? + /** The named tuple consisting of all element values of this tuple mapped by + * the polymorphic mapping function `f`. The names of elements are preserved. + * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. + */ inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = - values.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] + toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] + /** The named tuple consisting of all elements of this tuple in reverse */ inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = - values.reverse - + toTuple.reverse + + /** The named tuple consisting of all elements values of this tuple zipped + * with corresponding element values in named tuple `that`. + * If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The names of `x` and `that` at the same index must be the same. + * The result tuple keeps the same names as the operand tuples. 
+ */ inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = - values.zip(that.values) + toTuple.zip(that.toTuple) + + /** A list consisting of all element values */ + inline def toList: List[Tuple.Union[V]] = toTuple.toList.asInstanceOf[List[Tuple.Union[V]]] - inline def toList: List[Tuple.Union[V]] = values.toList.asInstanceOf[List[Tuple.Union[V]]] - inline def toArray: Array[Object] = values.toArray - inline def toIArray: IArray[Object] = values.toIArray + /** An array consisting of all element values */ + inline def toArray: Array[Object] = toTuple.toArray + + /** An immutable array consisting of all element values */ + inline def toIArray: IArray[Object] = toTuple.toIArray end extension - /** The names of the named tuple type `NT` */ - type Names[NT <: AnyNamedTuple] <: Tuple = NT match + /** The names of a named tuple, represented as a tuple of literal string values. */ + type Names[X <: AnyNamedTuple] <: Tuple = X match case NamedTuple[n, _] => n - /** The value types of the named tuple type `NT` */ + /** The value types of a named tuple represented as a regular tuple. 
*/ type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match case NamedTuple[_, x] => x + /** The size of a named tuple, represented as a literal constant subtype of Int */ type Size[X <: AnyNamedTuple] = Tuple.Size[DropNames[X]] + /** The type of the element value at position N in the named tuple X */ type Elem[X <: AnyNamedTuple, N <: Int] = Tuple.Elem[DropNames[X], N] + /** The type of the first element value of a named tuple */ type Head[X <: AnyNamedTuple] = Elem[X, 0] + /** The type of the last element value of a named tuple */ type Last[X <: AnyNamedTuple] = Tuple.Last[DropNames[X]] + /** The type of a named tuple consisting of all elements of named tuple X except the first one */ + type Tail[X <: AnyNamedTuple] = Drop[X, 1] + + /** The type of the initial part of a named tuple without its last element */ type Init[X <: AnyNamedTuple] = NamedTuple[Tuple.Init[Names[X]], Tuple.Init[DropNames[X]]] - type Tail[X <: AnyNamedTuple] = Drop[X, 1] - + /** The type of the named tuple consisting of the first `N` elements of `X`, + * or all elements if `N` exceeds `Size[X]`. + */ type Take[X <: AnyNamedTuple, N <: Int] = NamedTuple[Tuple.Take[Names[X], N], Tuple.Take[DropNames[X], N]] + /** The type of the named tuple consisting of all elements of `X` except the first `N` ones, + * or no elements if `N` exceeds `Size[X]`. + */ type Drop[X <: AnyNamedTuple, N <: Int] = NamedTuple[Tuple.Drop[Names[X], N], Tuple.Drop[DropNames[X], N]] + /** The pair type `(Take(X, N), Drop[X, N]). */ type Split[X <: AnyNamedTuple, N <: Int] = (Take[X, N], Drop[X, N]) + /** Type of the concatenation of two tuples `X` and `Y` */ type Concat[X <: AnyNamedTuple, Y <: AnyNamedTuple] = NamedTuple[Tuple.Concat[Names[X], Names[Y]], Tuple.Concat[DropNames[X], DropNames[Y]]] + /** The type of the named tuple `X` mapped with the type-level function `F`. + * If `X = (n1 : T1, ..., ni : Ti)` then `Map[X, F] = `(n1 : F[T1], ..., ni : F[Ti])`. 
+ */ type Map[X <: AnyNamedTuple, F[_ <: Tuple.Union[DropNames[X]]]] = NamedTuple[Names[X], Tuple.Map[DropNames[X], F]] + /** A named tuple with the elements of tuple `X` in reversed order */ type Reverse[X <: AnyNamedTuple] = NamedTuple[Tuple.Reverse[Names[X]], Tuple.Reverse[DropNames[X]]] + /** The type of the named tuple consisting of all element values of + * named tuple `X` zipped with corresponding element values of + * named tuple `Y`. If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The names of `X` and `Y` at the same index must be the same. + * The result tuple keeps the same names as the operand tuples. + * For example, if + * ``` + * X = (n1 : S1, ..., ni : Si) + * Y = (n1 : T1, ..., nj : Tj) where j >= i + * ``` + * then + * ``` + * Zip[X, Y] = (n1 : (S1, T1), ..., ni: (Si, Ti)) + * ``` + * @syntax markdown + */ type Zip[X <: AnyNamedTuple, Y <: AnyNamedTuple] = Tuple.Conforms[Names[X], Names[Y]] match case true => diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index ea369539d021..e38e016f5182 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -9,7 +9,7 @@ import scala.annotation.* */ object LazyVals { @nowarn - private[this] val unsafe: sun.misc.Unsafe = { + private val unsafe: sun.misc.Unsafe = { def throwInitializationException() = throw new ExceptionInInitializerError( new IllegalStateException("Can't find instance of sun.misc.Unsafe") diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 372e1e34bb85..b2bd4b791423 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -91,6 +91,13 @@ object language: @compileTimeOnly("`into` can only be used at compile time in import statements") object into + /** Experimental support for named tuples. 
@see [[https://dotty.epfl.ch/docs/reference/experimental/named-tuples]]
// error, now OK since (b: Any) is a named tuple \ No newline at end of file diff --git a/tests/neg/i7751.scala b/tests/neg/i7751.scala index 978ed860574f..fd66e7d451be 100644 --- a/tests/neg/i7751.scala +++ b/tests/neg/i7751.scala @@ -1,3 +1,3 @@ import language.`3.3` -val a = Some(a=a,)=> // error // error +val a = Some(a=a,)=> // error // error // error // error val a = Some(x=y,)=> diff --git a/tests/neg/named-tuples-2.check b/tests/neg/named-tuples-2.check new file mode 100644 index 000000000000..0a52d5f3989b --- /dev/null +++ b/tests/neg/named-tuples-2.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/named-tuples-2.scala:5:9 --------------------------------------------------------------------------- +5 | case (name, age) => () // error + | ^ + | this case is unreachable since type (String, Int, Boolean) is not a subclass of class Tuple2 +-- Error: tests/neg/named-tuples-2.scala:6:9 --------------------------------------------------------------------------- +6 | case (n, a, m, x) => () // error + | ^ + | this case is unreachable since type (String, Int, Boolean) is not a subclass of class Tuple4 diff --git a/tests/neg/named-tuples-2.scala b/tests/neg/named-tuples-2.scala new file mode 100644 index 000000000000..0507891e0549 --- /dev/null +++ b/tests/neg/named-tuples-2.scala @@ -0,0 +1,6 @@ +import language.experimental.namedTuples +def Test = + val person = (name = "Bob", age = 33, married = true) + person match + case (name, age) => () // error + case (n, a, m, x) => () // error diff --git a/tests/neg/named-tuples.check b/tests/neg/named-tuples.check new file mode 100644 index 000000000000..485858fb18a0 --- /dev/null +++ b/tests/neg/named-tuples.check @@ -0,0 +1,105 @@ +-- Error: tests/neg/named-tuples.scala:9:19 ---------------------------------------------------------------------------- +9 | val illformed = (_2 = 2) // error + | ^^^^^^ + | _2 cannot be used as the name of a tuple element because it is a regular tuple selector +-- Error: 
tests/neg/named-tuples.scala:10:20 --------------------------------------------------------------------------- +10 | type Illformed = (_1: Int) // error + | ^^^^^^^ + | _1 cannot be used as the name of a tuple element because it is a regular tuple selector +-- Error: tests/neg/named-tuples.scala:11:40 --------------------------------------------------------------------------- +11 | val illformed2 = (name = "", age = 0, name = true) // error + | ^^^^^^^^^^^ + | Duplicate tuple element name +-- Error: tests/neg/named-tuples.scala:12:45 --------------------------------------------------------------------------- +12 | type Illformed2 = (name: String, age: Int, name: Boolean) // error + | ^^^^^^^^^^^^^ + | Duplicate tuple element name +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:18:25 ------------------------------------------------------ +18 | val y: (String, Int) = person // error + | ^^^^^^ + | Found: (Test.person : (name : String, age : Int)) + | Required: (String, Int) + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:19:20 ------------------------------------------------------ +19 | val _: NameOnly = person // error + | ^^^^^^ + | Found: (Test.person : (name : String, age : Int)) + | Required: Test.NameOnly + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:20:18 ------------------------------------------------------ +20 | val _: Person = nameOnly // error + | ^^^^^^^^ + | Found: (Test.nameOnly : (name : String)) + | Required: Test.Person + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:22:36 ------------------------------------------------------ +22 | val _: (age: Int, name: String) = person // error + | ^^^^^^ + | Found: (Test.person : (name : String, age : Int)) + | Required: (age : Int, name : String) + | + | 
longer explanation available when compiling with `-explain` +-- Error: tests/neg/named-tuples.scala:24:17 --------------------------------------------------------------------------- +24 | val (name = x, agee = y) = person // error + | ^^^^^^^^ + | No element named `agee` is defined in selector type (name : String, age : Int) +-- Error: tests/neg/named-tuples.scala:27:10 --------------------------------------------------------------------------- +27 | case (name = n, age = a) => () // error // error + | ^^^^^^^^ + | No element named `name` is defined in selector type (String, Int) +-- Error: tests/neg/named-tuples.scala:27:20 --------------------------------------------------------------------------- +27 | case (name = n, age = a) => () // error // error + | ^^^^^^^ + | No element named `age` is defined in selector type (String, Int) +-- [E172] Type Error: tests/neg/named-tuples.scala:29:27 --------------------------------------------------------------- +29 | val pp = person ++ (1, 2) // error + | ^ + | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). +-- [E172] Type Error: tests/neg/named-tuples.scala:32:18 --------------------------------------------------------------- +32 | person ++ (1, 2) match // error + | ^ + | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). 
+-- Error: tests/neg/named-tuples.scala:35:17 --------------------------------------------------------------------------- +35 | val bad = ("", age = 10) // error + | ^^^^^^^^ + | Illegal combination of named and unnamed tuple elements +-- Error: tests/neg/named-tuples.scala:38:20 --------------------------------------------------------------------------- +38 | case (name = n, age) => () // error + | ^^^ + | Illegal combination of named and unnamed tuple elements +-- Error: tests/neg/named-tuples.scala:39:16 --------------------------------------------------------------------------- +39 | case (name, age = a) => () // error + | ^^^^^^^ + | Illegal combination of named and unnamed tuple elements +-- Error: tests/neg/named-tuples.scala:42:10 --------------------------------------------------------------------------- +42 | case (age = x) => // error + | ^^^^^^^ + | No element named `age` is defined in selector type Tuple +-- [E172] Type Error: tests/neg/named-tuples.scala:44:27 --------------------------------------------------------------- +44 | val p2 = person ++ person // error + | ^ + |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("name" : String), ("age" : String))] =:= (true : Boolean). +-- [E172] Type Error: tests/neg/named-tuples.scala:45:43 --------------------------------------------------------------- +45 | val p3 = person ++ (first = 11, age = 33) // error + | ^ + |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("first" : String), ("age" : String))] =:= (true : Boolean). 
+-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:47:22 ------------------------------------------------------ +47 | val p5 = person.zip(first = 11, age = 33) // error + | ^^^^^^^^^^^^^^^^^^^^ + | Found: (first : Int, age : Int) + | Required: NamedTuple.NamedTuple[(("name" : String), ("age" : String)), V2] + | + | where: V2 is a type variable with constraint <: Tuple + | + | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/named-tuples.scala:24:29 ------------------------------------------------------------------------- +24 | val (name = x, agee = y) = person // error + | ^^^^^^ + |pattern's type (String, Int) is more specialized than the right hand side expression's type (name : String, age : Int) + | + |If the narrowing is intentional, this can be communicated by adding `: @unchecked` after the expression, + |which may result in a MatchError at runtime. + |This patch can be rewritten automatically under -rewrite -source 3.2-migration. diff --git a/tests/neg/named-tuples.scala b/tests/neg/named-tuples.scala new file mode 100644 index 000000000000..5d1f3777dd73 --- /dev/null +++ b/tests/neg/named-tuples.scala @@ -0,0 +1,51 @@ +import annotation.experimental +import language.experimental.namedTuples + +@experimental object Test: + + type Person = (name: String, age: Int) + val person = (name = "Bob", age = 33): (name: String, age: Int) + + val illformed = (_2 = 2) // error + type Illformed = (_1: Int) // error + val illformed2 = (name = "", age = 0, name = true) // error + type Illformed2 = (name: String, age: Int, name: Boolean) // error + + type NameOnly = (name: String) + + val nameOnly = (name = "Louis") + + val y: (String, Int) = person // error + val _: NameOnly = person // error + val _: Person = nameOnly // error + + val _: (age: Int, name: String) = person // error + + val (name = x, agee = y) = person // error + + ("Ives", 2) match + case (name = n, age = a) => () // error // error + + val pp = person ++ (1, 2) 
// error + val qq = ("a", true) ++ (1, 2) + + person ++ (1, 2) match // error + case _ => + + val bad = ("", age = 10) // error + + person match + case (name = n, age) => () // error + case (name, age = a) => () // error + + (??? : Tuple) match + case (age = x) => // error + + val p2 = person ++ person // error + val p3 = person ++ (first = 11, age = 33) // error + val p4 = person.zip(person) // ok + val p5 = person.zip(first = 11, age = 33) // error + + + + diff --git a/tests/neg/namedTypeParams.check b/tests/neg/namedTypeParams.check index 3f6f9f7913e8..5e0672f20f25 100644 --- a/tests/neg/namedTypeParams.check +++ b/tests/neg/namedTypeParams.check @@ -24,16 +24,16 @@ 19 | f[X = Int, String](1, "") // error // error | ^ | '=' expected, but ']' found --- Error: tests/neg/namedTypeParams.scala:6:8 -------------------------------------------------------------------------- +-- Error: tests/neg/namedTypeParams.scala:6:4 -------------------------------------------------------------------------- 6 | f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental - | ^^^ - | Named type arguments are experimental, - | they must be enabled with a `experimental.namedTypeArguments` language import or setting --- Error: tests/neg/namedTypeParams.scala:6:17 ------------------------------------------------------------------------- + | ^^^^^^^ + | Named type arguments are experimental, + | they must be enabled with a `experimental.namedTypeArguments` language import or setting +-- Error: tests/neg/namedTypeParams.scala:6:13 ------------------------------------------------------------------------- 6 | f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental - | ^^^ - | Named type arguments are experimental, - | they must be enabled with a `experimental.namedTypeArguments` language import or setting + | ^^^^^^^ + | Named type arguments are experimental, + | they must be enabled with a `experimental.namedTypeArguments` language import or setting -- [E006] Not 
Found Error: tests/neg/namedTypeParams.scala:11:11 ------------------------------------------------------- 11 | val x: C[T = Int] = // error: ']' expected, but `=` found // error | ^ diff --git a/tests/new/test.scala b/tests/new/test.scala index e6bfc29fd808..16a823547553 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -1,2 +1,9 @@ -object Test: - def f: Any = 1 +import language.experimental.namedTuples + +type Person = (name: String, age: Int) + +def test = + val bob = (name = "Bob", age = 33): (name: String, age: Int) + + val silly = bob match + case (name = n, age = a) => n.length + a diff --git a/tests/pos/named-tuples-strawman-2.scala b/tests/pos/named-tuples-strawman-2.scala new file mode 100644 index 000000000000..7c18d063bcf4 --- /dev/null +++ b/tests/pos/named-tuples-strawman-2.scala @@ -0,0 +1,185 @@ +import compiletime.* +import compiletime.ops.int.* +import compiletime.ops.boolean.! +import Tuple.* + +object TupleOps: + + /** The `X` tuple, with its element at index `N` replaced by `Y`. + * If `N` is equal to `Size[X]`, the element `Y` is appended instead + */ + type UpdateOrAppend[X <: Tuple, N <: Int, Y] <: Tuple = X match + case x *: xs => + N match + case 0 => Y *: xs + case S[n1] => x *: UpdateOrAppend[xs, n1, Y] + case EmptyTuple => + N match + case 0 => Y *: EmptyTuple + + inline def updateOrAppend[X <: Tuple, N <: Int, Y](xs: X, y: Y): UpdateOrAppend[X, N, Y] = + locally: + val n = constValue[N] + val size = xs.size + require(0 <= n && n <= xs.size, s"Index $n out of range 0..$size") + if n == size then xs :* y + else + val elems = xs.toArray + elems(n) = y.asInstanceOf[Object] + fromArray(elems) + .asInstanceOf[UpdateOrAppend[X, N, Y]] + + extension [X <: Tuple](inline xs: X) + // Note: Y must be inferred precisely, or given explicitly. This means even though `updateOrAppend` + // is clearly useful, we cannot yet move it to tuple since it is still too awkward to use. 
+ // Once we have precise inference, we could replace `Y <: Singleton` with `Y: Precise` + // and then it should work beautifully. + inline def updateOrAppend[N <: Int & Singleton, Y <: Singleton](inline n: N, inline y: Y): UpdateOrAppend[X, N, Y] = + locally: + val size = xs.size + require(0 <= n && n <= size, s"Index $n out of range 0..$size") + if n == size then xs :* y + else + val elems = xs.toArray + elems(n) = y.asInstanceOf[Object] + fromArray(elems) + .asInstanceOf[UpdateOrAppend[X, N, Y]] + + /** If `Y` does not occur in tuple `X`, `X` with `Y` appended. Otherwise `X`. */ + type AppendIfDistinct[X <: Tuple, Y] <: Tuple = X match + case Y *: xs => X + case x *: xs => x *: AppendIfDistinct[xs, Y] + case EmptyTuple => Y *: EmptyTuple + + inline def appendIfDistinct[X <: Tuple, Y](xs: X, y: Y): AppendIfDistinct[X, Y] = + (if xs.containsType[Y] then xs else xs :* y).asInstanceOf[AppendIfDistinct[X, Y]] + + /** `X` with all elements from `Y` that do not occur in `X` appended */ + type ConcatDistinct[X <: Tuple, Y <: Tuple] <: Tuple = Y match + case y *: ys => ConcatDistinct[AppendIfDistinct[X, y], ys] + case EmptyTuple => X + + inline def concatDistinct[X <: Tuple, Y <: Tuple](xs: X, ys: Y): ConcatDistinct[X, Y] = + (xs ++ ys.filter[Y, [Elem] =>> ![Contains[X, Elem]]]).asInstanceOf[ConcatDistinct[X, Y]] + +object NamedTupleOps: + import TupleOps.* + + opaque type AnyNamedTuple = Any + + opaque type NamedTuple[N <: Tuple, +X <: Tuple] >: X <: AnyNamedTuple = X + + object NamedTuple: + def apply[N <: Tuple, X <: Tuple](x: X): NamedTuple[N, X] = x + + /** The names of the named tuple type `NT` */ + type Names[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[n, _] => n + + /** The value types of the named tuple type `NT` */ + type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[_, x] => x + + extension [NT <: AnyNamedTuple](x: NT) + inline def toTuple: DropNames[NT] = x.asInstanceOf + inline def names: Names[NT] = 
constValueTuple[Names[NT]] + + /** Internal use only: Merge names and value components of two named tuple to + * impement `UpdateWith`. + * @param N the names of the combined tuple + * @param X the value types of the first named tuple + * @param N2 the names of the second named tuple + * @param Y the value types of the second named tuple + */ + type Merge[N <: Tuple, X <: Tuple, N2 <: Tuple, Y <: Tuple] = (N2, Y) match + case (n *: ns, y *: ys) => + Merge[N, UpdateOrAppend[X, IndexOf[N, n], y], ns, ys] + case (EmptyTuple, EmptyTuple) => + NamedTuple[N, X] + + /** A joint named tuple where + * - The names are the names of named tuple `NT1` followed by those names of `NT2` which + * do not appear in `NT1` + * - The values are the values of `NT1` and `NT2` corresponding to these names. + * If a name is present in both `NT1` and `NT2` the value in `NT2` is used. + */ + type UpdateWith[NT1 <: AnyNamedTuple, NT2 <: AnyNamedTuple] = + Merge[ConcatDistinct[Names[NT1], Names[NT2]], DropNames[NT1], Names[NT2], DropNames[NT2]] + + extension [NT1 <: AnyNamedTuple](nt1: NT1) + inline def updateWith[NT2 <: AnyNamedTuple](nt2: NT2): UpdateWith[NT1, NT2] = + val names = constValueTuple[ConcatDistinct[Names[NT1], Names[NT2]]].toArray + val names2 = constValueTuple[Names[NT2]].toArray + val values1 = nt1.toTuple + val values2 = nt2.toTuple + val values = new Array[Object](names.length) + values1.toArray.copyToArray(values) + for i <- 0 until values2.size do + val idx = names.indexOf(names2(i)) + values(idx) = values2.productElement(i).asInstanceOf[Object] + Tuple.fromArray(values).asInstanceOf[UpdateWith[NT1, NT2]] + +@main def Test = + import TupleOps.* + import NamedTupleOps.* + + type Names = "first" *: "last" *: "age" *: EmptyTuple + type Values = "Bob" *: "Miller" *: 33 *: EmptyTuple + + val names: Names = ("first", "last", "age") + val values: Values = ("Bob", "Miller", 33) + + val x1: IndexOf[Names, "first"] = constValue + val _: 0 = x1 + + val x2: IndexOf[Names, "age"] = 
names.indexOfType["age"] + val _: 2 = x2 + + val x3: IndexOf[Names, "what?"] = names.indexOfType["what?"] + val _: 3 = x3 + + type Releases = "first" *: "middle" *: EmptyTuple + type ReleaseValues = 1.0 *: true *: EmptyTuple + + val releases: Releases = ("first", "middle") + val releaseValues: ReleaseValues = (1.0, true) + + val x4 = values.updateOrAppend(names.indexOfType["age"], 11) + //updateOrAppend[Values](values)[IndexOf[Names, "age"], 11](indexOf[Names](names)["age"]("age"), 11) + val _: ("Bob", "Miller", 11) = x4 + assert(("Bob", "Miller", 11) == x4) + + val x5 = updateOrAppend[Values, IndexOf[Names, "what"], true](values, true) + val _: ("Bob", "Miller", 33, true) = x5 + assert(("Bob", "Miller", 33, true) == x5) + + val x6 = updateOrAppend[Values, IndexOf[Names, "first"], "Peter"](values, "Peter") + val _: ("Peter", "Miller", 33) = x6 + assert(("Peter", "Miller", 33) == x6) + + val x7 = concatDistinct[Names, Releases](names, releases) + val _: ("first", "last", "age", "middle") = x7 + assert(("first", "last", "age", "middle") == x7, x7) + + val x8 = concatDistinct[Releases, Names](releases, names) + val _: ("first", "middle", "last", "age") = x8 + assert(("first", "middle", "last", "age") == x8) + + def x9: Merge[ConcatDistinct[Names, Releases], Values, Releases, ReleaseValues] = ??? 
+ def x9c: NamedTuple[("first", "last", "age", "middle"), (1.0, "Miller", 33, true)] = x9 + + val person = NamedTuple[Names, Values](values) + val release = NamedTuple[Releases, ReleaseValues](releaseValues) + + val x10 = person.updateWith(release) + val _: UpdateWith[NamedTuple[Names, Values], NamedTuple[Releases, ReleaseValues]] = x10 + val _: ("first", "last", "age", "middle") = x10.names + val _: (1.0, "Miller", 33, true) = x10.toTuple + assert((("first", "last", "age", "middle") == x10.names)) + assert((1.0, "Miller", 33, true) == x10.toTuple) + + val x11 = release.updateWith(person) + val _: UpdateWith[NamedTuple[Releases, ReleaseValues], NamedTuple[Names, Values]] = x11 + val _: NamedTuple[("first", "middle", "last", "age"), ("Bob", true, "Miller", 33)] = x11 + assert(("first", "middle", "last", "age") == x11.names) + assert(("Bob", true, "Miller", 33) == x11.toTuple) diff --git a/tests/pos/named-tuples-strawman.scala b/tests/pos/named-tuples-strawman.scala new file mode 100644 index 000000000000..859e1d1448e2 --- /dev/null +++ b/tests/pos/named-tuples-strawman.scala @@ -0,0 +1,48 @@ +object Test: + + object Named: + opaque type Named[name <: String & Singleton, A] >: A = A + def apply[S <: String & Singleton, A](name: S, x: A): Named[name.type, A] = x + extension [name <: String & Singleton, A](named: Named[name, A]) def value: A = named + import Named.* + + type DropNames[T <: Tuple] = T match + case Named[_, x] *: xs => x *: DropNames[xs] + case _ => T + + extension [T <: Tuple](x: T) def toTuple: DropNames[T] = + x.asInstanceOf // named and unnamed tuples have the same runtime representation + + val name = "hi" + val named = Named(name, 33) // ok, but should be rejectd + + inline val name2 = "hi" + val named2 = Named(name2, 33) // ok, but should be rejectd + val _: Named["hi", Int] = named2 + + var x = (Named("name", "Bob"), Named("age", 33)) + + val y: (String, Int) = x.toTuple + + x = y + + val z = y.toTuple + + type PersonInfo = (Named["name", 
String], Named["age", Int]) + type AddressInfo = (Named["city", String], Named["zip", Int]) + + val ok1: (Named["name", String], Named["age", Int]) = x + val ok2: PersonInfo = y + //val err1: (Named["bad", String], Named["age", Int]) = x // error + val err2: (Named["bad", String], Named["age", Int]) = x.toTuple // ok + val ok3: (Named["bad", String], Named["age", Int]) = y // ok + + val addr = (Named("city", "Lausanne"), Named("zip", 1003)) + val _: AddressInfo = addr + + type CombinedInfo = Tuple.Concat[PersonInfo, AddressInfo] + + val combined: CombinedInfo = x ++ addr + +// val person = (name = "Bob", age = 33): (name: String, age: Int) +// person.age diff --git a/tests/pos/named-tuples.check b/tests/pos/named-tuples.check new file mode 100644 index 000000000000..24928c7dbdac --- /dev/null +++ b/tests/pos/named-tuples.check @@ -0,0 +1,10 @@ +(Bob,33) +33 +Bob +(Bob,33,Lausanne,1003) +33 +no match +Bob is younger than Bill +Bob is younger than Lucy +Bill is younger than Lucy +matched elements (name, Bob), (age, 33) diff --git a/tests/pos/named-tuples1.scala b/tests/pos/named-tuples1.scala new file mode 100644 index 000000000000..58e3fc065e61 --- /dev/null +++ b/tests/pos/named-tuples1.scala @@ -0,0 +1,13 @@ +import annotation.experimental +import language.experimental.namedTuples + +@main def Test = + val bob = (name = "Bob", age = 33): (name: String, age: Int) + val persons = List( + bob, + (name = "Bill", age = 40), + (name = "Lucy", age = 45) + ) + val ages = persons.map(_.age) + // pickling failure: matchtype is reduced after pickling, unreduced before. + assert(ages.sum == 118) diff --git a/tests/pos/tuple-ops.scala b/tests/pos/tuple-ops.scala new file mode 100644 index 000000000000..df708e669e0f --- /dev/null +++ b/tests/pos/tuple-ops.scala @@ -0,0 +1,36 @@ +import language.experimental.namedTuples +import Tuple.* + +def test = + val x1: Conforms[(1, 2), (1, 2)] = ??? + val _: true = x1 + + val x2: Conforms[(1, 2), (1, 3)] = ??? 
+ val _: false = x2 + + val x3: Conforms[(1, 2), (1, 2, 4)] = ??? + val _: false = x2 + + val x4: Conforms[(1, 2, 4), (1, 2)] = ??? + val _: false = x2 + + summon[Disjoint[(1, 2, 3), (4, 5)] =:= true] + summon[Disjoint[(1, 2, 6), (4, 5)] =:= true] + summon[Disjoint[(1, 2, 6), EmptyTuple] =:= true] + summon[Disjoint[EmptyTuple, EmptyTuple] =:= true] + + summon[Contains[(1, 2, 3), Int] =:= true] + summon[Contains[(1, 2, 3), 2] =:= true] + summon[Contains[(1, 2, 3), 4] =:= false] + + summon[Conforms[(1, 2, 3), (1, 2, 3)] =:= true] + summon[Conforms[(1, 2, 3), (1, 2)] =:= false] + summon[Conforms[(1, 2, 3), (1, 2, 4)] =:= false] + summon[Conforms[(1, 2, 3), (Int, 2, 3)] =:= true] +// summon[Conforms[(Int, 2, 3), (1, 2, 3)] =:= true] // error, reduction gets stuck + + summon[Disjoint[(1, 2, 3), (4, 2)] =:= false] + summon[Disjoint[("a", "b"), ("b", "c")] =:= false] + summon[Disjoint[(1, 2, 6), Tuple1[2]] =:= false] + summon[Disjoint[Tuple1[3], (4, 3, 6)] =:= false] + diff --git a/tests/run/named-patterns.check b/tests/run/named-patterns.check new file mode 100644 index 000000000000..ba8dbb8b21f7 --- /dev/null +++ b/tests/run/named-patterns.check @@ -0,0 +1,10 @@ +name Bob, age 22 +name Bob +age 22 +age 22, name Bob +Bob, 22 +1003 Lausanne, Rue de la Gare 44 +1003 Lausanne +Rue de la Gare in Lausanne +1003 Lausanne, Rue de la Gare 44 +1003 Lausanne, Rue de la Gare 44 diff --git a/tests/run/named-patterns.scala b/tests/run/named-patterns.scala new file mode 100644 index 000000000000..1e7e0697e782 --- /dev/null +++ b/tests/run/named-patterns.scala @@ -0,0 +1,43 @@ +import language.experimental.namedTuples + +object Test1: + class Person(val name: String, val age: Int) + + object Person: + def unapply(p: Person): (name: String, age: Int) = (p.name, p.age) + + case class Address(city: String, zip: Int, street: String, number: Int) + + @main def Test = + val bob = Person("Bob", 22) + bob match + case Person(name = n, age = a) => println(s"name $n, age $a") + bob match + case 
Person(name = n) => println(s"name $n") + bob match + case Person(age = a) => println(s"age $a") + bob match + case Person(age = a, name = n) => println(s"age $a, name $n") + bob match + case Person(age, name) => println(s"$age, $name") + + val addr = Address("Lausanne", 1003, "Rue de la Gare", 44) + addr match + case Address(city = c, zip = z, street = s, number = n) => + println(s"$z $c, $s $n") + addr match + case Address(zip = z, city = c) => + println(s"$z $c") + addr match + case Address(city = c, street = s) => + println(s"$s in $c") + addr match + case Address(number = n, street = s, zip = z, city = c) => + println(s"$z $c, $s $n") + addr match + case Address(c, z, s, number) => + println(s"$z $c, $s $number") + + + + diff --git a/tests/run/named-tuples-xxl.check b/tests/run/named-tuples-xxl.check new file mode 100644 index 000000000000..ee5f60bec756 --- /dev/null +++ b/tests/run/named-tuples-xxl.check @@ -0,0 +1,6 @@ +(0,0,0,0,0,0,0,0,0,0,Bob,0,33,0,0,0,0,0,0,0,0,0,0,0) +(0,0,0,0,0,0,0,0,0,0,Bob,0,33,0,0,0,0,0,0,0,0,0,0,0) +(0,0,0,0,0,0,0,0,0,0,Bob,0,33,0,0,0,0,0,0,0,0,0,0,0) +Bob is younger than Bill +Bob is younger than Lucy +Bill is younger than Lucy diff --git a/tests/run/named-tuples-xxl.scala b/tests/run/named-tuples-xxl.scala new file mode 100644 index 000000000000..3a0a1e5e1294 --- /dev/null +++ b/tests/run/named-tuples-xxl.scala @@ -0,0 +1,91 @@ +import language.experimental.namedTuples +import NamedTuple.toTuple + +type Person = ( + x0: Int, x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, + name: String, y1: Int, age: Int, y2: Int, + z0: Int, z1: Int, z2: Int, z3: Int, z4: Int, z5: Int, z6: Int, z7: Int, z8: Int, z9: Int) + +val bob = ( + x0 = 0, x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, x7 = 0, x8 = 0, x9 = 0, + name = "Bob", y1 = 0, age = 33, y2 = 0, + z0 = 0, z1 = 0, z2 = 0, z3 = 0, z4 = 0, z5 = 0, z6 = 0, z7 = 0, z8 = 0, z9 = 0) + +val person2: Person = bob + + +type AddressInfo = (city: String, zip: 
Int) +val addr = (city = "Lausanne", zip = 1003) + +type CombinedInfo = NamedTuple.Concat[Person, AddressInfo] +val bobWithAddr = bob ++ addr +val _: CombinedInfo = bobWithAddr +val _: CombinedInfo = bob ++ addr + +@main def Test = + assert(bob.name == "Bob") + assert(bob.age == 33) + bob match + case p @ (name = "Bob", age = a) => + val x = p + println(x) + assert(p.age == 33) + assert(a == 33) + case _ => + assert(false) + + bob match + case p @ (name = "Peter", age = _) => assert(false) + case p @ (name = "Bob", age = 0) => assert(false) + case _ => + bob match + case b @ (x0 = 0, x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, x7 = 0, x8 = 0, x9 = 0, + name = "Bob", y1 = 0, age = 33, y2 = 0, + z0 = 0, z1 = 0, z2 = 0, z3 = 0, z4 = 0, z5 = 0, z6 = 0, z7 = 0, z8 = 0, z9 = 0) + => // !!! spurious unreachable case warning + println(bob) + println(b) + case _ => assert(false) + + val x = bob.age + assert(x == 33) + + val y: ( + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + String, Int, Int, Int, + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int) + = bob.toTuple + + def ageOf(person: Person) = person.age + + assert(ageOf(bob) == 33) + + val persons = List( + bob, + (x0 = 0, x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, x7 = 0, x8 = 0, x9 = 0, + name = "Bill", y1 = 0, age = 40, y2 = 0, + z0 = 0, z1 = 0, z2 = 0, z3 = 0, z4 = 0, z5 = 0, z6 = 0, z7 = 0, z8 = 0, z9 = 0), + (x0 = 0, x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, x7 = 0, x8 = 0, x9 = 0, + name = "Lucy", y1 = 0, age = 45, y2 = 0, + z0 = 0, z1 = 0, z2 = 0, z3 = 0, z4 = 0, z5 = 0, z6 = 0, z7 = 0, z8 = 0, z9 = 0), + ) + for + p <- persons + q <- persons + if p.age < q.age + do + println(s"${p.name} is younger than ${q.name}") + + val name1 = bob(10) + val age1 = bob(12) + + val minors = persons.filter: + case (age = a) => a < 18 + case _ => false + + assert(minors.isEmpty) + + bob match + case bob1 @ (age = 33, name = "Bob") => + val x: Person = bob1 // bob1 still has type Person with the unswapped 
elements + case _ => assert(false) diff --git a/tests/run/named-tuples.check b/tests/run/named-tuples.check new file mode 100644 index 000000000000..c53a2f52ff09 --- /dev/null +++ b/tests/run/named-tuples.check @@ -0,0 +1,9 @@ +(Bob,33) +33 +Bob +(Bob,33,Lausanne,1003) +33 +no match +Bob is younger than Bill +Bob is younger than Lucy +Bill is younger than Lucy diff --git a/tests/run/named-tuples.scala b/tests/run/named-tuples.scala new file mode 100644 index 000000000000..0c9e3fb4d455 --- /dev/null +++ b/tests/run/named-tuples.scala @@ -0,0 +1,99 @@ +import language.experimental.namedTuples +import NamedTuple.toTuple + +type Person = (name: String, age: Int) +val bob = (name = "Bob", age = 33): (name: String, age: Int) +val person2: (name: String, age: Int) = bob + +type Uni = (uni: Double) +val uni = (uni = 1.0) +val _: Uni = uni + +type AddressInfo = (city: String, zip: Int) +val addr = (city = "Lausanne", zip = 1003) +val _: AddressInfo = addr + +type CombinedInfo = NamedTuple.Concat[Person, AddressInfo] +val bobWithAddr = bob ++ addr +val _: CombinedInfo = bobWithAddr +val _: CombinedInfo = bob ++ addr + +@main def Test = + println(bob) + println(bob.age) + println(person2.name) + println(bobWithAddr) + bob match + case p @ (name = "Bob", age = _) => println(p.age) + bob match + case p @ (name = "Bob", age = age) => assert(age == 33) + bob match + case p @ (name = "Peter", age = _) => println(p.age) + case p @ (name = "Bob", age = 0) => println(p.age) + case _ => println("no match") + + val x = bob.age + assert(x == 33) + + val y: (String, Int) = bob.toTuple + + def ageOf(person: Person) = person.age + + assert(ageOf(bob) == 33) + assert(ageOf((name = "anon", age = 22)) == 22) + assert(ageOf(("anon", 11)) == 11) + + val persons = List( + bob, + (name = "Bill", age = 40), + (name = "Lucy", age = 45) + ) + for + p <- persons + q <- persons + if p.age < q.age + do + println(s"${p.name} is younger than ${q.name}") + + //persons.select(_.age, _.name) + 
//persons.join(addresses).withCommon(_.name) + + def minMax(elems: Int*): (min: Int, max: Int) = + var min = elems(0) + var max = elems(0) + for elem <- elems do + if elem < min then min = elem + if elem > max then max = elem + (min = min, max = max) + + val mm = minMax(1, 3, 400, -3, 10) + assert(mm.min == -3) + assert(mm.max == 400) + + val name1 = bob(0) + val age1 = bob(1) + val _: String = name1 + val _: Int = age1 + + val bobS = bob.reverse + val _: (age: Int, name: String) = bobS + val _: NamedTuple.Reverse[Person] = bobS + + val silly = bob match + case (name, age) => name.length + age + + assert(silly == 36) + + val minors = persons.filter: + case (age = a) => a < 18 + case _ => false + + assert(minors.isEmpty) + + bob match + case bob1 @ (age = 33, name = "Bob") => + val x: Person = bob1 // bob1 still has type Person with the unswapped elements + case _ => assert(false) + + + From 27d62886267e9d9069cd842f4fd50bd3dc908993 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 3 Dec 2023 13:16:06 +0100 Subject: [PATCH 264/465] Add doc page --- .../reference/experimental/named-tuples.md | 136 ++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100644 docs/_docs/reference/experimental/named-tuples.md diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md new file mode 100644 index 000000000000..a32581336eac --- /dev/null +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -0,0 +1,136 @@ +--- +layout: doc-page +title: "Named Tuples" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/named-tuples.html +--- + +The elements of a tuple can now be named. Example: +```scala +type Person = (name: String, age: Int) +val Bob: Person = (name = "Bob", age = 33) + +Bob match + case (name, age) => + println(s"$name is $age years old") + +val persons: List[Person] = ... 
+val minors = persons.filter: p => + p.age < 18 +``` +Named bindings in tuples are similar to function parameters and arguments. We use `name: Type` for element types and `name = value` for element values. It is illegal to mix named and unnamed elements in a tuple, or to use the same same +name for two different elements. + +Fields of named tuples can be selected by their name, as in the line `p.age < 18` above. + +### Conformance + +The order of names in a named tuple matters. For instance, the type `Person` above and the type `(age: Int, name: String)` would be different, incompatible types. + +Values of named tuple types can also be be defined using regular tuples. For instance: +```scala +val x: Person = ("Laura", 25) + +def register(person: Person) = ... +register(person = ("Silvain", 16)) +register(("Silvain", 16)) +``` +This follows since a regular tuple `(T_1, ..., T_n)` is treated as a subtype of a named tuple `(N_1 = T_1, ..., N_n = T_n)` with the same element types. On the other hand, named tuples do not conform to unnamed tuples, so the following is an error: +```scala +val x: (String, Int) = Bob // error: type mismatch +``` +One can convert a named tuple to an unnamed tuple with the `dropNames` method, so the following works: +```scala +val x: (String, Int) = Bob.dropNames // ok +``` +Note that conformance rules for named tuples are analogous to the rules for named parameters. One can assign parameters by position to a named parameter list. +```scala + def f(param: Int) = ... + f(param = 1) // OK + f(2) // Also OK +``` +But one cannot use a name to pass an argument to an unnamed parameter: +```scala + val f: Int => T + f(2) // OK + f(param = 2) // Not OK +``` +The rules for tuples are analogous. Unnamed tuples conform to named tuple types, but the opposite does not hold. + + +### Pattern Matching + +When pattern matching on a named tuple, the pattern may be named or unnamed. 
+If the pattern is named it needs to mention only a subset of the tuple names, and these names can come in any order. So the following are all OK: +```scala +Bob match + case (name, age) => ... + +Bob match + case (name = x, age = y) => ... + +Bob match + case (age = x) => ... + +Bob match + case (age = x, name = y) => ... +``` + +### Expansion + +Named tuples are in essence just a convenient syntax for regular tuples. In the internal representation, a named tuple type is represented at compile time as a pair of two tuples. One tuple contains the names as literal constant string types, the other contains the element types. The runtime representation of a named tuples consists of just the element values, whereas the names are forgotten. This is achieved by declaring `NamedTuple` +in package `scala` as an opaque type as follows: +```scala + opaque type NamedTuple[N <: Tuple, +V <: Tuple] >: V = V +``` +For instance, the `Person` type would be represented as the type +```scala +NamedTuple[("name", "age"), (String, Int)] +``` +`NamedTuple` is an opaque type alias of its second, value parameter. The first parameter is a string constant type which determines the name of the element. Since the type is just an alias of its value part, names are erased at runtime, and named tuples and regular tuples have the same representation. + +A `NamedTuple[N, V]` type is publicly known to be a supertype (but not a subtype) of its value paramater `V`, which means that regular tuples can be assigned to named tuples but not _vice versa_. + +The `NamedTuple` object contains a number of extension methods for named tuples hat mirror the same functions in `Tuple`. Examples are +`apply`, `head`, `tail`, `take`, `drop`, `++`, `map`, or `zip`. +Similar to `Tuple`, the `NamedTuple` object also contains types such as `Elem`, `Head`, `Concat` +that describe the results of these extension methods. 
+ +The translation of named tuples to instances of `NamedTuple` is fixed by the specification and therefore known to the programmer. This means that: + + - All tuple operations also work with named tuples "out of the box". + - Macro libraries can rely on this expansion. + +### Restrictions + +The following restrictions apply to named tuple elements: + + 1. Either all elements of a tuple are named or none are named. It is illegal to mix named and unnamed elements in a tuple. For instance, the following is in error: + ```scala + val illFormed1 = ("Bob", age = 33) // error + ``` + 2. Each element name in a named tuple must be unique. For instance, the following is in error: + ```scala + val illFormed2 = (name = "", age = 0, name = true) // error + ``` + 3. Named tuples can be matched with either named or regular patterns. But regular tuples and other selector types can only be matched with regular tuple patterns. For instance, the following is in error: + ```scala + (tuple: Tuple) match + case (age = x) => // error + ``` + +### Syntax + +The syntax of Scala is extended as follows to support named tuples: +``` +SimpleType ::= ... + | ‘(’ NameAndType {‘,’ NameAndType} ‘)’ +NameAndType ::= id ':' Type + +SimpleExpr ::= ... + | '(' NamedExprInParens {‘,’ NamedExprInParens} ')' +NamedExprInParens ::= id '=' ExprInParens + +SimplePattern ::= ... 
+ | '(' NamedPattern {‘,’ NamedPattern} ')' +NamedPattern ::= id '=' Pattern +``` From 5dd48f9406fa58a8df1e53a92f216d8056c7b3e5 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 3 Dec 2023 14:27:35 +0100 Subject: [PATCH 265/465] Make NamedTuple covariant in its value type --- library/src/scala/NamedTuple.scala | 2 +- tests/neg/named-tuples.check | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index c65a760ee22c..ddc2b545f9ba 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -6,7 +6,7 @@ import compiletime.ops.boolean.* object NamedTuple: opaque type AnyNamedTuple = Any - opaque type NamedTuple[N <: Tuple, V <: Tuple] >: V <: AnyNamedTuple = V + opaque type NamedTuple[N <: Tuple, +V <: Tuple] >: V <: AnyNamedTuple = V def apply[N <: Tuple, V <: Tuple](x: V) = x diff --git a/tests/neg/named-tuples.check b/tests/neg/named-tuples.check index 485858fb18a0..067b6eeb937c 100644 --- a/tests/neg/named-tuples.check +++ b/tests/neg/named-tuples.check @@ -90,9 +90,7 @@ 47 | val p5 = person.zip(first = 11, age = 33) // error | ^^^^^^^^^^^^^^^^^^^^ | Found: (first : Int, age : Int) - | Required: NamedTuple.NamedTuple[(("name" : String), ("age" : String)), V2] - | - | where: V2 is a type variable with constraint <: Tuple + | Required: NamedTuple.NamedTuple[(("name" : String), ("age" : String)), Tuple] | | longer explanation available when compiling with `-explain` -- Warning: tests/neg/named-tuples.scala:24:29 ------------------------------------------------------------------------- From bd9bb8a777684eaa6025e08f9c05188d8d74b954 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 5 Dec 2023 11:19:23 +0100 Subject: [PATCH 266/465] Various tweaks --- .../reference/experimental/named-tuples.md | 4 +- library/src/scala/NamedTuple.scala | 2 +- library/src/scala/Tuple.scala | 1 - tests/neg/named-tuples.check | 71 ++++++++++--------- 
tests/neg/named-tuples.scala | 1 + tests/run/named-tuples.check | 1 + tests/run/named-tuples.scala | 14 +++- 7 files changed, 55 insertions(+), 39 deletions(-) diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md index a32581336eac..f9ba87382e32 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -38,9 +38,9 @@ This follows since a regular tuple `(T_1, ..., T_n)` is treated as a subtype of ```scala val x: (String, Int) = Bob // error: type mismatch ``` -One can convert a named tuple to an unnamed tuple with the `dropNames` method, so the following works: +One can convert a named tuple to an unnamed tuple with the `toTuple` method, so the following works: ```scala -val x: (String, Int) = Bob.dropNames // ok +val x: (String, Int) = Bob.toTuple // ok ``` Note that conformance rules for named tuples are analogous to the rules for named parameters. One can assign parameters by position to a named parameter list. ```scala diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index ddc2b545f9ba..d5334cc2773d 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -8,7 +8,7 @@ object NamedTuple: opaque type AnyNamedTuple = Any opaque type NamedTuple[N <: Tuple, +V <: Tuple] >: V <: AnyNamedTuple = V - def apply[N <: Tuple, V <: Tuple](x: V) = x + def apply[N <: Tuple, V <: Tuple](x: V): NamedTuple[N, V] = x def unapply[N <: Tuple, V <: Tuple](x: NamedTuple[N, V]): Some[V] = Some(x) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index e84e1fe562c3..e128fa8f0e81 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -354,7 +354,6 @@ object Tuple: /** A boolean indicating whether there is an element in the type `X` of `x` * that matches type `Y`. 
*/ - inline def containsType[Y] = constValue[Contains[X, Y]] /* Note: It would be nice to add the following two extension methods: diff --git a/tests/neg/named-tuples.check b/tests/neg/named-tuples.check index 067b6eeb937c..d9b6d686a587 100644 --- a/tests/neg/named-tuples.check +++ b/tests/neg/named-tuples.check @@ -21,80 +21,87 @@ | Required: (String, Int) | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:19:20 ------------------------------------------------------ -19 | val _: NameOnly = person // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:19:25 ------------------------------------------------------ +19 | val _: (String, Int) = (name = "", age = 0) // error + | ^^^^^^^^^^^^^^^^^^^^ + | Found: (name : String, age : Int) + | Required: (String, Int) + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:20:20 ------------------------------------------------------ +20 | val _: NameOnly = person // error | ^^^^^^ | Found: (Test.person : (name : String, age : Int)) | Required: Test.NameOnly | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:20:18 ------------------------------------------------------ -20 | val _: Person = nameOnly // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:21:18 ------------------------------------------------------ +21 | val _: Person = nameOnly // error | ^^^^^^^^ | Found: (Test.nameOnly : (name : String)) | Required: Test.Person | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:22:36 ------------------------------------------------------ -22 | val _: (age: Int, name: String) = person // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:23:36 
------------------------------------------------------ +23 | val _: (age: Int, name: String) = person // error | ^^^^^^ | Found: (Test.person : (name : String, age : Int)) | Required: (age : Int, name : String) | | longer explanation available when compiling with `-explain` --- Error: tests/neg/named-tuples.scala:24:17 --------------------------------------------------------------------------- -24 | val (name = x, agee = y) = person // error +-- Error: tests/neg/named-tuples.scala:25:17 --------------------------------------------------------------------------- +25 | val (name = x, agee = y) = person // error | ^^^^^^^^ | No element named `agee` is defined in selector type (name : String, age : Int) --- Error: tests/neg/named-tuples.scala:27:10 --------------------------------------------------------------------------- -27 | case (name = n, age = a) => () // error // error +-- Error: tests/neg/named-tuples.scala:28:10 --------------------------------------------------------------------------- +28 | case (name = n, age = a) => () // error // error | ^^^^^^^^ | No element named `name` is defined in selector type (String, Int) --- Error: tests/neg/named-tuples.scala:27:20 --------------------------------------------------------------------------- -27 | case (name = n, age = a) => () // error // error +-- Error: tests/neg/named-tuples.scala:28:20 --------------------------------------------------------------------------- +28 | case (name = n, age = a) => () // error // error | ^^^^^^^ | No element named `age` is defined in selector type (String, Int) --- [E172] Type Error: tests/neg/named-tuples.scala:29:27 --------------------------------------------------------------- -29 | val pp = person ++ (1, 2) // error +-- [E172] Type Error: tests/neg/named-tuples.scala:30:27 --------------------------------------------------------------- +30 | val pp = person ++ (1, 2) // error | ^ | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : 
Boolean). --- [E172] Type Error: tests/neg/named-tuples.scala:32:18 --------------------------------------------------------------- -32 | person ++ (1, 2) match // error +-- [E172] Type Error: tests/neg/named-tuples.scala:33:18 --------------------------------------------------------------- +33 | person ++ (1, 2) match // error | ^ | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). --- Error: tests/neg/named-tuples.scala:35:17 --------------------------------------------------------------------------- -35 | val bad = ("", age = 10) // error +-- Error: tests/neg/named-tuples.scala:36:17 --------------------------------------------------------------------------- +36 | val bad = ("", age = 10) // error | ^^^^^^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:38:20 --------------------------------------------------------------------------- -38 | case (name = n, age) => () // error +-- Error: tests/neg/named-tuples.scala:39:20 --------------------------------------------------------------------------- +39 | case (name = n, age) => () // error | ^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:39:16 --------------------------------------------------------------------------- -39 | case (name, age = a) => () // error +-- Error: tests/neg/named-tuples.scala:40:16 --------------------------------------------------------------------------- +40 | case (name, age = a) => () // error | ^^^^^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:42:10 --------------------------------------------------------------------------- -42 | case (age = x) => // error +-- Error: tests/neg/named-tuples.scala:43:10 --------------------------------------------------------------------------- +43 | case (age = x) => // error | ^^^^^^^ | No element named `age` is defined in selector type Tuple 
--- [E172] Type Error: tests/neg/named-tuples.scala:44:27 --------------------------------------------------------------- -44 | val p2 = person ++ person // error +-- [E172] Type Error: tests/neg/named-tuples.scala:45:27 --------------------------------------------------------------- +45 | val p2 = person ++ person // error | ^ |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("name" : String), ("age" : String))] =:= (true : Boolean). --- [E172] Type Error: tests/neg/named-tuples.scala:45:43 --------------------------------------------------------------- -45 | val p3 = person ++ (first = 11, age = 33) // error +-- [E172] Type Error: tests/neg/named-tuples.scala:46:43 --------------------------------------------------------------- +46 | val p3 = person ++ (first = 11, age = 33) // error | ^ |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("first" : String), ("age" : String))] =:= (true : Boolean). --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:47:22 ------------------------------------------------------ -47 | val p5 = person.zip(first = 11, age = 33) // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:48:22 ------------------------------------------------------ +48 | val p5 = person.zip(first = 11, age = 33) // error | ^^^^^^^^^^^^^^^^^^^^ | Found: (first : Int, age : Int) | Required: NamedTuple.NamedTuple[(("name" : String), ("age" : String)), Tuple] | | longer explanation available when compiling with `-explain` --- Warning: tests/neg/named-tuples.scala:24:29 ------------------------------------------------------------------------- -24 | val (name = x, agee = y) = person // error +-- Warning: tests/neg/named-tuples.scala:25:29 ------------------------------------------------------------------------- +25 | val (name = x, agee = y) = person // error | ^^^^^^ |pattern's type (String, Int) is more specialized than the right hand side expression's type (name : String, age : Int) | diff 
--git a/tests/neg/named-tuples.scala b/tests/neg/named-tuples.scala index 5d1f3777dd73..7dcf2221ec40 100644 --- a/tests/neg/named-tuples.scala +++ b/tests/neg/named-tuples.scala @@ -16,6 +16,7 @@ import language.experimental.namedTuples val nameOnly = (name = "Louis") val y: (String, Int) = person // error + val _: (String, Int) = (name = "", age = 0) // error val _: NameOnly = person // error val _: Person = nameOnly // error diff --git a/tests/run/named-tuples.check b/tests/run/named-tuples.check index c53a2f52ff09..6485aefafa9a 100644 --- a/tests/run/named-tuples.check +++ b/tests/run/named-tuples.check @@ -7,3 +7,4 @@ no match Bob is younger than Bill Bob is younger than Lucy Bill is younger than Lucy +(((Lausanne,Pully),Preverenges),((1003,1009),1028)) diff --git a/tests/run/named-tuples.scala b/tests/run/named-tuples.scala index 0c9e3fb4d455..29b058adab18 100644 --- a/tests/run/named-tuples.scala +++ b/tests/run/named-tuples.scala @@ -1,5 +1,5 @@ import language.experimental.namedTuples -import NamedTuple.toTuple +import NamedTuple.* type Person = (name: String, age: Int) val bob = (name = "Bob", age = 33): (name: String, age: Int) @@ -9,8 +9,8 @@ type Uni = (uni: Double) val uni = (uni = 1.0) val _: Uni = uni -type AddressInfo = (city: String, zip: Int) -val addr = (city = "Lausanne", zip = 1003) +type AddressInfo = (city: String, zipCode: Int) +val addr = (city = "Lausanne", zipCode = 1003) val _: AddressInfo = addr type CombinedInfo = NamedTuple.Concat[Person, AddressInfo] @@ -95,5 +95,13 @@ val _: CombinedInfo = bob ++ addr val x: Person = bob1 // bob1 still has type Person with the unswapped elements case _ => assert(false) + val addr2 = (city = "Pully", zipCode = 1009) + val addr3 = addr.zip(addr2) + val addr4 = addr3.zip("Preverenges", 1028) + println(addr4) + + + + From e0a11cd3d28c1b78dea46cfd6865d5707b71161f Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 5 Dec 2023 14:08:37 +0100 Subject: [PATCH 267/465] Harden NamedTuple handling against 
ill-formed NamedTuples --- compiler/src/dotty/tools/dotc/core/TypeUtils.scala | 1 + tests/neg/named-tuples-3.scala | 7 +++++++ 2 files changed, 8 insertions(+) create mode 100644 tests/neg/named-tuples-3.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index 7ac0df05b268..0dafc6515928 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -124,6 +124,7 @@ class TypeUtils { case defn.NamedTuple(nmes, vals) => val names = nmes.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil).map: case ConstantType(Constant(str: String)) => str.toTermName + case t => throw TypeError(em"Malformed NamedTuple: names must be string types, but $t was found.") val values = vals.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil) names.zip(values) case t => diff --git a/tests/neg/named-tuples-3.scala b/tests/neg/named-tuples-3.scala new file mode 100644 index 000000000000..55b13bbe12c0 --- /dev/null +++ b/tests/neg/named-tuples-3.scala @@ -0,0 +1,7 @@ +import language.experimental.namedTuples + +def f: NamedTuple.NamedTuple[(Int, Any), (Int, String)] = ??? 
// error + +type Person = (name: Int, age: String) + +val p: Person = f From b9d86fe471e0cdb8a3e7b93d36c7afaf80b7cc6f Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 5 Dec 2023 19:44:32 +0100 Subject: [PATCH 268/465] Fix test --- tests/neg/named-tuples-3.check | 4 ++++ tests/neg/named-tuples-3.scala | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 tests/neg/named-tuples-3.check diff --git a/tests/neg/named-tuples-3.check b/tests/neg/named-tuples-3.check new file mode 100644 index 000000000000..131f6164748b --- /dev/null +++ b/tests/neg/named-tuples-3.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/named-tuples-3.scala:7:16 -------------------------------------------------------------------------- +7 |val p: Person = f // error + | ^ + | Malformed NamedTuple: names must be string types, but Int was found. diff --git a/tests/neg/named-tuples-3.scala b/tests/neg/named-tuples-3.scala index 55b13bbe12c0..0f1215338b0a 100644 --- a/tests/neg/named-tuples-3.scala +++ b/tests/neg/named-tuples-3.scala @@ -1,7 +1,7 @@ import language.experimental.namedTuples -def f: NamedTuple.NamedTuple[(Int, Any), (Int, String)] = ??? // error +def f: NamedTuple.NamedTuple[(Int, Any), (Int, String)] = ??? 
type Person = (name: Int, age: String) -val p: Person = f +val p: Person = f // error From 0fbdb497c40aee5b3e17c22bf9030f55ddcc3bca Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 8 Dec 2023 11:37:45 +0100 Subject: [PATCH 269/465] Simplify tupleElementTypes unapply handling --- .../src/dotty/tools/dotc/core/TypeUtils.scala | 1 + .../tools/dotc/transform/PatternMatcher.scala | 4 +- .../dotty/tools/dotc/typer/Applications.scala | 44 +++++++------------ .../src/dotty/tools/dotc/typer/Typer.scala | 1 + 4 files changed, 22 insertions(+), 28 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index 0dafc6515928..d4be03e9aae4 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -8,6 +8,7 @@ import Names.{Name, TermName} import Constants.Constant class TypeUtils { + /** A decorator that provides methods on types * that are needed in the transformer pipeline. 
*/ diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index a7f987b8b2f3..11c782b04ec6 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -371,7 +371,9 @@ object PatternMatcher { val arity = productArity(unappType, unapp.srcPos) unapplyProductSeqPlan(unappResult, args, arity) else if unappResult.info <:< defn.NonEmptyTupleTypeRef then - val components = (0 until foldApplyTupleType(unappResult.denot.info).length).toList.map(tupleApp(_, ref(unappResult))) + val components = + (0 until unappResult.denot.info.tupleElementTypes.getOrElse(Nil).length) + .toList.map(tupleApp(_, ref(unappResult))) matchArgsPlan(components, args, onSuccess) else { assert(isGetMatch(unappType)) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 082c8bf3d1db..43e6fe30f370 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -150,7 +150,7 @@ object Applications { (0 until argsNum).map(i => if (i < arity - 1) selTps(i) else elemTp).toList } - def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: SrcPos)(using Context): List[Type] = { + def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: SrcPos)(using Context): List[Type] = def getName(fn: Tree): Name = fn match case TypeApply(fn, _) => getName(fn) @@ -165,46 +165,36 @@ object Applications { Nil } - def unapplySeq(tp: Type)(fallback: => List[Type]): List[Type] = { + def unapplySeq(tp: Type)(fallback: => List[Type]): List[Type] = val elemTp = unapplySeqTypeElemTp(tp) - if (elemTp.exists) args.map(Function.const(elemTp)) - else if (isProductSeqMatch(tp, args.length, pos)) productSeqSelectors(tp, args.length, pos) - else if 
tp.derivesFrom(defn.NonEmptyTupleClass) then foldApplyTupleType(tp) + if elemTp.exists then + args.map(Function.const(elemTp)) + else if isProductSeqMatch(tp, args.length, pos) then + productSeqSelectors(tp, args.length, pos) + else if tp.derivesFrom(defn.NonEmptyTupleClass) then + tp.tupleElementTypes.getOrElse(Nil) else fallback - } - if (unapplyName == nme.unapplySeq) - unapplySeq(unapplyResult) { + if unapplyName == nme.unapplySeq then + unapplySeq(unapplyResult): if (isGetMatch(unapplyResult, pos)) unapplySeq(getTp)(fail) else fail - } - else { + else assert(unapplyName == nme.unapply) - if (isProductMatch(unapplyResult, args.length, pos)) + if isProductMatch(unapplyResult, args.length, pos) then productSelectorTypes(unapplyResult, pos) - else if (isGetMatch(unapplyResult, pos)) + else if isGetMatch(unapplyResult, pos) then getUnapplySelectors(getTp, args, pos) - else if (unapplyResult.widenSingleton isRef defn.BooleanClass) + else if unapplyResult.derivesFrom(defn.BooleanClass) then Nil - else if (defn.isProductSubType(unapplyResult) && productArity(unapplyResult, pos) != 0) + else if defn.isProductSubType(unapplyResult) && productArity(unapplyResult, pos) != 0 then productSelectorTypes(unapplyResult, pos) // this will cause a "wrong number of arguments in pattern" error later on, // which is better than the message in `fail`. 
else if unapplyResult.derivesFrom(defn.NonEmptyTupleClass) then - foldApplyTupleType(unapplyResult) + unapplyResult.tupleElementTypes.getOrElse(Nil) else fail - } - } - - def foldApplyTupleType(tp: Type)(using Context): List[Type] = - object tupleFold extends TypeAccumulator[List[Type]]: - override def apply(accum: List[Type], t: Type): List[Type] = - t match - case AppliedType(tycon, x :: x2 :: Nil) if tycon.typeSymbol == defn.PairClass => - apply(x :: accum, x2) - case x => foldOver(accum, x) - end tupleFold - tupleFold(Nil, tp).reverse + end unapplyArgs def wrapDefs(defs: mutable.ListBuffer[Tree] | Null, tree: Tree)(using Context): Tree = if (defs != null && defs.nonEmpty) tpd.Block(defs.toList, tree) else tree diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a510a6eaa578..5dda242df4ff 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2460,6 +2460,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer || body1.isInstanceOf[Literal] val symTp = if isStableIdentifierOrLiteral || pt.isNamedTupleType then pt + // need to combine tuple element types with expected named type else if isWildcardStarArg(body1) || pt == defn.ImplicitScrutineeTypeRef || body1.tpe <:< pt // There is some strange interaction with gadt matching. 
From 6f53dcdb37ea6036c75eca5639e685ac93b6eaab Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 8 Dec 2023 11:47:38 +0100 Subject: [PATCH 270/465] Fix pattern matching for get matches --- .../tools/dotc/transform/PatternMatcher.scala | 4 +- .../dotty/tools/dotc/typer/Applications.scala | 147 +++++++++++------- .../src/dotty/tools/dotc/typer/Checking.scala | 4 +- .../src/dotty/tools/dotc/typer/Typer.scala | 50 +++--- tests/run/named-patterns.check | 5 + tests/run/named-patterns.scala | 20 ++- 6 files changed, 145 insertions(+), 85 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 11c782b04ec6..8856bd10bf08 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -379,7 +379,7 @@ object PatternMatcher { assert(isGetMatch(unappType)) val argsPlan = { val get = ref(unappResult).select(nme.get, _.info.isParameterless) - val arity = productArity(get.tpe, unapp.srcPos) + val arity = productArity(get.tpe.stripNamedTuple, unapp.srcPos) if (isUnapplySeq) letAbstract(get) { getResult => if unapplySeqTypeElemTp(get.tpe).exists @@ -390,7 +390,7 @@ object PatternMatcher { letAbstract(get) { getResult => val selectors = if (args.tail.isEmpty) ref(getResult) :: Nil - else productSelectors(get.tpe).map(ref(getResult).select(_)) + else productSelectors(getResult.info).map(ref(getResult).select(_)) matchArgsPlan(selectors, args, onSuccess) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 43e6fe30f370..f7fe5be7a1c7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -18,6 +18,7 @@ import Names.* import StdNames.* import ContextOps.* import NameKinds.DefaultGetterName +import Typer.tryEither import ProtoTypes.* import Inferencing.* 
import reporting.* @@ -135,14 +136,6 @@ object Applications { sels.takeWhile(_.exists).toList } - def getUnapplySelectors(tp: Type, args: List[untpd.Tree], pos: SrcPos)(using Context): List[Type] = - if (args.length > 1 && !(tp.derivesFrom(defn.SeqClass))) { - val sels = productSelectorTypes(tp, pos) - if (sels.length == args.length) sels - else tp :: Nil - } - else tp :: Nil - def productSeqSelectors(tp: Type, argsNum: Int, pos: SrcPos)(using Context): List[Type] = { val selTps = productSelectorTypes(tp, pos) val arity = selTps.length @@ -150,22 +143,30 @@ object Applications { (0 until argsNum).map(i => if (i < arity - 1) selTps(i) else elemTp).toList } - def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: SrcPos)(using Context): List[Type] = - def getName(fn: Tree): Name = + /** A utility class that matches results of unapplys with patterns. Two queriable members: + * val argTypes: List[Type] + * def typedPatterns(qual: untpd.Tree, typer: Typer): List[Tree] + * TODO: Move into Applications trait. No need to keep it outside. But it's a large + * refactor, so do this when the rest is merged. 
+ */ + class UnapplyArgs(unapplyResult: Type, unapplyFn: Tree, unadaptedArgs: List[untpd.Tree], pos: SrcPos)(using Context): + private var args = unadaptedArgs + + private def getName(fn: Tree): Name = fn match case TypeApply(fn, _) => getName(fn) case Apply(fn, _) => getName(fn) case fn: RefTree => fn.name - val unapplyName = getName(unapplyFn) // tolerate structural `unapply`, which does not have a symbol + private val unapplyName = getName(unapplyFn) // tolerate structural `unapply`, which does not have a symbol - def getTp = extractorMemberType(unapplyResult, nme.get, pos) + private def getTp = extractorMemberType(unapplyResult, nme.get, pos) - def fail = { + private def fail = { report.error(UnapplyInvalidReturnType(unapplyResult, unapplyName), pos) Nil } - def unapplySeq(tp: Type)(fallback: => List[Type]): List[Type] = + private def unapplySeq(tp: Type)(fallback: => List[Type]): List[Type] = val elemTp = unapplySeqTypeElemTp(tp) if elemTp.exists then args.map(Function.const(elemTp)) @@ -175,26 +176,84 @@ object Applications { tp.tupleElementTypes.getOrElse(Nil) else fallback - if unapplyName == nme.unapplySeq then - unapplySeq(unapplyResult): - if (isGetMatch(unapplyResult, pos)) unapplySeq(getTp)(fail) - else fail - else - assert(unapplyName == nme.unapply) - if isProductMatch(unapplyResult, args.length, pos) then - productSelectorTypes(unapplyResult, pos) - else if isGetMatch(unapplyResult, pos) then - getUnapplySelectors(getTp, args, pos) - else if unapplyResult.derivesFrom(defn.BooleanClass) then - Nil - else if defn.isProductSubType(unapplyResult) && productArity(unapplyResult, pos) != 0 then - productSelectorTypes(unapplyResult, pos) - // this will cause a "wrong number of arguments in pattern" error later on, - // which is better than the message in `fail`. 
- else if unapplyResult.derivesFrom(defn.NonEmptyTupleClass) then - unapplyResult.tupleElementTypes.getOrElse(Nil) - else fail - end unapplyArgs + private def tryAdaptPatternArgs(elems: List[untpd.Tree], pt: Type)(using Context): Option[List[untpd.Tree]] = + tryEither[Option[List[untpd.Tree]]] + (Some(desugar.adaptPatternArgs(elems, pt))) + ((_, _) => None) + + private def getUnapplySelectors(tp: Type)(using Context): List[Type] = + if args.length > 1 && !(tp.derivesFrom(defn.SeqClass)) then + productUnapplySelectors(tp).getOrElse: + // There are unapplys with return types which have `get` and `_1, ..., _n` + // as members, but which are not subtypes of Product. So `productUnapplySelectors` + // would return None for these, but they are still valid types + // for a get match. A test case is pos/extractors.scala. + val sels = productSelectorTypes(tp, pos) + if (sels.length == args.length) sels + else tp :: Nil + else tp :: Nil + + private def productUnapplySelectors(tp: Type)(using Context): Option[List[Type]] = + if defn.isProductSubType(tp) then + tryAdaptPatternArgs(args, tp) match + case Some(args1) if isProductMatch(tp, args1.length, pos) => + args = args1 + Some(productSelectorTypes(tp, pos)) + case _ => None + else tp.widen.normalized.dealias match + case tp @ defn.NamedTuple(_, tt) => + tryAdaptPatternArgs(args, tp) match + case Some(args1) => + args = args1 + tt.tupleElementTypes + case _ => None + case _ => None + + /** The computed argument types which will be the scrutinees of the sub-patterns. 
*/ + val argTypes: List[Type] = + if unapplyName == nme.unapplySeq then + unapplySeq(unapplyResult): + if (isGetMatch(unapplyResult, pos)) unapplySeq(getTp)(fail) + else fail + else + assert(unapplyName == nme.unapply) + productUnapplySelectors(unapplyResult).getOrElse: + if isGetMatch(unapplyResult, pos) then + getUnapplySelectors(getTp) + else if unapplyResult.derivesFrom(defn.BooleanClass) then + Nil + else if unapplyResult.derivesFrom(defn.NonEmptyTupleClass) then + unapplyResult.tupleElementTypes.getOrElse(Nil) + else if defn.isProductSubType(unapplyResult) && productArity(unapplyResult, pos) != 0 then + productSelectorTypes(unapplyResult, pos) + // this will cause a "wrong number of arguments in pattern" error later on, + // which is better than the message in `fail`. + else fail + + /** The typed patterns of this unapply */ + def typedPatterns(qual: untpd.Tree, typer: Typer): List[Tree] = + unapp.println(i"unapplyQual = $qual, unapplyArgs = ${unapplyResult} with $argTypes / $args") + for argType <- argTypes do + assert(!isBounds(argType), unapplyResult.show) + val alignedArgs = argTypes match + case argType :: Nil + if args.lengthCompare(1) > 0 + && Feature.autoTuplingEnabled + && defn.isTupleNType(argType) => + untpd.Tuple(args) :: Nil + case _ => + args + val alignedArgTypes = + if argTypes.length == alignedArgs.length then + argTypes + else + report.error(UnapplyInvalidNumberOfArguments(qual, argTypes), pos) + argTypes.take(args.length) ++ + List.fill(argTypes.length - args.length)(WildcardType) + alignedArgs.lazyZip(alignedArgTypes).map(typer.typed(_, _)) + .showing(i"unapply patterns = $result", unapp) + + end UnapplyArgs def wrapDefs(defs: mutable.ListBuffer[Tree] | Null, tree: Tree)(using Context): Tree = if (defs != null && defs.nonEmpty) tpd.Block(defs.toList, tree) else tree @@ -1549,25 +1608,9 @@ trait Applications extends Compatibility { typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil))) inlinedUnapplyFnAndApp(dummyArg, 
unapplyAppCall) - var argTypes = unapplyArgs(unapplyApp.tpe, unapplyFn, args, tree.srcPos) - for (argType <- argTypes) assert(!isBounds(argType), unapplyApp.tpe.show) - val bunchedArgs = argTypes match { - case argType :: Nil => - if args.lengthCompare(1) > 0 - && Feature.autoTuplingEnabled - && defn.isTupleNType(argType) - then untpd.Tuple(args) :: Nil - else args - case _ => args - } - if (argTypes.length != bunchedArgs.length) { - report.error(UnapplyInvalidNumberOfArguments(qual, argTypes), tree.srcPos) - argTypes = argTypes.take(args.length) ++ - List.fill(argTypes.length - args.length)(WildcardType) - } - val unapplyPatterns = bunchedArgs.lazyZip(argTypes) map (typed(_, _)) + val unapplyPatterns = UnapplyArgs(unapplyApp.tpe, unapplyFn, unadaptedArgs, tree.srcPos) + .typedPatterns(qual, this) val result = assignType(cpy.UnApply(tree)(newUnapplyFn, unapplyImplicits(dummyArg, unapplyApp), unapplyPatterns), ownType) - unapp.println(s"unapply patterns = $unapplyPatterns") if (ownType.stripped eq selType.stripped) || ownType.isError then result else tryWithTypeTest(Typed(result, TypeTree(ownType)), selType) case tp => diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 3c74e9f4ed90..662a4feb867a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -29,7 +29,7 @@ import config.Printers.{typr, patmatch} import NameKinds.DefaultGetterName import NameOps.* import SymDenotations.{NoCompleter, NoDenotation} -import Applications.unapplyArgs +import Applications.UnapplyArgs import Inferencing.isFullyDefined import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern} import transform.ValueClasses.underlyingOfValueClass @@ -982,7 +982,7 @@ trait Checking { case UnApply(fn, implicits, pats) => check(pat, pt) && (isIrrefutable(fn, pats.length) || fail(pat, pt, Reason.RefutableExtractor)) && { - val argPts = 
unapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos) + val argPts = UnapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos).argTypes pats.corresponds(argPts)(recur) } case Alternative(pats) => diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 5dda242df4ff..038693d3d7e1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -113,6 +113,31 @@ object Typer { def rememberSearchFailure(tree: tpd.Tree, fail: SearchFailure) = tree.putAttachment(HiddenSearchFailure, fail :: tree.attachmentOrElse(HiddenSearchFailure, Nil)) + + def tryEither[T](op: Context ?=> T)(fallBack: (T, TyperState) => T)(using Context): T = { + val nestedCtx = ctx.fresh.setNewTyperState() + val result = op(using nestedCtx) + if (nestedCtx.reporter.hasErrors && !nestedCtx.reporter.hasStickyErrors) { + record("tryEither.fallBack") + fallBack(result, nestedCtx.typerState) + } + else { + record("tryEither.commit") + nestedCtx.typerState.commit() + result + } + } + + /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back + * to errors and result of `op1`. + */ + def tryAlternatively[T](op1: Context ?=> T)(op2: Context ?=> T)(using Context): T = + tryEither(op1) { (failedVal, failedState) => + tryEither(op2) { (_, _) => + failedState.commit() + failedVal + } + } } /** Typecheck trees, the main entry point is `typed`. 
* @@ -3441,31 +3466,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPattern(tree: untpd.Tree, selType: Type = WildcardType)(using Context): Tree = withMode(Mode.Pattern)(typed(tree, selType)) - def tryEither[T](op: Context ?=> T)(fallBack: (T, TyperState) => T)(using Context): T = { - val nestedCtx = ctx.fresh.setNewTyperState() - val result = op(using nestedCtx) - if (nestedCtx.reporter.hasErrors && !nestedCtx.reporter.hasStickyErrors) { - record("tryEither.fallBack") - fallBack(result, nestedCtx.typerState) - } - else { - record("tryEither.commit") - nestedCtx.typerState.commit() - result - } - } - - /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back - * to errors and result of `op1`. - */ - def tryAlternatively[T](op1: Context ?=> T)(op2: Context ?=> T)(using Context): T = - tryEither(op1) { (failedVal, failedState) => - tryEither(op2) { (_, _) => - failedState.commit() - failedVal - } - } - /** Is `pt` a prototype of an `apply` selection, or a parameterless function yielding one? 
*/ def isApplyProto(pt: Type)(using Context): Boolean = pt.revealIgnored match { case pt: SelectionProto => pt.name == nme.apply diff --git a/tests/run/named-patterns.check b/tests/run/named-patterns.check index ba8dbb8b21f7..0b9f3661b7ba 100644 --- a/tests/run/named-patterns.check +++ b/tests/run/named-patterns.check @@ -3,6 +3,11 @@ name Bob age 22 age 22, name Bob Bob, 22 +name Bob, age 22 +name (Bob,22) +age (Bob,22) +age 22, name Bob +Bob, 22 1003 Lausanne, Rue de la Gare 44 1003 Lausanne Rue de la Gare in Lausanne diff --git a/tests/run/named-patterns.scala b/tests/run/named-patterns.scala index 1e7e0697e782..73753c855073 100644 --- a/tests/run/named-patterns.scala +++ b/tests/run/named-patterns.scala @@ -6,6 +6,10 @@ object Test1: object Person: def unapply(p: Person): (name: String, age: Int) = (p.name, p.age) + class Person2(val name: String, val age: Int) + object Person2: + def unapply(p: Person2): Option[(name: String, age: Int)] = Some((p.name, p.age)) + case class Address(city: String, zip: Int, street: String, number: Int) @main def Test = @@ -21,6 +25,18 @@ object Test1: bob match case Person(age, name) => println(s"$age, $name") + val bob2 = Person2("Bob", 22) + bob2 match + case Person2(name = n, age = a) => println(s"name $n, age $a") + bob2 match + case Person2(name = n) => println(s"name $n") + bob2 match + case Person2(age = a) => println(s"age $a") + bob2 match + case Person2(age = a, name = n) => println(s"age $a, name $n") + bob2 match + case Person2(age, name) => println(s"$age, $name") + val addr = Address("Lausanne", 1003, "Rue de la Gare", 44) addr match case Address(city = c, zip = z, street = s, number = n) => @@ -37,7 +53,3 @@ object Test1: addr match case Address(c, z, s, number) => println(s"$z $c, $s $number") - - - - From b44f15d477da2b71854b3036f016ac868e0a9eb4 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 14 Dec 2023 16:02:46 +0100 Subject: [PATCH 271/465] Another fix for named get patterns Also, add deep matches to tests 
--- .../dotty/tools/dotc/typer/Applications.scala | 7 ++++++- tests/run/named-patterns.check | 9 +++++++-- tests/run/named-patterns.scala | 19 +++++++++++++++++++ 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index f7fe5be7a1c7..76d057f15408 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -182,7 +182,12 @@ object Applications { ((_, _) => None) private def getUnapplySelectors(tp: Type)(using Context): List[Type] = - if args.length > 1 && !(tp.derivesFrom(defn.SeqClass)) then + // We treat patterns as product elements if + // they are named, or there is more than one pattern + val isProduct = args match + case x :: xs => x.isInstanceOf[untpd.NamedArg] || xs.nonEmpty + case _ => false + if isProduct && !tp.derivesFrom(defn.SeqClass) then productUnapplySelectors(tp).getOrElse: // There are unapplys with return types which have `get` and `_1, ..., _n` // as members, but which are not subtypes of Product. 
So `productUnapplySelectors` diff --git a/tests/run/named-patterns.check b/tests/run/named-patterns.check index 0b9f3661b7ba..9ccc08d67069 100644 --- a/tests/run/named-patterns.check +++ b/tests/run/named-patterns.check @@ -4,8 +4,8 @@ age 22 age 22, name Bob Bob, 22 name Bob, age 22 -name (Bob,22) -age (Bob,22) +name Bob +age 22 age 22, name Bob Bob, 22 1003 Lausanne, Rue de la Gare 44 @@ -13,3 +13,8 @@ Bob, 22 Rue de la Gare in Lausanne 1003 Lausanne, Rue de la Gare 44 1003 Lausanne, Rue de la Gare 44 +Bob, aged 22, in 1003 Lausanne, Rue de la Gare 44 +Bob in 1003 Lausanne +aged 22 in Rue de la Gare in Lausanne +Bob, aged 22 in 1003 Lausanne, Rue de la Gare 44 +Bob, aged 22 in 1003 Lausanne, Rue de la Gare 44 diff --git a/tests/run/named-patterns.scala b/tests/run/named-patterns.scala index 73753c855073..7c24dc8d683a 100644 --- a/tests/run/named-patterns.scala +++ b/tests/run/named-patterns.scala @@ -53,3 +53,22 @@ object Test1: addr match case Address(c, z, s, number) => println(s"$z $c, $s $number") + + type Person3 = (p: Person2, addr: Address) + + val p3 = (p = bob2, addr = addr) + p3 match + case (addr = Address(city = c, zip = z, street = s, number = n), p = Person2(name = nn, age = a)) => + println(s"$nn, aged $a, in $z $c, $s $n") + p3 match + case (p = Person2(name = nn), addr = Address(zip = z, city = c)) => + println(s"$nn in $z $c") + p3 match + case (p = Person2(age = a), addr = Address(city = c, street = s)) => + println(s"aged $a in $s in $c") + p3 match + case (Person2(age = a, name = nn), Address(number = n, street = s, zip = z, city = c)) => + println(s"$nn, aged $a in $z $c, $s $n") + p3 match + case (Person2(nn, a), Address(c, z, s, number)) => + println(s"$nn, aged $a in $z $c, $s $number") From 02aa578d26335ee1ce211339e33bf993a7c804ee Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 14 Dec 2023 18:51:03 +0100 Subject: [PATCH 272/465] Avoid widening into unreducible types when inferring types This is a general improvement, independent of 
named tuples. --- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 7 ++++++- tests/pos/named-tuple-widen.scala | 9 +++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 tests/pos/named-tuple-widen.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 012464f71d9b..e67e60dd45ea 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -385,7 +385,12 @@ object TypeOps: (tp.tp1.dealias, tp.tp2.dealias) match case (tp1 @ AppliedType(tycon1, args1), tp2 @ AppliedType(tycon2, args2)) if tycon1.typeSymbol == tycon2.typeSymbol && (tycon1 =:= tycon2) => - mergeRefinedOrApplied(tp1, tp2) + mergeRefinedOrApplied(tp1, tp2) match + case tp: AppliedType if tp.isUnreducibleWild => + // fall back to or-dominators rather than inferring a type that would + // cause an unreducible type error later. + approximateOr(tp1, tp2) + case tp => tp case (tp1, tp2) => approximateOr(tp1, tp2) case _ => diff --git a/tests/pos/named-tuple-widen.scala b/tests/pos/named-tuple-widen.scala new file mode 100644 index 000000000000..410832e04c17 --- /dev/null +++ b/tests/pos/named-tuple-widen.scala @@ -0,0 +1,9 @@ +import language.experimental.namedTuples + +class A +class B +val y1: (a1: A, b1: B) = ??? +val y2: (a2: A, b2: B) = ??? +var z1 = if ??? 
then y1 else y2 // -- what is the type of z2 +var z2: NamedTuple.AnyNamedTuple = z1 +val _ = z1 = z2 \ No newline at end of file From b7115e795afb0a5507044bdf2a50b4ad2e6ccb03 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 12 Jan 2024 14:59:15 +0100 Subject: [PATCH 273/465] Fix rebase breakage --- compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 8856bd10bf08..f22a17f7fd27 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -364,7 +364,6 @@ object PatternMatcher { val selectors = productSelectors(unappType).take(args.length) .map(ref(unappResult).select(_)) matchArgsPlan(selectors, args, onSuccess) - } else if isUnapplySeq && unapplySeqTypeElemTp(unappType.finalResultType).exists then unapplySeqPlan(unappResult, args) else if isUnapplySeq && isProductSeqMatch(unappType, args.length, unapp.srcPos) then From 5c9bb5f29b1da50779ac3700f3641b3c9a13fdc9 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 12 Jan 2024 18:40:19 +0100 Subject: [PATCH 274/465] Make NamedTuples work under new MatchType spec --- library/src/scala/NamedTuple.scala | 23 ++++++++++++------- tests/pos/named-tuples-strawman-2.scala | 21 ++++++++++------- .../stdlibExperimentalDefinitions.scala | 2 ++ 3 files changed, 30 insertions(+), 16 deletions(-) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index d5334cc2773d..58e342e74864 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -15,6 +15,8 @@ object NamedTuple: extension [V <: Tuple](x: V) inline def withNames[N <: Tuple]: NamedTuple[N, V] = x + export NamedTupleDecomposition.{Names, DropNames} + extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) /** The underlying tuple without the names 
*/ @@ -103,14 +105,6 @@ object NamedTuple: end extension - /** The names of a named tuple, represented as a tuple of literal string values. */ - type Names[X <: AnyNamedTuple] <: Tuple = X match - case NamedTuple[n, _] => n - - /** The value types of a named tuple represented as a regular tuple. */ - type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match - case NamedTuple[_, x] => x - /** The size of a named tuple, represented as a literal constant subtype of Int */ type Size[X <: AnyNamedTuple] = Tuple.Size[DropNames[X]] @@ -182,3 +176,16 @@ object NamedTuple: NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] end NamedTuple + +@experimental +/** Separate from NamedTuple object so that we can match on the opaque type NamedTuple. */ +object NamedTupleDecomposition: + import NamedTuple.* + + /** The names of a named tuple, represented as a tuple of literal string values. */ + type Names[X <: AnyNamedTuple] <: Tuple = X match + case NamedTuple[n, _] => n + + /** The value types of a named tuple represented as a regular tuple. 
*/ + type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[_, x] => x diff --git a/tests/pos/named-tuples-strawman-2.scala b/tests/pos/named-tuples-strawman-2.scala index 7c18d063bcf4..709f537f8114 100644 --- a/tests/pos/named-tuples-strawman-2.scala +++ b/tests/pos/named-tuples-strawman-2.scala @@ -62,6 +62,17 @@ object TupleOps: inline def concatDistinct[X <: Tuple, Y <: Tuple](xs: X, ys: Y): ConcatDistinct[X, Y] = (xs ++ ys.filter[Y, [Elem] =>> ![Contains[X, Elem]]]).asInstanceOf[ConcatDistinct[X, Y]] +object NamedTupleDecomposition: + import NamedTupleOps.* + + /** The names of the named tuple type `NT` */ + type Names[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[n, _] => n + + /** The value types of the named tuple type `NT` */ + type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[_, x] => x + object NamedTupleOps: import TupleOps.* @@ -69,17 +80,11 @@ object NamedTupleOps: opaque type NamedTuple[N <: Tuple, +X <: Tuple] >: X <: AnyNamedTuple = X + export NamedTupleDecomposition.* + object NamedTuple: def apply[N <: Tuple, X <: Tuple](x: X): NamedTuple[N, X] = x - /** The names of the named tuple type `NT` */ - type Names[NT <: AnyNamedTuple] <: Tuple = NT match - case NamedTuple[n, _] => n - - /** The value types of the named tuple type `NT` */ - type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match - case NamedTuple[_, x] => x - extension [NT <: AnyNamedTuple](x: NT) inline def toTuple: DropNames[NT] = x.asInstanceOf inline def names: Names[NT] = constValueTuple[Names[NT]] diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 26cad0668b37..48ff5407ac87 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -77,6 +77,8 @@ val experimentalDefinitionInLibrary = Set( // New feature: named tuples "scala.NamedTuple", 
"scala.NamedTuple$", + "scala.NamedTupleDecomposition", + "scala.NamedTupleDecomposition$", ) From 47588307161a9e64163bbda3d70215dc72edc35a Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 12 Jan 2024 18:40:55 +0100 Subject: [PATCH 275/465] Avoid TypeError exception in RefinedPrinter --- compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 2873325aecb6..7e8bc24d1aee 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -244,7 +244,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def appliedText(tp: Type): Text = tp match case tp @ AppliedType(tycon, args) => - val namedElems = tp.namedTupleElementTypesUpTo(200, normalize = false) + val namedElems = + try tp.namedTupleElementTypesUpTo(200, normalize = false) + catch case ex: TypeError => Nil if namedElems.nonEmpty then toTextNamedTuple(namedElems) else tp.tupleElementTypesUpTo(200, normalize = false) match From 18f600de794e5a985fa77e59cb4833c51d3e3fed Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 12 Jan 2024 18:41:31 +0100 Subject: [PATCH 276/465] Move named-tuples-strawman.scala to pending --- tests/{ => pending}/pos/named-tuples-strawman.scala | 1 + 1 file changed, 1 insertion(+) rename tests/{ => pending}/pos/named-tuples-strawman.scala (96%) diff --git a/tests/pos/named-tuples-strawman.scala b/tests/pending/pos/named-tuples-strawman.scala similarity index 96% rename from tests/pos/named-tuples-strawman.scala rename to tests/pending/pos/named-tuples-strawman.scala index 859e1d1448e2..35675d1bfc76 100644 --- a/tests/pos/named-tuples-strawman.scala +++ b/tests/pending/pos/named-tuples-strawman.scala @@ -1,3 +1,4 @@ +// Currently does not compile because of #19434 object Test: object Named: From 
5513ed6b334a5c2b63d0fa67ad142c47257ff85e Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 12 Jan 2024 18:49:15 +0100 Subject: [PATCH 277/465] Update check file --- tests/neg/named-tuples-3.check | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/neg/named-tuples-3.check b/tests/neg/named-tuples-3.check index 131f6164748b..2091c36191c0 100644 --- a/tests/neg/named-tuples-3.check +++ b/tests/neg/named-tuples-3.check @@ -1,4 +1,7 @@ --- Error: tests/neg/named-tuples-3.scala:7:16 -------------------------------------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg/named-tuples-3.scala:7:16 ----------------------------------------------------- 7 |val p: Person = f // error | ^ - | Malformed NamedTuple: names must be string types, but Int was found. + | Found: NamedTuple.NamedTuple[(Int, Any), (Int, String)] + | Required: Person + | + | longer explanation available when compiling with `-explain` From fb1541adaf80a796d9cd8cccd59673e49b287570 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 14 Jan 2024 11:00:44 +0100 Subject: [PATCH 278/465] Better printing of NamedTuple type trees Use the sugared representation, not the raw NamedTuple type tree. 
--- .../src/dotty/tools/dotc/printing/RefinedPrinter.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 7e8bc24d1aee..5e15fd2ddd15 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -572,7 +572,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(AndTypePrec) { toText(args(0)) ~ " & " ~ atPrec(AndTypePrec + 1) { toText(args(1)) } } else if defn.isFunctionSymbol(tpt.symbol) && tpt.isInstanceOf[TypeTree] && tree.hasType && !printDebug - then changePrec(GlobalPrec) { toText(tree.typeOpt) } + then + changePrec(GlobalPrec) { toText(tree.typeOpt) } + else if tpt.symbol == defn.NamedTupleTypeRef.symbol + && !printDebug && tree.typeOpt.exists + then + toText(tree.typeOpt) else args match case arg :: _ if arg.isTerm => toTextLocal(tpt) ~ "(" ~ Text(args.map(argText), ", ") ~ ")" From cf09b19a63be7354c2f3fa744c22ddef1e8fe0e2 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 14 Jan 2024 11:06:53 +0100 Subject: [PATCH 279/465] Add FieldsOf type --- .../dotty/tools/dotc/core/Definitions.scala | 6 ++- .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../src/dotty/tools/dotc/core/TypeEval.scala | 27 ++++++++++-- .../reference/experimental/named-tuples.md | 44 ++++++++++++++++++- library/src/scala/NamedTuple.scala | 4 +- tests/neg/fieldsOf.scala | 13 ++++++ tests/pos/fieldsOf.scala | 18 ++++++++ tests/run/fieldsOf.check | 17 +++++++ 8 files changed, 123 insertions(+), 7 deletions(-) create mode 100644 tests/neg/fieldsOf.scala create mode 100644 tests/pos/fieldsOf.scala create mode 100644 tests/run/fieldsOf.check diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 5868da397fc3..4b91cff4dda2 100644 --- 
a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1318,6 +1318,9 @@ class Definitions { final def isCompiletime_S(sym: Symbol)(using Context): Boolean = sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass + final def isNamedTuple_FieldsOf(sym: Symbol)(using Context): Boolean = + sym.name == tpnme.FieldsOf && sym.owner == NamedTupleModule.moduleClass + private val compiletimePackageAnyTypes: Set[Name] = Set( tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString ) @@ -1346,7 +1349,7 @@ class Definitions { tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches, tpnme.CharAt ) private val compiletimePackageOpTypes: Set[Name] = - Set(tpnme.S) + Set(tpnme.S, tpnme.FieldsOf) ++ compiletimePackageAnyTypes ++ compiletimePackageIntTypes ++ compiletimePackageLongTypes @@ -1359,6 +1362,7 @@ class Definitions { compiletimePackageOpTypes.contains(sym.name) && ( isCompiletime_S(sym) + || isNamedTuple_FieldsOf(sym) || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name) || sym.owner == CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name) || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index ebd246d20575..7ab6750f7a0b 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -361,6 +361,7 @@ object StdNames { val Eql: N = "Eql" val EnumValue: N = "EnumValue" val ExistentialTypeTree: N = "ExistentialTypeTree" + val FieldsOf: N = "FieldsOf" val Flag : N = "Flag" val Ident: N = "Ident" val Import: N = "Import" diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index b5684b07f181..643b83882648 100644 --- 
a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -6,11 +6,14 @@ import Types.*, Contexts.*, Symbols.*, Constants.*, Decorators.* import config.Printers.typr import reporting.trace import StdNames.tpnme +import Flags.CaseClass +import TypeOps.nestedPairs object TypeEval: def tryCompiletimeConstantFold(tp: AppliedType)(using Context): Type = tp.tycon match case tycon: TypeRef if defn.isCompiletimeAppliedType(tycon.symbol) => + extension (tp: Type) def fixForEvaluation: Type = tp.normalized.dealias match // enable operations for constant singleton terms. E.g.: @@ -94,6 +97,21 @@ object TypeEval: throw TypeError(em"${e.getMessage.nn}") ConstantType(Constant(result)) + def fieldsOf: Option[Type] = + expectArgsNum(1) + val arg = tp.args.head + val cls = arg.classSymbol + if cls.is(CaseClass) then + val fields = cls.caseAccessors + val fieldLabels = fields.map: field => + ConstantType(Constant(field.name.toString)) + val fieldTypes = fields.map(arg.memberInfo) + Some: + defn.NamedTupleTypeRef.appliedTo: + nestedPairs(fieldLabels) :: nestedPairs(fieldTypes) :: Nil + else + None + def constantFold1[T](extractor: Type => Option[T], op: T => Any): Option[Type] = expectArgsNum(1) extractor(tp.args.head).map(a => runConstantOp(op(a))) @@ -122,11 +140,14 @@ object TypeEval: yield runConstantOp(op(a, b, c)) trace(i"compiletime constant fold $tp", typr, show = true) { - val name = tycon.symbol.name - val owner = tycon.symbol.owner + val sym = tycon.symbol + val name = sym.name + val owner = sym.owner val constantType = - if defn.isCompiletime_S(tycon.symbol) then + if defn.isCompiletime_S(sym) then constantFold1(natValue, _ + 1) + else if defn.isNamedTuple_FieldsOf(sym) then + fieldsOf else if owner == defn.CompiletimeOpsAnyModuleClass then name match case tpnme.Equals => constantFold2(constValue, _ == _) case tpnme.NotEquals => constantFold2(constValue, _ != _) diff --git 
a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md index f9ba87382e32..7dd7049d1126 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -100,6 +100,24 @@ The translation of named tuples to instances of `NamedTuple` is fixed by the spe - All tuple operations also work with named tuples "out of the box". - Macro libraries can rely on this expansion. +### The FieldsOf Type + +The `NamedTuple` object contains a type definition +```scala + type FieldsOf[T] <: AnyNamedTuple +``` +`FieldsOf` is treated specially by the compiler. When `FieldsOf` is applied to +an argument type that is an instance of a case class, the type expands to the named +tuple consisting of all the fields of that case class. Here, fields means: elements of the first parameter section. For instance, assuming +```scala +case class City(zip: Int, name: String, population: Int) +``` +then `FieldsOf[City]` is the named tuple +```scala +(zip: Int, name: String, population: Int) +``` +The same works for enum cases expanding to case classes. + ### Restrictions The following restrictions apply to named tuple elements: @@ -130,7 +148,29 @@ SimpleExpr ::= ... | '(' NamedExprInParens {‘,’ NamedExprInParens} ')' NamedExprInParens ::= id '=' ExprInParens -SimplePattern ::= ... - | '(' NamedPattern {‘,’ NamedPattern} ')' +Patterns ::= Pattern {‘,’ Pattern} + | NamedPattern {‘,’ NamedPattern} NamedPattern ::= id '=' Pattern ``` + +### Named Pattern Matching + +We allow named patterns not just for named tuples but also for case classes. +For instance: +```scala +city match + case c @ City(name = "London") => println(p.population) + case City(name = n, zip = 1026, population = pop) => println(pop) +``` + +Named constructor patterns are analogous to named tuple patterns. 
In both cases + + - either all fields are named or none is, + - every name must match the name some field of the selector, + - names can come in any order, + - not all fields of the selector need to be matched. + +This revives SIP 43, with a much simpler desugaring than originally proposed. +Named patterns are compatible with extensible pattern matching simply because +`unapply` results can be named tuples. + diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index 58e342e74864..f6f3087f79d7 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -175,10 +175,12 @@ object NamedTuple: case true => NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] + type FieldsOf[T] <: AnyNamedTuple + end NamedTuple -@experimental /** Separate from NamedTuple object so that we can match on the opaque type NamedTuple. */ +@experimental object NamedTupleDecomposition: import NamedTuple.* diff --git a/tests/neg/fieldsOf.scala b/tests/neg/fieldsOf.scala new file mode 100644 index 000000000000..2c88b18f977d --- /dev/null +++ b/tests/neg/fieldsOf.scala @@ -0,0 +1,13 @@ +import NamedTuple.FieldsOf + +case class Person(name: String, age: Int) +class Anon(name: String, age: Int) +def foo[T](): FieldsOf[T] = ??? + +def test = + var x: FieldsOf[Person] = ??? + x = foo[Person]() // ok + x = foo[Anon]() // error + x = foo() // error + + diff --git a/tests/pos/fieldsOf.scala b/tests/pos/fieldsOf.scala new file mode 100644 index 000000000000..3ce14f36cf28 --- /dev/null +++ b/tests/pos/fieldsOf.scala @@ -0,0 +1,18 @@ +import NamedTuple.FieldsOf + +case class Person(name: String, age: Int) + +type PF = FieldsOf[Person] + +def foo[T]: FieldsOf[T] = ??? + +class Anon(name: String, age: Int) + +def test = + var x: FieldsOf[Person] = ??? 
+ val y: (name: String, age: Int) = x + x = y + x = foo[Person] + //x = foo[Anon] // error + + diff --git a/tests/run/fieldsOf.check b/tests/run/fieldsOf.check new file mode 100644 index 000000000000..beb79c056527 --- /dev/null +++ b/tests/run/fieldsOf.check @@ -0,0 +1,17 @@ +-- [E007] Type Mismatch Error: ../neg/fieldsOf.scala:10:15 --------------------- +10 | x = foo[Anon]() // error + | ^^^^^^^^^^^ + | Found: NamedTuple.FieldsOf[Anon] + | Required: (name : String, age : Int) + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: ../neg/fieldsOf.scala:11:9 ---------------------- +11 | x = foo() // error + | ^^^^^ + | Found: NamedTuple.FieldsOf[T] + | Required: (name : String, age : Int) + | + | where: T is a type variable + | + | longer explanation available when compiling with `-explain` +2 errors found From 4baa5094a943236d001c6fbad6abb208f3bbb2d2 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 14 Jan 2024 11:08:49 +0100 Subject: [PATCH 280/465] Describe and add tests for source incompabilities --- .../reference/experimental/named-tuples.md | 21 +++++++++++++++++++ tests/pos/namedtuple-src-incompat.scala | 17 +++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 tests/pos/namedtuple-src-incompat.scala diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md index 7dd7049d1126..ea0996435213 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -174,3 +174,24 @@ This revives SIP 43, with a much simpler desugaring than originally proposed. Named patterns are compatible with extensible pattern matching simply because `unapply` results can be named tuples. +### Source Incompatibilities + +There are some source incompatibilities involving named tuples of length one. +First, what was previously classified as an assignment could now be interpreted as a named tuple. 
Example: +```scala +var age: Int +(age = 1) +``` +This was an assignment in parentheses before, and is a named tuple of arity one now. It is however not idiomatic Scala code, since assignments are not usually enclosed in parentheses. + +Second, what was a named argument to an infix operator can now be interpreted as a named tuple. +```scala +class C: + infix def f(age: Int) +val c: C +``` +then +```scala +c f (age = 1) +``` +will now construct a tuple as second operand instead of passing a named parameter. diff --git a/tests/pos/namedtuple-src-incompat.scala b/tests/pos/namedtuple-src-incompat.scala new file mode 100644 index 000000000000..57451a4321b7 --- /dev/null +++ b/tests/pos/namedtuple-src-incompat.scala @@ -0,0 +1,17 @@ +import language.experimental.namedTuples +var age = 22 +val x = (age = 1) +val _: (age: Int) = x +val x2 = {age = 1} +val _: Unit = x2 + +class C: + infix def id[T](age: T): T = age + +def test = + val c: C = ??? + val y = c id (age = 1) + val _: (age: Int) = y + val y2 = c.id(age = 1) + val _: Int = y2 + From 702dcd5e4267046268439201a7382c0ffa40669c Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 14 Jan 2024 17:21:51 +0100 Subject: [PATCH 281/465] Rename NamedTuple.FieldsOf --> NamedTuple.From --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 8 ++++---- compiler/src/dotty/tools/dotc/core/StdNames.scala | 2 +- compiler/src/dotty/tools/dotc/core/TypeEval.scala | 2 +- docs/_docs/reference/experimental/named-tuples.md | 8 ++++---- library/src/scala/NamedTuple.scala | 2 +- tests/neg/fieldsOf.scala | 6 ++---- tests/pos/fieldsOf.scala | 8 +++----- 7 files changed, 16 insertions(+), 20 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 4b91cff4dda2..15880207b3c8 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1318,8 +1318,8 @@ class Definitions { final def 
isCompiletime_S(sym: Symbol)(using Context): Boolean = sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass - final def isNamedTuple_FieldsOf(sym: Symbol)(using Context): Boolean = - sym.name == tpnme.FieldsOf && sym.owner == NamedTupleModule.moduleClass + final def isNamedTuple_From(sym: Symbol)(using Context): Boolean = + sym.name == tpnme.From && sym.owner == NamedTupleModule.moduleClass private val compiletimePackageAnyTypes: Set[Name] = Set( tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString @@ -1349,7 +1349,7 @@ class Definitions { tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches, tpnme.CharAt ) private val compiletimePackageOpTypes: Set[Name] = - Set(tpnme.S, tpnme.FieldsOf) + Set(tpnme.S, tpnme.From) ++ compiletimePackageAnyTypes ++ compiletimePackageIntTypes ++ compiletimePackageLongTypes @@ -1362,7 +1362,7 @@ class Definitions { compiletimePackageOpTypes.contains(sym.name) && ( isCompiletime_S(sym) - || isNamedTuple_FieldsOf(sym) + || isNamedTuple_From(sym) || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name) || sym.owner == CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name) || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 7ab6750f7a0b..38240d03864c 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -361,8 +361,8 @@ object StdNames { val Eql: N = "Eql" val EnumValue: N = "EnumValue" val ExistentialTypeTree: N = "ExistentialTypeTree" - val FieldsOf: N = "FieldsOf" val Flag : N = "Flag" + val From: N = "From" val Ident: N = "Ident" val Import: N = "Import" val Literal: N = "Literal" diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index 
643b83882648..b16a89a1aeb4 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -146,7 +146,7 @@ object TypeEval: val constantType = if defn.isCompiletime_S(sym) then constantFold1(natValue, _ + 1) - else if defn.isNamedTuple_FieldsOf(sym) then + else if defn.isNamedTuple_From(sym) then fieldsOf else if owner == defn.CompiletimeOpsAnyModuleClass then name match case tpnme.Equals => constantFold2(constValue, _ == _) diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md index ea0996435213..6ee8bc9bcdec 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -100,19 +100,19 @@ The translation of named tuples to instances of `NamedTuple` is fixed by the spe - All tuple operations also work with named tuples "out of the box". - Macro libraries can rely on this expansion. -### The FieldsOf Type +### The NamedTuple.From Type The `NamedTuple` object contains a type definition ```scala - type FieldsOf[T] <: AnyNamedTuple + type From[T] <: AnyNamedTuple ``` -`FieldsOf` is treated specially by the compiler. When `FieldsOf` is applied to +`From` is treated specially by the compiler. When `NamedTuple.From` is applied to an argument type that is an instance of a case class, the type expands to the named tuple consisting of all the fields of that case class. Here, fields means: elements of the first parameter section. 
For instance, assuming ```scala case class City(zip: Int, name: String, population: Int) ``` -then `FieldsOf[City]` is the named tuple +then `NamedTuple.From[City]` is the named tuple ```scala (zip: Int, name: String, population: Int) ``` diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index f6f3087f79d7..b06bc599f9fd 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -175,7 +175,7 @@ object NamedTuple: case true => NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] - type FieldsOf[T] <: AnyNamedTuple + type From[T] <: AnyNamedTuple end NamedTuple diff --git a/tests/neg/fieldsOf.scala b/tests/neg/fieldsOf.scala index 2c88b18f977d..d3539070b556 100644 --- a/tests/neg/fieldsOf.scala +++ b/tests/neg/fieldsOf.scala @@ -1,11 +1,9 @@ -import NamedTuple.FieldsOf - case class Person(name: String, age: Int) class Anon(name: String, age: Int) -def foo[T](): FieldsOf[T] = ??? +def foo[T](): NamedTuple.From[T] = ??? def test = - var x: FieldsOf[Person] = ??? + var x: NamedTuple.From[Person] = ??? x = foo[Person]() // ok x = foo[Anon]() // error x = foo() // error diff --git a/tests/pos/fieldsOf.scala b/tests/pos/fieldsOf.scala index 3ce14f36cf28..08f20a1f7e8e 100644 --- a/tests/pos/fieldsOf.scala +++ b/tests/pos/fieldsOf.scala @@ -1,15 +1,13 @@ -import NamedTuple.FieldsOf - case class Person(name: String, age: Int) -type PF = FieldsOf[Person] +type PF = NamedTuple.From[Person] -def foo[T]: FieldsOf[T] = ??? +def foo[T]: NamedTuple.From[T] = ??? class Anon(name: String, age: Int) def test = - var x: FieldsOf[Person] = ??? + var x: NamedTuple.From[Person] = ??? 
val y: (name: String, age: Int) = x x = y x = foo[Person] From 1e31d16dec1ee6d17c099336eed814562498e0bb Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Feb 2024 18:16:09 +0100 Subject: [PATCH 282/465] Implement Fields as a Selectable type member Subclasses of Selectable can instantiate Fields to a named tuple type that provides possible selection names and their types on instances of the type. See: https://contributors.scala-lang.org/t/expanding-changing-selectable-based-on-upcoming-named-tuples-feature/6395/5 --- compiler/src/dotty/tools/dotc/core/StdNames.scala | 1 + compiler/src/dotty/tools/dotc/typer/Typer.scala | 13 +++++++++++++ library/src/scala/Selectable.scala | 3 ++- 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 38240d03864c..dd8e23a0030f 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -362,6 +362,7 @@ object StdNames { val EnumValue: N = "EnumValue" val ExistentialTypeTree: N = "ExistentialTypeTree" val Flag : N = "Flag" + val Fields: N = "Fields" val From: N = "From" val Ident: N = "Ident" val Import: N = "Import" diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 038693d3d7e1..58f08768d9bf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -798,6 +798,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else typedDynamicSelect(tree2, Nil, pt) else + if qual.tpe.derivesFrom(defn.SelectableClass) + && selName.isTermName && !isDynamicExpansion(tree) + && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto + then + val fieldsType = qual.tpe.select(tpnme.Fields).dealias.simplified + val fields = fieldsType.namedTupleElementTypes + typr.println(i"try dyn select $qual, $selName, $fields") + fields.find(_._1 == 
selName) match + case Some((fieldName, fieldType)) => + val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) + val sel = typedDynamicSelect(tree2, Nil, pt) + return sel.cast(fieldType) + case _ => assignType(tree, rawType match case rawType: NamedType => diff --git a/library/src/scala/Selectable.scala b/library/src/scala/Selectable.scala index 74004a350679..93c799dd124b 100644 --- a/library/src/scala/Selectable.scala +++ b/library/src/scala/Selectable.scala @@ -22,7 +22,8 @@ package scala * In this case the call will synthesize `Class` arguments for the erasure of * all formal parameter types of the method in the structural type. */ -trait Selectable extends Any +trait Selectable extends Any: + type Fields // TODO: add <: NamedTyple.AnyNamedTuple when NamedTuple is no longer experimental object Selectable: /* Scala 2 compat + allowing for cross-compilation: From 22e6c89282ad64d458a467f115a1a77ae5194854 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Feb 2024 18:52:15 +0100 Subject: [PATCH 283/465] Refactor typedSelect I usually try to avoid explicit returns, but here they do make the code easier to read. 
--- .../src/dotty/tools/dotc/typer/Typer.scala | 176 ++++++++++-------- 1 file changed, 99 insertions(+), 77 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 58f08768d9bf..52908a4cf9bc 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -735,88 +735,110 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkLegalValue(select, pt) ConstFold(select) + // If regular selection is typeable, we are done if checkedType.exists then - finish(tree, qual, checkedType) - else if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then - // Simplify `m.apply(...)` to `m(...)` - qual - else if couldInstantiateTypeVar(qual.tpe.widen) then + return finish(tree, qual, checkedType) + + // Otherwise, simplify `m.apply(...)` to `m(...)` + if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then + return qual + + // Otherwise, if there's a simply visible type variable in the result, try again + // with a more defined qualifier type. There's a second trial where we try to instantiate + // all type variables in `qual.tpe.widen`, but that is done only after we search for + // extension methods or conversions. + if couldInstantiateTypeVar(qual.tpe.widen) then // there's a simply visible type variable in the result; try again with a more defined qualifier type // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, // but that is done only after we search for extension methods or conversions. 
- typedSelect(tree, pt, qual) - else - val namedTupleElems = qual.tpe.widen.namedTupleElementTypes - val nameIdx = namedTupleElems.indexWhere(_._1 == selName) - if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then - typed( - untpd.Apply( - untpd.Select(untpd.TypedSplice(qual), nme.apply), - untpd.Literal(Constant(nameIdx))), - pt) - else if qual.tpe.isSmallGenericTuple then - val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) - typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) + return typedSelect(tree, pt, qual) + + // Otherwise, try to expand a named tuple selection + val namedTupleElems = qual.tpe.widen.namedTupleElementTypes + val nameIdx = namedTupleElems.indexWhere(_._1 == selName) + if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then + return typed( + untpd.Apply( + untpd.Select(untpd.TypedSplice(qual), nme.apply), + untpd.Literal(Constant(nameIdx))), + pt) + + // Otherwise, map combinations of A *: B *: .... EmptyTuple with nesting levels <= 22 + // to the Tuple class of the right arity and select from that one + if qual.tpe.isSmallGenericTuple then + val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) + return typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) + + // Otherwise try an extension or conversion + if selName.isTermName then + val tree1 = tryExtensionOrConversion( + tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) + if !tree1.isEmpty then + return tree1 + + // Otherwise, try a GADT approximation if we're trying to select a member + // Member lookup cannot take GADTs into account b/c of cache, so we + // approximate types based on GADT constraints instead. For an example, + // see MemberHealing in gadt-approximation-interaction.scala. 
+ if ctx.gadt.isNarrowing then + val wtp = qual.tpe.widen + gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") + val gadtApprox = Inferencing.approximateGADT(wtp) + gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") + val qual1 = qual.cast(gadtApprox) + val tree1 = cpy.Select(tree0)(qual1, selName) + val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) + if checkedType1.exists then + gadts.println(i"Member selection healed by GADT approximation") + return finish(tree1, qual1, checkedType1) + + if qual1.tpe.isSmallGenericTuple then + gadts.println(i"Tuple member selection healed by GADT approximation") + return typedSelect(tree, pt, qual1) + + val tree2 = tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) + if !tree2.isEmpty then + return tree2 + + // Otherwise, if there are uninstantiated type variables in the qualifier type, + // instantiate them and try again + if canDefineFurther(qual.tpe.widen) then + return typedSelect(tree, pt, qual) + + def dynamicSelect = + val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) + if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then + assignType(tree2, TryDynamicCallType) else - val tree1 = - if selName.isTypeName then EmptyTree - else tryExtensionOrConversion( - tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) - .orElse { - if ctx.gadt.isNarrowing then - // try GADT approximation if we're trying to select a member - // Member lookup cannot take GADTs into account b/c of cache, so we - // approximate types based on GADT constraints instead. For an example, - // see MemberHealing in gadt-approximation-interaction.scala. 
- val wtp = qual.tpe.widen - gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") - val gadtApprox = Inferencing.approximateGADT(wtp) - gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") - val qual1 = qual.cast(gadtApprox) - val tree1 = cpy.Select(tree0)(qual1, selName) - val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) - if checkedType1.exists then - gadts.println(i"Member selection healed by GADT approximation") - finish(tree1, qual1, checkedType1) - else if qual1.tpe.isSmallGenericTuple then - gadts.println(i"Tuple member selection healed by GADT approximation") - typedSelect(tree, pt, qual1) - else - tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) - else EmptyTree - } - if !tree1.isEmpty then - tree1 - else if canDefineFurther(qual.tpe.widen) then - typedSelect(tree, pt, qual) - else if qual.tpe.derivesFrom(defn.DynamicClass) - && selName.isTermName && !isDynamicExpansion(tree) - then - val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) - if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then - assignType(tree2, TryDynamicCallType) - else - typedDynamicSelect(tree2, Nil, pt) - else - if qual.tpe.derivesFrom(defn.SelectableClass) - && selName.isTermName && !isDynamicExpansion(tree) - && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto - then - val fieldsType = qual.tpe.select(tpnme.Fields).dealias.simplified - val fields = fieldsType.namedTupleElementTypes - typr.println(i"try dyn select $qual, $selName, $fields") - fields.find(_._1 == selName) match - case Some((fieldName, fieldType)) => - val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) - val sel = typedDynamicSelect(tree2, Nil, pt) - return sel.cast(fieldType) - case _ => - assignType(tree, - rawType match - case rawType: NamedType => - inaccessibleErrorType(rawType, superAccess, tree.srcPos) - case _ => - notAMemberErrorType(tree, qual, pt)) + 
typedDynamicSelect(tree2, Nil, pt) + + // Otherwise, if the qualifier derives from class Dynamic, expand to a + // dynamic dispatch using selectDynamic or applyDynamic + if qual.tpe.derivesFrom(defn.DynamicClass) && selName.isTermName && !isDynamicExpansion(tree) then + return dynamicSelect + + // Otherwise, if the qualifier derives from class Selectable, + // and the selector name matches one of the element of the `Fields` type member, + // and the selector is neither applied nor assigned to, + // expand to a typed dynamic dispatch using selectDynamic wrapped in a cast + if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) + && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto + then + val fieldsType = qual.tpe.select(tpnme.Fields).dealias.simplified + val fields = fieldsType.namedTupleElementTypes + typr.println(i"try dyn select $qual, $selName, $fields") + fields.find(_._1 == selName) match + case Some((_, fieldType)) => + return dynamicSelect.cast(fieldType) + case _ => + + // Otherwise, report an error + assignType(tree, + rawType match + case rawType: NamedType => + inaccessibleErrorType(rawType, superAccess, tree.srcPos) + case _ => + notAMemberErrorType(tree, qual, pt)) end typedSelect def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { From d0888f61e830f517fb81ddddef68eb4aca088aca Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Feb 2024 19:17:54 +0100 Subject: [PATCH 284/465] Add section on computable field names to reference doc page --- .../reference/experimental/named-tuples.md | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md index 6ee8bc9bcdec..95bcfb1d10fe 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -195,3 +195,44 @@ then c f (age = 1) ``` will now construct a tuple as second operand instead of passing a 
named parameter. + +### Computed Field Names + +The `Selectable` trait now has a `Fields` type member that can be instantiated +to a named tuple. + +```scala +trait Selectable: + type Fields <: NamedTuple.AnyNamedTuple +``` + +If `Fields` is instantiated in a subclass of `Selectable` to some named tuple type, +then the available fields and their types will be defined by that type. Assume `n: T` +is an element of the `Fields` type in some class `C` that implements `Selectable`, +that `c: C`, and that `n` is not otherwise legal as a name of a selection on `c`. +Then `c.n` is a legal selection, which expands to `c.selectDynamic("n").asInstanceOf[T]`. + +It is the task of the implementation of `selectDynamic` in `C` to ensure that its +computed result conforms to the predicted type `T` + +As an example, assume we have a query type `Q[T]` defined as follows: + +```scala +trait Q[T] extends Selectable: + type Fields = NamedTuple.Map[NamedTuple.From[T], Q] + def selectDynamic(fieldName: String) = ... +``` + +Assume in the user domain: +```scala +case class City(zipCode: Int, name: String, population: Int) +val city: Q[City] +``` +Then +```scala +city.zipCode +``` +has type `Q[Int]` and it expands to +```scala +city.selectDynamic("zipCode").asInstanceOf[Q[Int]] +``` From 69964b0f02c3f86c98f3a0c1ed3ba180499b4af9 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 6 Feb 2024 16:35:49 +0100 Subject: [PATCH 285/465] Fix fields as a selectable type member The selectDynamic call could already have influenced type variables in the expected type before we wrap it in a cast. Need to pass in the right expected type to the typedDynamicSelect. 
--- compiler/src/dotty/tools/dotc/typer/Typer.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 52908a4cf9bc..e771df49af94 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -805,7 +805,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if canDefineFurther(qual.tpe.widen) then return typedSelect(tree, pt, qual) - def dynamicSelect = + def dynamicSelect(pt: Type) = val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then assignType(tree2, TryDynamicCallType) @@ -815,7 +815,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Otherwise, if the qualifier derives from class Dynamic, expand to a // dynamic dispatch using selectDynamic or applyDynamic if qual.tpe.derivesFrom(defn.DynamicClass) && selName.isTermName && !isDynamicExpansion(tree) then - return dynamicSelect + return dynamicSelect(pt) // Otherwise, if the qualifier derives from class Selectable, // and the selector name matches one of the element of the `Fields` type member, @@ -829,7 +829,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typr.println(i"try dyn select $qual, $selName, $fields") fields.find(_._1 == selName) match case Some((_, fieldType)) => - return dynamicSelect.cast(fieldType) + return dynamicSelect(fieldType).ensureConforms(fieldType) case _ => // Otherwise, report an error From a3409e0b884b3ae9ed7209c225f6de34a436a3af Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 6 Feb 2024 16:36:59 +0100 Subject: [PATCH 286/465] Make NamedTuple.From work for named tuple arguments NamedTyple.From should be the identity for named tuple arguments --- compiler/src/dotty/tools/dotc/core/TypeEval.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index b16a89a1aeb4..af4f1e0153dd 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -109,8 +109,9 @@ object TypeEval: Some: defn.NamedTupleTypeRef.appliedTo: nestedPairs(fieldLabels) :: nestedPairs(fieldTypes) :: Nil - else - None + else arg.widenDealias match + case arg @ defn.NamedTuple(_, _) => Some(arg) + case _ => None def constantFold1[T](extractor: Type => Option[T], op: T => Any): Option[Type] = expectArgsNum(1) From 111674c3f209a988efff4782452cc4ea16378ddd Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 7 Feb 2024 14:07:44 +0100 Subject: [PATCH 287/465] Fix NamedArg term/type classification --- compiler/src/dotty/tools/dotc/ast/Trees.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 41899ed661f5..f45415b9ce0b 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -555,6 +555,8 @@ object Trees { case class NamedArg[+T <: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { type ThisTree[+T <: Untyped] = NamedArg[T] + override def isTerm = arg.isTerm + override def isType = arg.isType } /** name = arg, outside a parameter list */ From 1fd5962f2c1bb2a1eb6c8fb5e0da0156a780f8dd Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 10 Feb 2024 12:47:43 +0100 Subject: [PATCH 288/465] Rename inMode to withModeBits inMode is useful, but its name can easily be misinterpreted. Hopefully, withModeBits is better. Fixes rebase breakage where inMode was made private, but needs to be public since it is now used in Desugar. 
--- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 2 +- compiler/src/dotty/tools/dotc/core/Contexts.scala | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 39a30a185c72..9be2f9ea64a4 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1493,7 +1493,7 @@ object desugar { if names.isEmpty || ctx.mode.is(Mode.Pattern) then tup else - def namesTuple = inMode(ctx.mode &~ Mode.Pattern | Mode.Type): + def namesTuple = withModeBits(ctx.mode &~ Mode.Pattern | Mode.Type): tuple(Tuple( names.map: name => SingletonTypeTree(Literal(Constant(name.toString))).withSpan(tree.span)), diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 29cb83000fde..d0c30a665289 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -95,14 +95,14 @@ object Contexts { inline def atPhaseNoEarlier[T](limit: Phase)(inline op: Context ?=> T)(using Context): T = op(using if !limit.exists || limit <= ctx.phase then ctx else ctx.withPhase(limit)) - inline private def inMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = + inline def withModeBits[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = op(using if mode != ctx.mode then ctx.fresh.setMode(mode) else ctx) inline def withMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = - inMode(ctx.mode | mode)(op) + withModeBits(ctx.mode | mode)(op) inline def withoutMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = - inMode(ctx.mode &~ mode)(op) + withModeBits(ctx.mode &~ mode)(op) /** A context is passed basically everywhere in dotc. 
* This is convenient but carries the risk of captured contexts in From c0b792f659f07913206df5cd4d3188df4d31f61d Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 10 Feb 2024 13:30:19 +0100 Subject: [PATCH 289/465] Revert NamedArg term/type classification Strip NamedArgs instead when making a term/type classification of tuples. The previous classification of NamedArgs as terms or types broke the completion test suite. It seems that completion relies on NamedArgs being neither types nor terms. And in a sense, that's correct. They are not. --- compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 4 ++++ compiler/src/dotty/tools/dotc/ast/Trees.scala | 2 -- compiler/src/dotty/tools/dotc/ast/untpd.scala | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index fbf3cfe163b2..6ad3c6a41e98 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -108,6 +108,10 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree + def stripNamedArg(tree: Tree) = tree match + case NamedArg(_, arg) => arg + case _ => tree + /** The number of arguments in an application */ def numArgs(tree: Tree): Int = unsplice(tree) match { case Apply(fn, args) => numArgs(fn) + args.length diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index f45415b9ce0b..41899ed661f5 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -555,8 +555,6 @@ object Trees { case class NamedArg[+T <: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { type ThisTree[+T <: Untyped] = NamedArg[T] - override def isTerm = arg.isTerm - override def isType = arg.isType } /** name = arg, outside a parameter list */ diff --git 
a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index fa13bd6610ba..0dfe52c421d9 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -107,7 +107,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def forwardTo: Tree = t } case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree { - override def isTerm: Boolean = trees.isEmpty || trees.head.isTerm + override def isTerm: Boolean = trees.isEmpty || stripNamedArg(trees.head).isTerm override def isType: Boolean = !isTerm } case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree From 611861b6015ac32ebb94c87e0193dcdec0c7f3c8 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 10 Feb 2024 13:43:45 +0100 Subject: [PATCH 290/465] Update MimaFilters --- project/MiMaFilters.scala | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 5ccb70ad6fdf..064bb9cc2260 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -10,6 +10,14 @@ object MiMaFilters { Build.mimaPreviousDottyVersion -> Seq( ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), ProblemFilters.exclude[FinalClassProblem]("scala.annotation.experimental"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Tuple.fromArray"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Tuple.fromIArray"), + ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple.helpers"), + ProblemFilters.exclude[MissingClassProblem]("scala.Tuple$helpers$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromArray"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromIArray"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), + 
ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), ), // Additions since last LTS From 640da16e7f337084f6b8cb7a95b1481b6fbc85e0 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 16 Feb 2024 18:34:05 +0100 Subject: [PATCH 291/465] Fix rebase breakage in Parsers.scala --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index d24dd2882ad6..e1f355f30c40 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1660,7 +1660,6 @@ object Parsers { NamedArg(name, convertToElem(tpt)).withSpan(t.span) case _ => t - var isValParamList = false if in.token == LPAREN then in.nextToken() if in.token == RPAREN then @@ -1676,7 +1675,6 @@ object Parsers { in.currentRegion.withCommasExpected: funArgType() match case Ident(name) if name != tpnme.WILDCARD && in.isColon => - isValParamList = true def funParam(start: Offset, mods: Modifiers) = atSpan(start): addErased() @@ -1714,9 +1712,9 @@ object Parsers { cpy.Function(arg)(args, sanitize(res)) case arg => arg - val args1 = args.mapConserve(sanitize) - if isValParamList || in.isArrow || isPureArrow then + + if in.isArrow || isPureArrow || erasedArgs.contains(true) then functionRest(args) else val tuple = atSpan(start): From 67c0af2bd2e38d1418ad3ff294d938b57ccb9c7d Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 16 Feb 2024 18:34:55 +0100 Subject: [PATCH 292/465] Add tyql test case It now works with just named tuples, no other extensions are needed. 
--- tests/run/tyql.scala | 205 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 205 insertions(+) create mode 100644 tests/run/tyql.scala diff --git a/tests/run/tyql.scala b/tests/run/tyql.scala new file mode 100644 index 000000000000..35777e9a4c13 --- /dev/null +++ b/tests/run/tyql.scala @@ -0,0 +1,205 @@ +import language.experimental.namedTuples +import NamedTuple.{NamedTuple, AnyNamedTuple} + +/* This is a demonstrator that shows how to map regular for expressions to + * internal data that can be optimized by a query engine. It needs NamedTuples + * and type classes but no macros. It's so far very provisional and experimental, + * intended as a basis for further exploration. + */ + +/** The type of expressions in the query language */ +trait Expr[Result] extends Selectable: + + /** This type is used to support selection with any of the field names + * defined by Fields. + */ + type Fields = NamedTuple.Map[NamedTuple.From[Result], Expr] + + /** A selection of a field name defined by Fields is implemented by `selectDynamic`. + * The implementation will add a cast to the right Expr type corresponding + * to the field type. + */ + def selectDynamic(fieldName: String) = Expr.Select(this, fieldName) + + /** Member methods to implement universal equality on Expr level. */ + def == (other: Expr[?]): Expr[Boolean] = Expr.Eq(this, other) + def != (other: Expr[?]): Expr[Boolean] = Expr.Ne(this, other) + +object Expr: + + /** Sample extension methods for individual types */ + extension (x: Expr[Int]) + def > (y: Expr[Int]): Expr[Boolean] = Gt(x, y) + def > (y: Int): Expr[Boolean] = Gt(x, IntLit(y)) + extension (x: Expr[Boolean]) + def &&(y: Expr[Boolean]): Expr[Boolean] = And(x, y) + def || (y: Expr[Boolean]): Expr[Boolean] = Or(x, y) + + // Note: All field names of constructors in the query language are prefixed with `$` + // so that we don't accidentally pick a field name of a constructor class where we want + // a name in the domain model instead. 
+ + // Some sample constructors for Exprs + case class Gt($x: Expr[Int], $y: Expr[Int]) extends Expr[Boolean] + case class Plus(x: Expr[Int], y: Expr[Int]) extends Expr[Int] + case class And($x: Expr[Boolean], $y: Expr[Boolean]) extends Expr[Boolean] + case class Or($x: Expr[Boolean], $y: Expr[Boolean]) extends Expr[Boolean] + + // So far Select is weakly typed, so `selectDynamic` is easy to implement. + // Todo: Make it strongly typed like the other cases + case class Select[A]($x: Expr[A], $name: String) extends Expr + + case class Single[S <: String, A]($x: Expr[A]) + extends Expr[NamedTuple[S *: EmptyTuple, A *: EmptyTuple]] + + case class Concat[A <: AnyNamedTuple, B <: AnyNamedTuple]($x: Expr[A], $y: Expr[B]) + extends Expr[NamedTuple.Concat[A, B]] + + case class Join[A <: AnyNamedTuple](a: A) + extends Expr[NamedTuple.Map[A, StripExpr]] + + type StripExpr[E] = E match + case Expr[b] => b + + // Also weakly typed in the arguments since these two classes model universal equality + case class Eq($x: Expr[?], $y: Expr[?]) extends Expr[Boolean] + case class Ne($x: Expr[?], $y: Expr[?]) extends Expr[Boolean] + + /** References are placeholders for parameters */ + private var refCount = 0 + + case class Ref[A]($name: String = "") extends Expr[A]: + val id = refCount + refCount += 1 + override def toString = s"ref$id(${$name})" + + /** Literals are type-specific, tailored to the types that the DB supports */ + case class IntLit($value: Int) extends Expr[Int] + + /** Scala values can be lifted into literals by conversions */ + given Conversion[Int, IntLit] = IntLit(_) + + /** The internal representation of a function `A => B` + * Query languages are usually first-order, so Fun is not an Expr + */ + case class Fun[A, B](param: Ref[A], f: B) + + type Pred[A] = Fun[A, Expr[Boolean]] + + /** Explicit conversion from + * (name_1: Expr[T_1], ..., name_n: Expr[T_n]) + * to + * Expr[(name_1: T_1, ..., name_n: T_n)] + */ + extension [A <: AnyNamedTuple](x: A) def toRow: 
Join[A] = Join(x) + + /** Same as _.toRow, as an implicit conversion */ + given [A <: AnyNamedTuple]: Conversion[A, Expr.Join[A]] = Expr.Join(_) + +end Expr + +/** The type of database queries. So far, we have queries + * that represent whole DB tables and queries that reify + * for-expressions as data. + */ +trait Query[A] + +object Query: + import Expr.{Pred, Fun, Ref} + + case class Filter[A]($q: Query[A], $p: Pred[A]) extends Query[A] + case class Map[A, B]($q: Query[A], $f: Fun[A, Expr[B]]) extends Query[B] + case class FlatMap[A, B]($q: Query[A], $f: Fun[A, Query[B]]) extends Query[B] + + // Extension methods to support for-expression syntax for queries + extension [R](x: Query[R]) + + def withFilter(p: Ref[R] => Expr[Boolean]): Query[R] = + val ref = Ref[R]() + Filter(x, Fun(ref, p(ref))) + + def map[B](f: Ref[R] => Expr[B]): Query[B] = + val ref = Ref[R]() + Map(x, Fun(ref, f(ref))) + + def flatMap[B](f: Ref[R] => Query[B]): Query[B] = + val ref = Ref[R]() + FlatMap(x, Fun(ref, f(ref))) +end Query + +/** The type of query references to database tables */ +case class Table[R]($name: String) extends Query[R] + +// Everything below is code using the model ----------------------------- + +// Some sample types +case class City(zipCode: Int, name: String, population: Int) +type Address = (city: City, street: String, number: Int) +type Person = (name: String, age: Int, addr: Address) + +@main def Test = + + val cities = Table[City]("cities") + + val q1 = cities.map: c => + c.zipCode + val q2 = cities.withFilter: city => + city.population > 10_000 + .map: city => + city.name + + val q3 = + for + city <- cities + if city.population > 10_000 + yield city.name + + val q4 = + for + city <- cities + alt <- cities + if city.name == alt.name && city.zipCode != alt.zipCode + yield + city + + val addresses = Table[Address]("addresses") + val q5 = + for + city <- cities + addr <- addresses + if addr.street == city.name + yield + (name = city.name, num = addr.number) + + val 
q6 = + cities.map: city => + (name = city.name, zipCode = city.zipCode) + + def run[T](q: Query[T]): Iterator[T] = ??? + + def x1: Iterator[Int] = run(q1) + def x2: Iterator[String] = run(q2) + def x3: Iterator[String] = run(q3) + def x4: Iterator[City] = run(q4) + def x5: Iterator[(name: String, num: Int)] = run(q5) + def x6: Iterator[(name: String, zipCode: Int)] = run(q6) + + println(q1) + println(q2) + println(q3) + println(q4) + println(q5) + println(q6) + +/* The following is not needed currently + +/** A type class for types that can map to a database table */ +trait Row: + type Self + type Fields = NamedTuple.From[Self] + type FieldExprs = NamedTuple.Map[Fields, Expr] + + //def toFields(x: Self): Fields = ??? + //def fromFields(x: Fields): Self = ??? + +*/ \ No newline at end of file From 2cd5d7e0c626a90ce0508cb44290a546eb4d5527 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 17 Feb 2024 17:48:21 +0100 Subject: [PATCH 293/465] Introduce auto-conversion from named tuples to tuples --- .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../dotty/tools/dotc/typer/Implicits.scala | 2 + .../src/dotty/tools/dotc/typer/Typer.scala | 4 +- .../reference/experimental/named-tuples.md | 35 +++++++-- tests/neg/named-tuples.check | 76 ++++++++----------- tests/neg/named-tuples.scala | 18 +++-- tests/run/named-tuples.scala | 7 ++ 7 files changed, 86 insertions(+), 57 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index dd8e23a0030f..62d7afa22ed2 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -623,6 +623,7 @@ object StdNames { val throws: N = "throws" val toArray: N = "toArray" val toList: N = "toList" + val toTuple: N = "toTuple" val toObjectArray : N = "toObjectArray" val toSeq: N = "toSeq" val toString_ : N = "toString" diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala 
b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 9f2e0628e70e..bc19e97b85d8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -857,6 +857,8 @@ trait Implicits: || inferView(dummyTreeOfType(from), to) (using ctx.fresh.addMode(Mode.ImplicitExploration).setExploreTyperState()).isSuccess // TODO: investigate why we can't TyperState#test here + || from.widen.isNamedTupleType && to.derivesFrom(defn.TupleClass) + && from.widen.stripNamedTuple <:< to ) /** Find an implicit conversion to apply to given tree `from` so that the diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index e771df49af94..c392c195482c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4319,7 +4319,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _: SelectionProto => tree // adaptations for selections are handled in typedSelect case _ if ctx.mode.is(Mode.ImplicitsEnabled) && tree.tpe.isValueType => - if pt.isRef(defn.AnyValClass, skipRefined = false) + if tree.tpe.widen.isNamedTupleType && pt.derivesFrom(defn.TupleClass) then + readapt(typed(untpd.Select(untpd.TypedSplice(tree), nme.toTuple))) + else if pt.isRef(defn.AnyValClass, skipRefined = false) || pt.isRef(defn.ObjectClass, skipRefined = false) then recover(TooUnspecific(pt)) diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md index 95bcfb1d10fe..8eded366dc87 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -22,25 +22,45 @@ name for two different elements. Fields of named tuples can be selected by their name, as in the line `p.age < 18` above. -### Conformance +### Conformance and Convertibility The order of names in a named tuple matters. 
For instance, the type `Person` above and the type `(age: Int, name: String)` would be different, incompatible types. Values of named tuple types can also be be defined using regular tuples. For instance: ```scala -val x: Person = ("Laura", 25) +val Laura: Person = ("Laura", 25) def register(person: Person) = ... register(person = ("Silvain", 16)) register(("Silvain", 16)) ``` -This follows since a regular tuple `(T_1, ..., T_n)` is treated as a subtype of a named tuple `(N_1 = T_1, ..., N_n = T_n)` with the same element types. On the other hand, named tuples do not conform to unnamed tuples, so the following is an error: +This follows since a regular tuple `(T_1, ..., T_n)` is treated as a subtype of a named tuple `(N_1 = T_1, ..., N_n = T_n)` with the same element types. + +In the other direction, one can convert a named tuple to an unnamed tuple with the `toTuple` method. Example: +```scala +val x: (String, Int) = Bob.toTuple // ok +``` +`toTuple` is defined as an extension method in the `NamedTuple` object. +It returns the given tuple unchanged and simply "forgets" the names. + +A `.toTuple` selection is inserted implicitly by the compiler if it encounters a named tuple but the expected type is a regular tuple. So the following works as well: ```scala -val x: (String, Int) = Bob // error: type mismatch +val x: (String, Int) = Bob // works, expanded to Bob.toTuple ``` -One can convert a named tuple to an unnamed tuple with the `toTuple` method, so the following works: +The difference between subtyping in one direction and automatic `.toTuple` conversions in the other is relatively minor. The main difference is that `.toTuple` conversions don't work inside type constructors. So the following is OK: ```scala -val x: (String, Int) = Bob.toTuple // ok + val names = List("Laura", "Silvain") + val ages = List(25, 16) + val persons: List[Person] = names.zip(ages) +``` +But the following would be illegal. 
+```scala + val persons: List[Person] = List(Bob, Laura) + val pairs: List[(String, Int)] = persons // error +``` +We would need an explicit `_.toTuple` selection to express this: +```scala + val pairs: List[(String, Int)] = persons.map(_.toTuple) ``` Note that conformance rules for named tuples are analogous to the rules for named parameters. One can assign parameters by position to a named parameter list. ```scala @@ -54,8 +74,7 @@ But one cannot use a name to pass an argument to an unnamed parameter: f(2) // OK f(param = 2) // Not OK ``` -The rules for tuples are analogous. Unnamed tuples conform to named tuple types, but the opposite does not hold. - +The rules for tuples are analogous. Unnamed tuples conform to named tuple types, but the opposite requires a conversion. ### Pattern Matching diff --git a/tests/neg/named-tuples.check b/tests/neg/named-tuples.check index d9b6d686a587..9735879fc494 100644 --- a/tests/neg/named-tuples.check +++ b/tests/neg/named-tuples.check @@ -14,20 +14,6 @@ 12 | type Illformed2 = (name: String, age: Int, name: Boolean) // error | ^^^^^^^^^^^^^ | Duplicate tuple element name --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:18:25 ------------------------------------------------------ -18 | val y: (String, Int) = person // error - | ^^^^^^ - | Found: (Test.person : (name : String, age : Int)) - | Required: (String, Int) - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:19:25 ------------------------------------------------------ -19 | val _: (String, Int) = (name = "", age = 0) // error - | ^^^^^^^^^^^^^^^^^^^^ - | Found: (name : String, age : Int) - | Required: (String, Int) - | - | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:20:20 ------------------------------------------------------ 20 | val _: NameOnly = person // error | ^^^^^^ @@ -42,66 +28,70 @@ | Required: 
Test.Person | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:23:36 ------------------------------------------------------ -23 | val _: (age: Int, name: String) = person // error +-- [E008] Not Found Error: tests/neg/named-tuples.scala:22:9 ----------------------------------------------------------- +22 | person._1 // error + | ^^^^^^^^^ + | value _1 is not a member of (name : String, age : Int) +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:24:36 ------------------------------------------------------ +24 | val _: (age: Int, name: String) = person // error | ^^^^^^ | Found: (Test.person : (name : String, age : Int)) | Required: (age : Int, name : String) | | longer explanation available when compiling with `-explain` --- Error: tests/neg/named-tuples.scala:25:17 --------------------------------------------------------------------------- -25 | val (name = x, agee = y) = person // error +-- Error: tests/neg/named-tuples.scala:26:17 --------------------------------------------------------------------------- +26 | val (name = x, agee = y) = person // error | ^^^^^^^^ | No element named `agee` is defined in selector type (name : String, age : Int) --- Error: tests/neg/named-tuples.scala:28:10 --------------------------------------------------------------------------- -28 | case (name = n, age = a) => () // error // error +-- Error: tests/neg/named-tuples.scala:29:10 --------------------------------------------------------------------------- +29 | case (name = n, age = a) => () // error // error | ^^^^^^^^ | No element named `name` is defined in selector type (String, Int) --- Error: tests/neg/named-tuples.scala:28:20 --------------------------------------------------------------------------- -28 | case (name = n, age = a) => () // error // error +-- Error: tests/neg/named-tuples.scala:29:20 --------------------------------------------------------------------------- +29 | case (name 
= n, age = a) => () // error // error | ^^^^^^^ | No element named `age` is defined in selector type (String, Int) --- [E172] Type Error: tests/neg/named-tuples.scala:30:27 --------------------------------------------------------------- -30 | val pp = person ++ (1, 2) // error +-- [E172] Type Error: tests/neg/named-tuples.scala:31:27 --------------------------------------------------------------- +31 | val pp = person ++ (1, 2) // error | ^ | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). --- [E172] Type Error: tests/neg/named-tuples.scala:33:18 --------------------------------------------------------------- -33 | person ++ (1, 2) match // error +-- [E172] Type Error: tests/neg/named-tuples.scala:34:18 --------------------------------------------------------------- +34 | person ++ (1, 2) match // error | ^ | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). --- Error: tests/neg/named-tuples.scala:36:17 --------------------------------------------------------------------------- -36 | val bad = ("", age = 10) // error +-- Error: tests/neg/named-tuples.scala:37:17 --------------------------------------------------------------------------- +37 | val bad = ("", age = 10) // error | ^^^^^^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:39:20 --------------------------------------------------------------------------- -39 | case (name = n, age) => () // error +-- Error: tests/neg/named-tuples.scala:40:20 --------------------------------------------------------------------------- +40 | case (name = n, age) => () // error | ^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:40:16 --------------------------------------------------------------------------- -40 | case (name, age = a) => () // error +-- Error: tests/neg/named-tuples.scala:41:16 
--------------------------------------------------------------------------- +41 | case (name, age = a) => () // error | ^^^^^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:43:10 --------------------------------------------------------------------------- -43 | case (age = x) => // error +-- Error: tests/neg/named-tuples.scala:44:10 --------------------------------------------------------------------------- +44 | case (age = x) => // error | ^^^^^^^ | No element named `age` is defined in selector type Tuple --- [E172] Type Error: tests/neg/named-tuples.scala:45:27 --------------------------------------------------------------- -45 | val p2 = person ++ person // error +-- [E172] Type Error: tests/neg/named-tuples.scala:46:27 --------------------------------------------------------------- +46 | val p2 = person ++ person // error | ^ |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("name" : String), ("age" : String))] =:= (true : Boolean). --- [E172] Type Error: tests/neg/named-tuples.scala:46:43 --------------------------------------------------------------- -46 | val p3 = person ++ (first = 11, age = 33) // error +-- [E172] Type Error: tests/neg/named-tuples.scala:47:43 --------------------------------------------------------------- +47 | val p3 = person ++ (first = 11, age = 33) // error | ^ |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("first" : String), ("age" : String))] =:= (true : Boolean). 
--- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:48:22 ------------------------------------------------------ -48 | val p5 = person.zip(first = 11, age = 33) // error - | ^^^^^^^^^^^^^^^^^^^^ +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:49:22 ------------------------------------------------------ +49 | val p5 = person.zip((first = 11, age = 33)) // error + | ^^^^^^^^^^^^^^^^^^^^^^ | Found: (first : Int, age : Int) | Required: NamedTuple.NamedTuple[(("name" : String), ("age" : String)), Tuple] | | longer explanation available when compiling with `-explain` --- Warning: tests/neg/named-tuples.scala:25:29 ------------------------------------------------------------------------- -25 | val (name = x, agee = y) = person // error +-- Warning: tests/neg/named-tuples.scala:26:29 ------------------------------------------------------------------------- +26 | val (name = x, agee = y) = person // error | ^^^^^^ |pattern's type (String, Int) is more specialized than the right hand side expression's type (name : String, age : Int) | diff --git a/tests/neg/named-tuples.scala b/tests/neg/named-tuples.scala index 7dcf2221ec40..b2148244e1ba 100644 --- a/tests/neg/named-tuples.scala +++ b/tests/neg/named-tuples.scala @@ -15,10 +15,11 @@ import language.experimental.namedTuples val nameOnly = (name = "Louis") - val y: (String, Int) = person // error - val _: (String, Int) = (name = "", age = 0) // error + val y: (String, Int) = person // ok, conversion + val _: (String, Int) = (name = "", age = 0) // ok, conversion val _: NameOnly = person // error val _: Person = nameOnly // error + person._1 // error val _: (age: Int, name: String) = person // error @@ -45,8 +46,15 @@ import language.experimental.namedTuples val p2 = person ++ person // error val p3 = person ++ (first = 11, age = 33) // error val p4 = person.zip(person) // ok - val p5 = person.zip(first = 11, age = 33) // error - - + val p5 = person.zip((first = 11, age = 33)) // error + // Note: this one 
depends on the details of the conversion named -> unnamed + // we do a conversion only of the expected type is a tuple. If we used a + // regular implicit conversion, then (first = 11, age = 33) would be converted + // to (Int, Int) and that would be upcast to (name: Int, age: Int), which + // would hide an error. So we have be careful that the "downwards" conversion + // is specific and does not apply to a different "upwards" type. + // The same problem happens if we assume named <: unnamed. In that case we would first + // upcast (first: Int, age: Int) to (Int, Int), and then use the downwards + // conversion to (name: Int, age: Int). This one would be harder to guard against. diff --git a/tests/run/named-tuples.scala b/tests/run/named-tuples.scala index 29b058adab18..676c21a0e434 100644 --- a/tests/run/named-tuples.scala +++ b/tests/run/named-tuples.scala @@ -100,6 +100,13 @@ val _: CombinedInfo = bob ++ addr val addr4 = addr3.zip("Preverenges", 1028) println(addr4) + // testing conversions +object Conv: + + val p: (String, Int) = bob + def f22(x: (String, Int)) = x._1 + def f22(x: String) = x + f22(bob) From d8b7595f48898fd9cf77bf0b777217fc93b0f869 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 18 Feb 2024 12:19:42 +0100 Subject: [PATCH 294/465] Update documentation following review suggestions Co-authored-by: Dale Wijnand --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 4 +--- .../reference/experimental/named-tuples.md | 9 +++++++-- library/src/scala/NamedTuple.scala | 17 ++++++++++++++--- library/src/scala/Tuple.scala | 4 ++++ 4 files changed, 26 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 9be2f9ea64a4..537822d67594 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1466,7 +1466,7 @@ object desugar { case _ => elems end checkWellFormedTupleElems - /** Translate tuple expressions of 
arity <= 22 + /** Translate tuple expressions * * () ==> () * (t) ==> t @@ -1509,8 +1509,6 @@ object desugar { * expected type `pt` to each other. This means: * - If `elems` are named pattern elements, rearrange them to match `pt`. * This requires all names in `elems` to be also present in `pt`. - * - If `elems` are unnamed elements, and `pt` is a named tuple, drop all - * tuple element names from `pt`. */ def adaptPatternArgs(elems: List[Tree], pt: Type)(using Context): List[Tree] = diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md index 8eded366dc87..6a0baebe36e6 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -127,7 +127,8 @@ The `NamedTuple` object contains a type definition ``` `From` is treated specially by the compiler. When `NamedTuple.From` is applied to an argument type that is an instance of a case class, the type expands to the named -tuple consisting of all the fields of that case class. Here, fields means: elements of the first parameter section. For instance, assuming +tuple consisting of all the fields of that case class. +Here, _fields_ means: elements of the first parameter section. For instance, assuming ```scala case class City(zip: Int, name: String, population: Int) ``` @@ -135,7 +136,11 @@ then `NamedTuple.From[City]` is the named tuple ```scala (zip: Int, name: String, population: Int) ``` -The same works for enum cases expanding to case classes. +The same works for enum cases expanding to case classes, abstract types with case classes as upper bound, alias types expanding to case classes +and singleton types with case classes as underlying type. + +`From` is also defined on named tuples. If `NT` is a named tuple type, then `From[NT] = NT`. 
+ ### Restrictions diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index b06bc599f9fd..a787ea85d37c 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -5,9 +5,16 @@ import compiletime.ops.boolean.* @experimental object NamedTuple: - opaque type AnyNamedTuple = Any + /** The type to which named tuples get mapped to. For instance, + * (name: String, age: Int) + * gets mapped to + * NamedTuple[("name", "age"), (String, Int)] + */ opaque type NamedTuple[N <: Tuple, +V <: Tuple] >: V <: AnyNamedTuple = V + /** A type which is a supertype of all named tuples */ + opaque type AnyNamedTuple = Any + def apply[N <: Tuple, V <: Tuple](x: V): NamedTuple[N, V] = x def unapply[N <: Tuple, V <: Tuple](x: NamedTuple[N, V]): Some[V] = Some(x) @@ -25,8 +32,8 @@ object NamedTuple: /** The number of elements in this tuple */ inline def size: Tuple.Size[V] = toTuple.size - // This intentionally works for empty named tuples as well. I think NnEmptyTuple is a dead end - // and should be reverted, justy like NonEmptyList is also appealing at first, but a bad idea + // This intentionally works for empty named tuples as well. I think NonEmptyTuple is a dead end + // and should be reverted, just like NonEmptyList is also appealing at first, but a bad idea // in the end. /** The value (without the name) at index `n` of this tuple */ @@ -175,6 +182,10 @@ object NamedTuple: case true => NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] + /** A type specially treated by the compiler to represent all fields of a + * class argument `T` as a named tuple. Or, if `T` is already a named tyuple, + * `From[T]` is the same as `T`. 
+ */ type From[T] <: AnyNamedTuple end NamedTuple diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index e128fa8f0e81..8bd78013210b 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -114,6 +114,10 @@ object Tuple: case S[n1] => Elem[xs, n1] /** The type of the first element of a tuple */ + // Only bounded by `<: Tuple` not `<: NonEmptyTuple` + // even though it only matches non-empty tuples. + // Avoids bounds check failures from an irreducible type + // like `Tuple.Head[Tuple.Tail[X]]` type Head[X <: Tuple] = X match case x *: _ => x From e0eb2471759e42fb88187a194825c787fb6530f9 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 24 Apr 2024 18:50:16 +0200 Subject: [PATCH 295/465] Drop TreeInfo's `dropNamedArg` --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 4 ++-- compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 4 ---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 537822d67594..ff260c0efc16 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1461,7 +1461,7 @@ object desugar { mismatchOpt match case Some(misMatch) => report.error(em"Illegal combination of named and unnamed tuple elements", misMatch.srcPos) - elems.mapConserve(dropNamedArg) + elems.mapConserve(stripNamedArg) case None => elems case _ => elems end checkWellFormedTupleElems @@ -1475,7 +1475,7 @@ object desugar { def tuple(tree: Tuple, pt: Type)(using Context): Tree = var elems = checkWellFormedTupleElems(tree.trees) if ctx.mode.is(Mode.Pattern) then elems = adaptPatternArgs(elems, pt) - val elemValues = elems.mapConserve(dropNamedArg) + val elemValues = elems.mapConserve(stripNamedArg) val tup = val arity = elems.length if arity <= Definitions.MaxTupleArity then diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala 
b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 6ad3c6a41e98..941e7b8f1219 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -248,10 +248,6 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[?]] - def dropNamedArg(arg: Tree) = arg match - case NamedArg(_, arg1) => arg1 - case arg => arg - /** Is this pattern node a catch-all (wildcard or variable) pattern? */ def isDefaultCase(cdef: CaseDef): Boolean = cdef match { case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) From 4279a5828c691925b3840255d127470f5dd51eff Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 24 Apr 2024 18:50:43 +0200 Subject: [PATCH 296/465] Print wildcard types in named tuples correctly --- .../tools/dotc/printing/RefinedPrinter.scala | 2 +- tests/neg/named-tuples.check | 71 +++++++++++-------- tests/neg/named-tuples.scala | 3 +- 3 files changed, 44 insertions(+), 32 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 5e15fd2ddd15..0329f0639d87 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -207,7 +207,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextNamedTuple(elems: List[(TermName, Type)]): Text = val elemsText = atPrec(GlobalPrec): - Text(elems.map((name, tp) => toText(name) ~ " : " ~ toText(tp)), ", ") + Text(elems.map((name, tp) => toText(name) ~ " : " ~ argText(tp)), ", ") "(" ~ elemsText ~ ")" def isInfixType(tp: Type): Boolean = tp match diff --git a/tests/neg/named-tuples.check b/tests/neg/named-tuples.check index 9735879fc494..791a10f20b16 100644 --- a/tests/neg/named-tuples.check +++ b/tests/neg/named-tuples.check @@ -28,70 
+28,81 @@ | Required: Test.Person | | longer explanation available when compiling with `-explain` --- [E008] Not Found Error: tests/neg/named-tuples.scala:22:9 ----------------------------------------------------------- -22 | person._1 // error +-- [E172] Type Error: tests/neg/named-tuples.scala:22:41 --------------------------------------------------------------- +22 | val _: Person = (name = "") ++ nameOnly // error + | ^ + | Cannot prove that Tuple.Disjoint[Tuple1[("name" : String)], Tuple1[("name" : String)]] =:= (true : Boolean). +-- [E008] Not Found Error: tests/neg/named-tuples.scala:23:9 ----------------------------------------------------------- +23 | person._1 // error | ^^^^^^^^^ | value _1 is not a member of (name : String, age : Int) --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:24:36 ------------------------------------------------------ -24 | val _: (age: Int, name: String) = person // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:25:36 ------------------------------------------------------ +25 | val _: (age: Int, name: String) = person // error | ^^^^^^ | Found: (Test.person : (name : String, age : Int)) | Required: (age : Int, name : String) | | longer explanation available when compiling with `-explain` --- Error: tests/neg/named-tuples.scala:26:17 --------------------------------------------------------------------------- -26 | val (name = x, agee = y) = person // error +-- Error: tests/neg/named-tuples.scala:27:17 --------------------------------------------------------------------------- +27 | val (name = x, agee = y) = person // error | ^^^^^^^^ | No element named `agee` is defined in selector type (name : String, age : Int) --- Error: tests/neg/named-tuples.scala:29:10 --------------------------------------------------------------------------- -29 | case (name = n, age = a) => () // error // error +-- Error: tests/neg/named-tuples.scala:30:10 
--------------------------------------------------------------------------- +30 | case (name = n, age = a) => () // error // error | ^^^^^^^^ | No element named `name` is defined in selector type (String, Int) --- Error: tests/neg/named-tuples.scala:29:20 --------------------------------------------------------------------------- -29 | case (name = n, age = a) => () // error // error +-- Error: tests/neg/named-tuples.scala:30:20 --------------------------------------------------------------------------- +30 | case (name = n, age = a) => () // error // error | ^^^^^^^ | No element named `age` is defined in selector type (String, Int) --- [E172] Type Error: tests/neg/named-tuples.scala:31:27 --------------------------------------------------------------- -31 | val pp = person ++ (1, 2) // error +-- [E172] Type Error: tests/neg/named-tuples.scala:32:27 --------------------------------------------------------------- +32 | val pp = person ++ (1, 2) // error | ^ | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). --- [E172] Type Error: tests/neg/named-tuples.scala:34:18 --------------------------------------------------------------- -34 | person ++ (1, 2) match // error +-- [E172] Type Error: tests/neg/named-tuples.scala:35:18 --------------------------------------------------------------- +35 | person ++ (1, 2) match // error | ^ | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). 
--- Error: tests/neg/named-tuples.scala:37:17 --------------------------------------------------------------------------- -37 | val bad = ("", age = 10) // error +-- Error: tests/neg/named-tuples.scala:38:17 --------------------------------------------------------------------------- +38 | val bad = ("", age = 10) // error | ^^^^^^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:40:20 --------------------------------------------------------------------------- -40 | case (name = n, age) => () // error +-- Error: tests/neg/named-tuples.scala:41:20 --------------------------------------------------------------------------- +41 | case (name = n, age) => () // error | ^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:41:16 --------------------------------------------------------------------------- -41 | case (name, age = a) => () // error +-- Error: tests/neg/named-tuples.scala:42:16 --------------------------------------------------------------------------- +42 | case (name, age = a) => () // error | ^^^^^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:44:10 --------------------------------------------------------------------------- -44 | case (age = x) => // error +-- Error: tests/neg/named-tuples.scala:45:10 --------------------------------------------------------------------------- +45 | case (age = x) => // error | ^^^^^^^ | No element named `age` is defined in selector type Tuple --- [E172] Type Error: tests/neg/named-tuples.scala:46:27 --------------------------------------------------------------- -46 | val p2 = person ++ person // error +-- [E172] Type Error: tests/neg/named-tuples.scala:47:27 --------------------------------------------------------------- +47 | val p2 = person ++ person // error | ^ |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("name" : String), ("age" : 
String))] =:= (true : Boolean). --- [E172] Type Error: tests/neg/named-tuples.scala:47:43 --------------------------------------------------------------- -47 | val p3 = person ++ (first = 11, age = 33) // error +-- [E172] Type Error: tests/neg/named-tuples.scala:48:43 --------------------------------------------------------------- +48 | val p3 = person ++ (first = 11, age = 33) // error | ^ |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("first" : String), ("age" : String))] =:= (true : Boolean). --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:49:22 ------------------------------------------------------ -49 | val p5 = person.zip((first = 11, age = 33)) // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:50:22 ------------------------------------------------------ +50 | val p5 = person.zip((first = 11, age = 33)) // error | ^^^^^^^^^^^^^^^^^^^^^^ | Found: (first : Int, age : Int) | Required: NamedTuple.NamedTuple[(("name" : String), ("age" : String)), Tuple] | | longer explanation available when compiling with `-explain` --- Warning: tests/neg/named-tuples.scala:26:29 ------------------------------------------------------------------------- -26 | val (name = x, agee = y) = person // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:61:32 ------------------------------------------------------ +61 | val typo: (name: ?, age: ?) = (name = "he", ag = 1) // error + | ^^^^^^^^^^^^^^^^^^^^^ + | Found: (name : String, ag : Int) + | Required: (name : ?, age : ?) 
+ | + | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/named-tuples.scala:27:29 ------------------------------------------------------------------------- +27 | val (name = x, agee = y) = person // error | ^^^^^^ |pattern's type (String, Int) is more specialized than the right hand side expression's type (name : String, age : Int) | diff --git a/tests/neg/named-tuples.scala b/tests/neg/named-tuples.scala index b2148244e1ba..8f78f7915206 100644 --- a/tests/neg/named-tuples.scala +++ b/tests/neg/named-tuples.scala @@ -19,6 +19,7 @@ import language.experimental.namedTuples val _: (String, Int) = (name = "", age = 0) // ok, conversion val _: NameOnly = person // error val _: Person = nameOnly // error + val _: Person = (name = "") ++ nameOnly // error person._1 // error val _: (age: Int, name: String) = person // error @@ -57,4 +58,4 @@ import language.experimental.namedTuples // upcast (first: Int, age: Int) to (Int, Int), and then use the downwards // conversion to (name: Int, age: Int). This one would be harder to guard against. - + val typo: (name: ?, age: ?) = (name = "he", ag = 1) // error From c0bd1e44da49f54b6e6c7717d9dc42d54655fac2 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 24 Apr 2024 18:51:52 +0200 Subject: [PATCH 297/465] Don't abbreviate tuple bindings if right-hand-side is named We need to go through an explicit pattern match to drop the names. 
--- .../src/dotty/tools/dotc/ast/Desugar.scala | 3 +- tests/pos/named-tuples.check | 10 ------ tests/run/fieldsOf.check | 17 --------- tests/run/named-patmatch.scala | 36 +++++++++++++++++++ 4 files changed, 38 insertions(+), 28 deletions(-) delete mode 100644 tests/pos/named-tuples.check delete mode 100644 tests/run/fieldsOf.check create mode 100644 tests/run/named-patmatch.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index ff260c0efc16..1801a7fada7c 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1254,8 +1254,9 @@ object desugar { pats.forall(isVarPattern) case _ => false } + val isMatchingTuple: Tree => Boolean = { - case Tuple(es) => isTuplePattern(es.length) + case Tuple(es) => isTuplePattern(es.length) && !hasNamedArg(es) case _ => false } diff --git a/tests/pos/named-tuples.check b/tests/pos/named-tuples.check deleted file mode 100644 index 24928c7dbdac..000000000000 --- a/tests/pos/named-tuples.check +++ /dev/null @@ -1,10 +0,0 @@ -(Bob,33) -33 -Bob -(Bob,33,Lausanne,1003) -33 -no match -Bob is younger than Bill -Bob is younger than Lucy -Bill is younger than Lucy -matched elements (name, Bob), (age, 33) diff --git a/tests/run/fieldsOf.check b/tests/run/fieldsOf.check deleted file mode 100644 index beb79c056527..000000000000 --- a/tests/run/fieldsOf.check +++ /dev/null @@ -1,17 +0,0 @@ --- [E007] Type Mismatch Error: ../neg/fieldsOf.scala:10:15 --------------------- -10 | x = foo[Anon]() // error - | ^^^^^^^^^^^ - | Found: NamedTuple.FieldsOf[Anon] - | Required: (name : String, age : Int) - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: ../neg/fieldsOf.scala:11:9 ---------------------- -11 | x = foo() // error - | ^^^^^ - | Found: NamedTuple.FieldsOf[T] - | Required: (name : String, age : Int) - | - | where: T is a type variable - | - | longer explanation available 
when compiling with `-explain` -2 errors found diff --git a/tests/run/named-patmatch.scala b/tests/run/named-patmatch.scala new file mode 100644 index 000000000000..e62497e4aa8f --- /dev/null +++ b/tests/run/named-patmatch.scala @@ -0,0 +1,36 @@ +import annotation.experimental +import language.experimental.namedTuples + +@main def Test = + locally: + val (x = x, y = y) = (x = 11, y = 22) + assert(x == 11 && y == 22) + + locally: + val (x = a, y = b) = (x = 1, y = 2) + assert(a == 1 && b == 2) + + locally: + val (x = a, y = b) = (x = 1, y = 2) + assert(a == 1 && b == 2) + + locally: + val (x, y) = (x = 1, y = 2) + assert(x == 1 && y == 2) + + locally: + val (a, b) = (x = 1, y = 2) + assert(a == 1 && b == 2) + + (x = 1, y = 2) match + case (x = x, y = y) => assert(x == 1 && y == 2) + + (x = 1, y = 2) match + case (x, y) => assert(x == 1 && y == 2) + + (x = 1, y = 2) match + case (a, b) => assert(a == 1 && b == 2) + + + + From b997f3d6d846f5ddbaac6dfde689621a2eaacefd Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 24 Apr 2024 19:20:10 +0200 Subject: [PATCH 298/465] Strip named tuple from scrutinee when testing refutability of a named pattern match --- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 4 ++-- compiler/src/dotty/tools/dotc/typer/Checking.scala | 12 +++++++++--- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index e67e60dd45ea..8461c0f091fe 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -387,8 +387,8 @@ object TypeOps: if tycon1.typeSymbol == tycon2.typeSymbol && (tycon1 =:= tycon2) => mergeRefinedOrApplied(tp1, tp2) match case tp: AppliedType if tp.isUnreducibleWild => - // fall back to or-dominators rather tahn inferring a type that would - // caue an unreducible type error later. 
+ // fall back to or-dominators rather than inferring a type that would + // cause an unreducible type error later. approximateOr(tp1, tp2) case tp => tp case (tp1, tp2) => diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 662a4feb867a..7745c620312c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -966,10 +966,16 @@ trait Checking { false } - def check(pat: Tree, pt: Type): Boolean = + // Is scrutinee type `pt` a subtype of `pat.tpe`, after stripping named tuples + // and accounting for large generic tuples? + // Named tuples need to be stripped off, since names are dropped in patterns + def conforms(pat: Tree, pt: Type): Boolean = pt.isTupleXXLExtract(pat.tpe) // See isTupleXXLExtract, fixes TupleXXL parameter type - || pt <:< pat.tpe - || fail(pat, pt, Reason.NonConforming) + || pt.stripNamedTuple <:< pat.tpe + || (pt.widen ne pt) && conforms(pat, pt.widen) + + def check(pat: Tree, pt: Type): Boolean = + conforms(pat, pt) || fail(pat, pt, Reason.NonConforming) def recur(pat: Tree, pt: Type): Boolean = !sourceVersion.isAtLeast(`3.2`) From 2206d8883c0095f69642ba373074ad7060bcc1df Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 25 Apr 2024 00:21:21 +0200 Subject: [PATCH 299/465] Fix tuple selection in pattern matcher The pattern matcher selects tuples up to 22 using _1, _2, ... But if the scrutinee is a named tuple this only works if it is cast to a regular tuple first. 
--- .../dotty/tools/dotc/transform/PatternMatcher.scala | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index f22a17f7fd27..0b8507f3b6c7 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -347,13 +347,20 @@ object PatternMatcher { def tupleApp(i: Int, receiver: Tree) = // manually inlining the call to NonEmptyTuple#apply, because it's an inline method ref(defn.RuntimeTuplesModule) .select(defn.RuntimeTuples_apply) - .appliedTo(receiver, Literal(Constant(i))) + .appliedTo( + receiver.ensureConforms(defn.NonEmptyTupleTypeRef), // If scrutinee is a named tuple, cast to underlying tuple + Literal(Constant(i))) if (isSyntheticScala2Unapply(unapp.symbol) && caseAccessors.length == args.length) - def tupleSel(sym: Symbol) = ref(scrutinee).select(sym) + def tupleSel(sym: Symbol) = + // If scrutinee is a named tuple, cast to underlying tuple, so that we can + // continue to select with _1, _2, ... 
+ ref(scrutinee).ensureConforms(scrutinee.info.stripNamedTuple).select(sym) val isGenericTuple = defn.isTupleClass(caseClass) && !defn.isTupleNType(tree.tpe match { case tp: OrType => tp.join case tp => tp }) // widen even hard unions, to see if it's a union of tuples - val components = if isGenericTuple then caseAccessors.indices.toList.map(tupleApp(_, ref(scrutinee))) else caseAccessors.map(tupleSel) + val components = + if isGenericTuple then caseAccessors.indices.toList.map(tupleApp(_, ref(scrutinee))) + else caseAccessors.map(tupleSel) matchArgsPlan(components, args, onSuccess) else if unappType.isRef(defn.BooleanClass) then TestPlan(GuardTest, unapp, unapp.span, onSuccess) From ca19f1a5589ca70b65afb89eda3725bc405245ac Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 25 Apr 2024 13:21:51 +0200 Subject: [PATCH 300/465] Fixes to NamedTuple --- library/src/scala/NamedTuple.scala | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index a787ea85d37c..d4e5a72eb1fc 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -46,13 +46,15 @@ object NamedTuple: inline def head: Tuple.Elem[V, 0] = apply(0) /** The tuple consisting of all elements of this tuple except the first one */ - inline def tail: Tuple.Drop[V, 1] = toTuple.drop(1) + inline def tail: NamedTuple[Tuple.Tail[N], Tuple.Tail[V]] = + toTuple.drop(1).asInstanceOf[NamedTuple[Tuple.Tail[N], Tuple.Tail[V]]] /** The last element value of this tuple */ inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] /** The tuple consisting of all elements of this tuple except the last one */ - inline def init: Tuple.Init[V] = toTuple.take(size - 1).asInstanceOf[Tuple.Init[V]] + inline def init: NamedTuple[Tuple.Init[N], Tuple.Init[V]] = + toTuple.take(size - 1).asInstanceOf[NamedTuple[Tuple.Init[N], Tuple.Init[V]]] /** The tuple consisting of the first `n` 
elements of this tuple, or all * elements if `n` exceeds `size`. @@ -67,7 +69,11 @@ object NamedTuple: toTuple.drop(n) /** The tuple `(x.take(n), x.drop(n))` */ - inline def splitAt(n: Int): NamedTuple[Tuple.Split[N, n.type], Tuple.Split[V, n.type]] = + inline def splitAt(n: Int): + (NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]], + NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]]) = + // would be nice if this could have type `Split[NamedTuple[N, V]]` instead, but + // we get a type error then. Similar for other methods here. toTuple.splitAt(n) /** The tuple consisting of all elements of this tuple followed by all elements @@ -188,6 +194,12 @@ object NamedTuple: */ type From[T] <: AnyNamedTuple + /** The type of the empty named tuple */ + type Empty = EmptyTuple.type + + /** The empty named tuple */ + val Empty: Empty = EmptyTuple.asInstanceOf[Empty] + end NamedTuple /** Separate from NamedTuple object so that we can match on the opaque type NamedTuple. */ @@ -202,3 +214,4 @@ object NamedTupleDecomposition: /** The value types of a named tuple represented as a regular tuple. */ type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match case NamedTuple[_, x] => x + From 984fe6291faae9ea222226bedf523cdabae64701 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 25 Apr 2024 13:22:32 +0200 Subject: [PATCH 301/465] More tests --- tests/neg/named-tuples.check | 8 -- .../pos/named-tuples-strawman.scala | 0 tests/pos/selectable-fields.scala | 3 + tests/run/named-tuple-ops.scala | 89 +++++++++++++++++++ 4 files changed, 92 insertions(+), 8 deletions(-) rename tests/{pending => }/pos/named-tuples-strawman.scala (100%) create mode 100644 tests/pos/selectable-fields.scala create mode 100644 tests/run/named-tuple-ops.scala diff --git a/tests/neg/named-tuples.check b/tests/neg/named-tuples.check index 791a10f20b16..db3cc703722f 100644 --- a/tests/neg/named-tuples.check +++ b/tests/neg/named-tuples.check @@ -101,11 +101,3 @@ | Required: (name : ?, age : ?) 
| | longer explanation available when compiling with `-explain` --- Warning: tests/neg/named-tuples.scala:27:29 ------------------------------------------------------------------------- -27 | val (name = x, agee = y) = person // error - | ^^^^^^ - |pattern's type (String, Int) is more specialized than the right hand side expression's type (name : String, age : Int) - | - |If the narrowing is intentional, this can be communicated by adding `: @unchecked` after the expression, - |which may result in a MatchError at runtime. - |This patch can be rewritten automatically under -rewrite -source 3.2-migration. diff --git a/tests/pending/pos/named-tuples-strawman.scala b/tests/pos/named-tuples-strawman.scala similarity index 100% rename from tests/pending/pos/named-tuples-strawman.scala rename to tests/pos/named-tuples-strawman.scala diff --git a/tests/pos/selectable-fields.scala b/tests/pos/selectable-fields.scala new file mode 100644 index 000000000000..65b024fdfcd6 --- /dev/null +++ b/tests/pos/selectable-fields.scala @@ -0,0 +1,3 @@ +val foo1 = new Selectable: + type Fields = (xyz: Int) + def selectDynamic(name: String): Any = 23 diff --git a/tests/run/named-tuple-ops.scala b/tests/run/named-tuple-ops.scala new file mode 100644 index 000000000000..076ab5028c6c --- /dev/null +++ b/tests/run/named-tuple-ops.scala @@ -0,0 +1,89 @@ +//> using options -source future +import language.experimental.namedTuples +import scala.compiletime.asMatchable + +type City = (name: String, zip: Int, pop: Int) +type Raw = (String, Int, Int) + +type Coord = (x: Double, y: Double) +type Labels = (x: String, y: String) + +@main def Test = + val city: City = (name = "Lausanne", zip = 1000, pop = 140000) + val coord: Coord = (x = 1.0, y = 0.0) + val labels: Labels = (x = "west", y = "north") + + val size: 3 = city.size + assert(city.size == 3) + + val zip: Int = city(1) + assert(zip == 1000) + + val name: String = city.head + assert(name == "Lausanne") + + val zip_pop: (zip: Int, pop: Int) = 
city.tail + val (_: Int, _: Int) = zip_pop + assert(zip_pop == (zip = 1000, pop = 140000)) + + val cinit = city.init + val _: (name: String, zip: Int) = cinit + assert(cinit == (name = "Lausanne", zip = 1000)) + + val ctake1: (name: String) = city.take(1) + assert(ctake1 == (name = "Lausanne")) + + val cdrop1 = city.drop(1) + val _: (zip: Int, pop: Int) = cdrop1 + assert(cdrop1 == zip_pop) + + val cdrop3 = city.drop(3) + val _: NamedTuple.Empty = cdrop3 + assert(cdrop3 == NamedTuple.Empty) + + val cdrop4 = city.drop(4) + val _: NamedTuple.Empty = cdrop4 + assert(cdrop4 == NamedTuple.Empty) + + val csplit = city.splitAt(1) + val _: ((name: String), (zip: Int, pop: Int)) = csplit + assert(csplit == ((name = "Lausanne"), zip_pop)) + + val city_coord = city ++ coord + val _: NamedTuple.Concat[City, Coord] = city_coord + val _: (name: String, zip: Int, pop: Int, x: Double, y: Double) = city_coord + assert(city_coord == (name = "Lausanne", zip = 1000, pop = 140000, x = 1.0, y = 0.0)) + + type IntToString[X] = X match + case Int => String + case _ => X + + val intToString = [X] => (x: X) => x.asMatchable match + case x: Int => x.toString + case x => x + + val citymap = city.map[IntToString](intToString.asInstanceOf) + val _: (name: String, zip: String, pop: String) = citymap + assert(citymap == (name = "Lausanne", zip = "1000", pop = "140000")) + + val cityreverse = city.reverse + val _: (pop: Int, zip: Int, name: String) = cityreverse + assert(cityreverse == (pop = 140000, zip = 1000, name = "Lausanne")) + + val zipped = coord.zip(labels) + val _: (x: (Double, String), y: (Double, String)) = zipped + val (x3, y3) = zipped + val _: (Double, String) = x3 + assert(zipped == (x = (1.0, "west"), y = (0.0, "north"))) + + val zippedRaw = ((1.0, "west"), (0.0, "north")) + val (x1: (Double, String), x2: (Double, String)) = zippedRaw + + val cityFields = city.toList + val _: List[String | Int] = cityFields + assert(cityFields == List("Lausanne", 1000, 140000)) + + val citArr = 
city.toArray + val _: List[String | Int] = cityFields + assert(cityFields == List("Lausanne", 1000, 140000)) + From 21bcfef05ffccd29bc7131b8f3190f24ef43b329 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 25 Apr 2024 14:04:16 +0200 Subject: [PATCH 302/465] Mention restriction against `_1`, `_2`, ... as named tuple labels --- docs/_docs/reference/experimental/named-tuples.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md index 6a0baebe36e6..3867b4d13f15 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -159,6 +159,7 @@ The following restrictions apply to named tuple elements: (tuple: Tuple) match case (age = x) => // error ``` + 4. Regular selector names `_1`, `_2`, ... are not allowed as names in named tuples. ### Syntax From 92d22c9efdf64895c296a9e239a6803eae04e767 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 26 Apr 2024 20:06:28 +0200 Subject: [PATCH 303/465] Address review comments on Tuple.scala --- library/src/scala/NamedTuple.scala | 4 +-- library/src/scala/Tuple.scala | 40 +++++-------------------- tests/pos/named-tuples-strawman-2.scala | 8 ++--- tests/pos/tuple-ops.scala | 18 ----------- 4 files changed, 13 insertions(+), 57 deletions(-) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index d4e5a72eb1fc..dc6e6c3144f6 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -184,8 +184,8 @@ object NamedTuple: * @syntax markdown */ type Zip[X <: AnyNamedTuple, Y <: AnyNamedTuple] = - Tuple.Conforms[Names[X], Names[Y]] match - case true => + Names[X] match + case Names[Y] => NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] /** A type specially treated by the compiler to represent all fields of a diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 
8bd78013210b..30f0e44ecf45 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -118,6 +118,7 @@ object Tuple: // even though it only matches non-empty tuples. // Avoids bounds check failures from an irreducible type // like `Tuple.Head[Tuple.Tail[X]]` + // Other types that don't reduce for empty tuples follow the same principle. type Head[X <: Tuple] = X match case x *: _ => x @@ -273,22 +274,6 @@ object Tuple: */ type Union[T <: Tuple] = Fold[T, Nothing, [x, y] =>> x | y] - /** A type level Boolean indicating whether the tuple `X` conforms - * to the tuple `Y`. This means: - * - the two tuples have the same number of elements - * - for corresponding elements `x` in `X` and `y` in `Y`, `x` matches `y`. - * @pre The elements of `X` are assumed to be singleton types - */ - type Conforms[X <: Tuple, Y <: Tuple] <: Boolean = Y match - case EmptyTuple => - X match - case EmptyTuple => true - case _ => false - case y *: ys => - X match - case `y` *: xs => Conforms[xs, ys] - case _ => false - /** A type level Boolean indicating whether the tuple `X` has an element * that matches `Y`. * @pre The elements of `X` are assumed to be singleton types @@ -350,25 +335,14 @@ object Tuple: extension [X <: Tuple](inline x: X) - /** The index (starting at 0) of the first element in the type `X` of `x` - * that matches type `Y`. + /** The index (starting at 0) of the first occurrence of y.type in the type `X` of `x` + * or Size[X] if no such element exists. */ - inline def indexOfType[Y] = constValue[IndexOf[X, Y]] + transparent inline def indexOf(y: Any): Int = constValue[IndexOf[X, y.type]] - /** A boolean indicating whether there is an element in the type `X` of `x` - * that matches type `Y`. 
+ /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` */ - inline def containsType[Y] = constValue[Contains[X, Y]] - - /* Note: It would be nice to add the following two extension methods: - - inline def indexOf[Y: Precise](y: Y) = constValue[IndexOf[X, Y]] - inline def containsType[Y: Precise](y: Y) = constValue[Contains[X, Y]] - - because we could then move indexOf/contains completely to the value level. - But this requires `Y` to be inferred precisely, and therefore a mechanism - like the `Precise` context bound used above, which does not yet exist. - */ + transparent inline def contains(y: Any): Boolean = constValue[Contains[X, y.type]] end extension @@ -380,7 +354,7 @@ object Tuple: using eqHead: CanEqual[H1, H2], eqTail: CanEqual[T1, T2] ): CanEqual[H1 *: T1, H2 *: T2] = CanEqual.derived - object helpers: + private object helpers: /** Used to implement IndicesWhere */ type IndicesWhereHelper[X <: Tuple, P[_] <: Boolean, N <: Int] <: Tuple = X match diff --git a/tests/pos/named-tuples-strawman-2.scala b/tests/pos/named-tuples-strawman-2.scala index 709f537f8114..4b32dd83f2eb 100644 --- a/tests/pos/named-tuples-strawman-2.scala +++ b/tests/pos/named-tuples-strawman-2.scala @@ -52,7 +52,7 @@ object TupleOps: case EmptyTuple => Y *: EmptyTuple inline def appendIfDistinct[X <: Tuple, Y](xs: X, y: Y): AppendIfDistinct[X, Y] = - (if xs.containsType[Y] then xs else xs :* y).asInstanceOf[AppendIfDistinct[X, Y]] + (if xs.contains(y) then xs else xs :* y).asInstanceOf[AppendIfDistinct[X, Y]] /** `X` with all elements from `Y` that do not occur in `X` appended */ type ConcatDistinct[X <: Tuple, Y <: Tuple] <: Tuple = Y match @@ -137,10 +137,10 @@ object NamedTupleOps: val x1: IndexOf[Names, "first"] = constValue val _: 0 = x1 - val x2: IndexOf[Names, "age"] = names.indexOfType["age"] + val x2: IndexOf[Names, "age"] = names.indexOf("age") val _: 2 = x2 - val x3: IndexOf[Names, "what?"] = names.indexOfType["what?"] + val x3: IndexOf[Names, 
"what?"] = names.indexOf("what?") val _: 3 = x3 type Releases = "first" *: "middle" *: EmptyTuple @@ -149,7 +149,7 @@ object NamedTupleOps: val releases: Releases = ("first", "middle") val releaseValues: ReleaseValues = (1.0, true) - val x4 = values.updateOrAppend(names.indexOfType["age"], 11) + val x4 = values.updateOrAppend(names.indexOf("age"), 11) //updateOrAppend[Values](values)[IndexOf[Names, "age"], 11](indexOf[Names](names)["age"]("age"), 11) val _: ("Bob", "Miller", 11) = x4 assert(("Bob", "Miller", 11) == x4) diff --git a/tests/pos/tuple-ops.scala b/tests/pos/tuple-ops.scala index df708e669e0f..739b1ebeeb02 100644 --- a/tests/pos/tuple-ops.scala +++ b/tests/pos/tuple-ops.scala @@ -2,18 +2,6 @@ import language.experimental.namedTuples import Tuple.* def test = - val x1: Conforms[(1, 2), (1, 2)] = ??? - val _: true = x1 - - val x2: Conforms[(1, 2), (1, 3)] = ??? - val _: false = x2 - - val x3: Conforms[(1, 2), (1, 2, 4)] = ??? - val _: false = x2 - - val x4: Conforms[(1, 2, 4), (1, 2)] = ??? 
- val _: false = x2 - summon[Disjoint[(1, 2, 3), (4, 5)] =:= true] summon[Disjoint[(1, 2, 6), (4, 5)] =:= true] summon[Disjoint[(1, 2, 6), EmptyTuple] =:= true] @@ -23,12 +11,6 @@ def test = summon[Contains[(1, 2, 3), 2] =:= true] summon[Contains[(1, 2, 3), 4] =:= false] - summon[Conforms[(1, 2, 3), (1, 2, 3)] =:= true] - summon[Conforms[(1, 2, 3), (1, 2)] =:= false] - summon[Conforms[(1, 2, 3), (1, 2, 4)] =:= false] - summon[Conforms[(1, 2, 3), (Int, 2, 3)] =:= true] -// summon[Conforms[(Int, 2, 3), (1, 2, 3)] =:= true] // error, reduction gets stuck - summon[Disjoint[(1, 2, 3), (4, 2)] =:= false] summon[Disjoint[("a", "b"), ("b", "c")] =:= false] summon[Disjoint[(1, 2, 6), Tuple1[2]] =:= false] From 1613ee181173325f0dae326a4e54898b0c4bc315 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 27 Apr 2024 10:32:01 +0200 Subject: [PATCH 304/465] Reject recursive dynamicSelect corner case --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 10 ++++++++-- tests/neg/unselectable-fields.check | 4 ++++ .../unselectable-fields.scala} | 3 +++ 3 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 tests/neg/unselectable-fields.check rename tests/{pos/selectable-fields.scala => neg/unselectable-fields.scala} (77%) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index c392c195482c..a44d9b9512db 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -822,14 +822,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // and the selector is neither applied nor assigned to, // expand to a typed dynamic dispatch using selectDynamic wrapped in a cast if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) - && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto + && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto then val fieldsType = qual.tpe.select(tpnme.Fields).dealias.simplified val fields = 
fieldsType.namedTupleElementTypes typr.println(i"try dyn select $qual, $selName, $fields") fields.find(_._1 == selName) match case Some((_, fieldType)) => - return dynamicSelect(fieldType).ensureConforms(fieldType) + val dynSelected = dynamicSelect(fieldType) + dynSelected match + case Apply(sel: Select, _) if !sel.denot.symbol.exists => + // Reject corner case where selectDynamic needs annother selectDynamic to be called. E.g. as in neg/unselectable-fields.scala. + report.error(i"Cannot use selectDynamic here since it it needs another selectDynamic to be invoked", tree.srcPos) + case _ => + return dynSelected.ensureConforms(fieldType) case _ => // Otherwise, report an error diff --git a/tests/neg/unselectable-fields.check b/tests/neg/unselectable-fields.check new file mode 100644 index 000000000000..06992eded299 --- /dev/null +++ b/tests/neg/unselectable-fields.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/unselectable-fields.scala:4:13 --------------------------------------------------------------------- +4 |val _ = foo1.xyz // error + | ^^^^^^^^ + | Cannot use selectDynamic here since it it needs another selectDynamic to be invoked diff --git a/tests/pos/selectable-fields.scala b/tests/neg/unselectable-fields.scala similarity index 77% rename from tests/pos/selectable-fields.scala rename to tests/neg/unselectable-fields.scala index 65b024fdfcd6..7abe49d24764 100644 --- a/tests/pos/selectable-fields.scala +++ b/tests/neg/unselectable-fields.scala @@ -1,3 +1,6 @@ val foo1 = new Selectable: type Fields = (xyz: Int) def selectDynamic(name: String): Any = 23 +val _ = foo1.xyz // error + + From a04d3a7429e3ac3964bd8d867c71708d54e0c264 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 27 Apr 2024 12:53:17 +0200 Subject: [PATCH 305/465] Update compiler/src/dotty/tools/dotc/typer/Typer.scala --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala 
b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a44d9b9512db..46982cf1406d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -833,7 +833,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer dynSelected match case Apply(sel: Select, _) if !sel.denot.symbol.exists => // Reject corner case where selectDynamic needs annother selectDynamic to be called. E.g. as in neg/unselectable-fields.scala. - report.error(i"Cannot use selectDynamic here since it it needs another selectDynamic to be invoked", tree.srcPos) + report.error(i"Cannot use selectDynamic here since it needs another selectDynamic to be invoked", tree.srcPos) case _ => return dynSelected.ensureConforms(fieldType) case _ => From 37b1bd208713b5558b40fe6c388fa8028baed902 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 27 Apr 2024 13:33:36 +0200 Subject: [PATCH 306/465] Update tests/neg/unselectable-fields.check --- tests/neg/unselectable-fields.check | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/neg/unselectable-fields.check b/tests/neg/unselectable-fields.check index 06992eded299..f7f0bf51a6bc 100644 --- a/tests/neg/unselectable-fields.check +++ b/tests/neg/unselectable-fields.check @@ -1,4 +1,4 @@ -- Error: tests/neg/unselectable-fields.scala:4:13 --------------------------------------------------------------------- 4 |val _ = foo1.xyz // error | ^^^^^^^^ - | Cannot use selectDynamic here since it it needs another selectDynamic to be invoked + | Cannot use selectDynamic here since it needs another selectDynamic to be invoked From 3f8f6c694bddeeafe3bb16cf8171c8b0dd789464 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 11:30:27 +0200 Subject: [PATCH 307/465] Use consistent naming and drop remaining braces in tuple type ops --- library/src/scala/Tuple.scala | 67 ++++++++++++++----------------- tests/neg/print-tuple-union.check | 2 +- tests/neg/wildcard-match.check | 7 
++-- 3 files changed, 34 insertions(+), 42 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 30f0e44ecf45..b643f606baae 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -22,13 +22,13 @@ sealed trait Tuple extends Product: runtime.Tuples.toIArray(this) /** Return a copy of `this` tuple with an element appended */ - inline def :* [This >: this.type <: Tuple, L] (x: L): Append[This, L] = + inline def :* [This >: this.type <: Tuple, L](x: L): Append[This, L] = runtime.Tuples.append(x, this).asInstanceOf[Append[This, L]] /** Return a new tuple by prepending the element to `this` tuple. * This operation is O(this.size) */ - inline def *: [H, This >: this.type <: Tuple] (x: H): H *: This = + inline def *: [H, This >: this.type <: Tuple](x: H): H *: This = runtime.Tuples.cons(x, this).asInstanceOf[H *: This] /** Return a new tuple by concatenating `this` tuple with `that` tuple. @@ -104,14 +104,13 @@ object Tuple: /** The size of a tuple, represented as a literal constant subtype of Int */ type Size[X <: Tuple] <: Int = X match case EmptyTuple => 0 - case x *: xs => S[Size[xs]] + case _ *: xs => S[Size[xs]] /** The type of the element at position N in the tuple X */ type Elem[X <: Tuple, N <: Int] = X match - case x *: xs => - N match - case 0 => x - case S[n1] => Elem[xs, n1] + case x *: xs => N match + case 0 => x + case S[n1] => Elem[xs, n1] /** The type of the first element of a tuple */ // Only bounded by `<: Tuple` not `<: NonEmptyTuple` @@ -134,8 +133,7 @@ object Tuple: /** The type of the initial part of a tuple without its last element */ type Init[X <: Tuple] <: Tuple = X match case _ *: EmptyTuple => EmptyTuple - case x *: xs => - x *: Init[xs] + case x *: xs => x *: Init[xs] /** The type of the tuple consisting of the first `N` elements of `X`, * or all elements if `N` exceeds `Size[X]`. 
@@ -149,27 +147,24 @@ object Tuple: /** The type of the tuple consisting of all elements of `X` except the first `N` ones, * or no elements if `N` exceeds `Size[X]`. */ - type Drop[X <: Tuple, N <: Int] <: Tuple = N match { + type Drop[X <: Tuple, N <: Int] <: Tuple = N match case 0 => X - case S[n1] => X match { + case S[n1] => X match case EmptyTuple => EmptyTuple - case x *: xs => Drop[xs, n1] - } - } + case _ *: xs => Drop[xs, n1] /** The pair type `(Take(X, N), Drop[X, N]). */ type Split[X <: Tuple, N <: Int] = (Take[X, N], Drop[X, N]) /** Type of a tuple with an element appended */ - type Append[X <: Tuple, Y] <: NonEmptyTuple = X match { + type Append[X <: Tuple, Y] <: NonEmptyTuple = X match case EmptyTuple => Y *: EmptyTuple case x *: xs => x *: Append[xs, Y] - } /** Type of the concatenation of two tuples `X` and `Y` */ type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match case EmptyTuple => Y - case x1 *: xs1 => x1 *: Concat[xs1, Y] + case x *: xs => x *: Concat[xs, Y] /** An infix shorthand for `Concat[X, Y]` */ infix type ++[X <: Tuple, +Y <: Tuple] = Concat[X, Y] @@ -179,27 +174,27 @@ object Tuple: */ type IndexOf[X <: Tuple, Y] <: Int = X match case Y *: _ => 0 - case x *: xs => S[IndexOf[xs, Y]] + case _ *: xs => S[IndexOf[xs, Y]] case EmptyTuple => 0 /** Fold a tuple `(T1, ..., Tn)` into `F[T1, F[... F[Tn, Z]...]]]` */ - type Fold[Tup <: Tuple, Z, F[_, _]] = Tup match + type Fold[X <: Tuple, Z, F[_, _]] = X match case EmptyTuple => Z - case h *: t => F[h, Fold[t, Z, F]] + case x *: xs => F[x, Fold[xs, Z, F]] /** The type of tuple `X` mapped with the type-level function `F`. * If `X = (T1, ..., Ti)` then `Map[X, F] = `(F[T1], ..., F[Ti])`. */ - type Map[Tup <: Tuple, F[_ <: Union[Tup]]] <: Tuple = Tup match + type Map[X <: Tuple, F[_ <: Union[X]]] <: Tuple = X match case EmptyTuple => EmptyTuple - case h *: t => F[h] *: Map[t, F] + case x *: xs => F[x] *: Map[xs, F] /** The type of tuple `X` flat-mapped with the type-level function `F`. 
* If `X = (T1, ..., Ti)` then `FlatMap[X, F] = `F[T1] ++ ... ++ F[Ti]` */ - type FlatMap[Tup <: Tuple, F[_ <: Union[Tup]] <: Tuple] <: Tuple = Tup match + type FlatMap[X <: Tuple, F[_ <: Union[X]] <: Tuple] <: Tuple = X match case EmptyTuple => EmptyTuple - case h *: t => Concat[F[h], FlatMap[t, F]] + case x *: xs => Concat[F[x], FlatMap[xs, F]] // TODO: implement term level analogue /** The type of the tuple consisting of all elements of tuple `X` that have types @@ -217,9 +212,9 @@ object Tuple: */ type Filter[X <: Tuple, P[_] <: Boolean] <: Tuple = X match case EmptyTuple => EmptyTuple - case h *: t => P[h] match - case true => h *: Filter[t, P] - case false => Filter[t, P] + case x *: xs => P[x] match + case true => x *: Filter[xs, P] + case false => Filter[xs, P] /** A tuple consisting of those indices `N` of tuple `X` where the predicate `P` * is true for `Elem[X, N]`. Indices are type level values <: Int. @@ -242,17 +237,16 @@ object Tuple: * ``` * @syntax markdown */ - type Zip[T1 <: Tuple, T2 <: Tuple] <: Tuple = (T1, T2) match - case (h1 *: t1, h2 *: t2) => (h1, h2) *: Zip[t1, t2] + type Zip[X <: Tuple, Y <: Tuple] <: Tuple = (X, Y) match + case (x *: xs, y *: ys) => (x, y) *: Zip[xs, ys] case (EmptyTuple, _) => EmptyTuple case (_, EmptyTuple) => EmptyTuple case _ => Tuple /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... Tn)` */ - type InverseMap[X <: Tuple, F[_]] <: Tuple = X match { - case F[x] *: t => x *: InverseMap[t, F] + type InverseMap[X <: Tuple, F[_]] <: Tuple = X match + case F[x] *: xs => x *: InverseMap[xs, F] case EmptyTuple => EmptyTuple - } /** Implicit evidence. IsMappedBy[F][X] is present in the implicit scope iff * X is a tuple for which each element's type is constructed via `F`. E.g. 
@@ -280,7 +274,7 @@ object Tuple: */ type Contains[X <: Tuple, Y] <: Boolean = X match case Y *: _ => true - case x *: xs => Contains[xs, Y] + case _ *: xs => Contains[xs, Y] case EmptyTuple => false /** A type level Boolean indicating whether the type `Y` contains @@ -288,10 +282,9 @@ object Tuple: * @pre The elements of `X` and `Y` are assumed to be singleton types */ type Disjoint[X <: Tuple, Y <: Tuple] <: Boolean = X match - case x *: xs => - Contains[Y, x] match - case true => false - case false => Disjoint[xs, Y] + case x *: xs => Contains[Y, x] match + case true => false + case false => Disjoint[xs, Y] case EmptyTuple => true /** Empty tuple */ diff --git a/tests/neg/print-tuple-union.check b/tests/neg/print-tuple-union.check index f3754aa5b17e..7d2c019de5a6 100644 --- a/tests/neg/print-tuple-union.check +++ b/tests/neg/print-tuple-union.check @@ -13,6 +13,6 @@ | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case | - | case h *: t => h | Tuple.Fold[t, Nothing, [x, y] =>> x | y] + | case x *: xs => x | Tuple.Fold[xs, Nothing, [x, y] =>> x | y] | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/wildcard-match.check b/tests/neg/wildcard-match.check index d405326c3d2b..fd20443c0a9f 100644 --- a/tests/neg/wildcard-match.check +++ b/tests/neg/wildcard-match.check @@ -87,8 +87,7 @@ | trying to reduce shapeless.tuples.length[T2] | trying to reduce Tuple.Size[shapeless.tuples.to[T2]] | failed since selector shapeless.tuples.to[T2] - | does not uniquely determine parameters x, xs in - | case x *: xs => scala.compiletime.ops.int.S[Tuple.Size[xs]] - | The computed bounds for the parameters are: - | x <: Int + | does not uniquely determine parameter xs in + | case _ *: xs => scala.compiletime.ops.int.S[Tuple.Size[xs]] + | The computed bounds for the parameter are: | xs <: (Int, Int) From 9627c08818cbe55e1d215a4192b675548c92a957 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle 
Date: Mon, 29 Apr 2024 11:32:05 +0200 Subject: [PATCH 308/465] Add an infix shorthand for `Append[X, Y]` as is the case for `Concat` --- library/src/scala/Tuple.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index b643f606baae..34274c4b0413 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -22,7 +22,7 @@ sealed trait Tuple extends Product: runtime.Tuples.toIArray(this) /** Return a copy of `this` tuple with an element appended */ - inline def :* [This >: this.type <: Tuple, L](x: L): Append[This, L] = + inline def :* [This >: this.type <: Tuple, L](x: L): This :* L = runtime.Tuples.append(x, this).asInstanceOf[Append[This, L]] /** Return a new tuple by prepending the element to `this` tuple. @@ -34,7 +34,7 @@ sealed trait Tuple extends Product: /** Return a new tuple by concatenating `this` tuple with `that` tuple. * This operation is O(this.size + that.size) */ - inline def ++ [This >: this.type <: Tuple](that: Tuple): Concat[This, that.type] = + inline def ++ [This >: this.type <: Tuple](that: Tuple): This ++ that.type = runtime.Tuples.concat(this, that).asInstanceOf[Concat[This, that.type]] /** Return the size (or arity) of the tuple */ @@ -161,6 +161,9 @@ object Tuple: case EmptyTuple => Y *: EmptyTuple case x *: xs => x *: Append[xs, Y] + /** An infix shorthand for `Append[X, Y]` */ + infix type :*[X <: Tuple, Y] = Append[X, Y] + /** Type of the concatenation of two tuples `X` and `Y` */ type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match case EmptyTuple => Y From 8a4162f70761e9b2937b94a33562682a36af34da Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 11:53:11 +0200 Subject: [PATCH 309/465] Drop unreachable case from `type Zip` --- library/src/scala/Tuple.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 34274c4b0413..7afa96a067b0 100644 
--- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -244,7 +244,6 @@ object Tuple: case (x *: xs, y *: ys) => (x, y) *: Zip[xs, ys] case (EmptyTuple, _) => EmptyTuple case (_, EmptyTuple) => EmptyTuple - case _ => Tuple /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... Tn)` */ type InverseMap[X <: Tuple, F[_]] <: Tuple = X match From 0ab9e7bb76bd60b8e0617f28b111aaeeee7babdb Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 11:53:50 +0200 Subject: [PATCH 310/465] Document `Concat` covariance in 2nd parameter --- library/src/scala/Tuple.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 7afa96a067b0..e46d5c5348d8 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -34,6 +34,8 @@ sealed trait Tuple extends Product: /** Return a new tuple by concatenating `this` tuple with `that` tuple. * This operation is O(this.size + that.size) */ + // Contrarily to `this`, `that` does not need a type parameter + // since `++` is covariant in its second argument. inline def ++ [This >: this.type <: Tuple](that: Tuple): This ++ that.type = runtime.Tuples.concat(this, that).asInstanceOf[Concat[This, that.type]] @@ -165,6 +167,7 @@ object Tuple: infix type :*[X <: Tuple, Y] = Append[X, Y] /** Type of the concatenation of two tuples `X` and `Y` */ + // Can be covariant in `Y` since it never appears as a match type scrutinee. type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match case EmptyTuple => Y case x *: xs => x *: Concat[xs, Y] From 075b7d148b0f16c54ac8f50d5af4e2536f5e821e Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 12:07:54 +0200 Subject: [PATCH 311/465] Refine bounds of `type Filter` predicate to only require being defined on the element types. 
Similar to what we have for `type FlatMap` --- library/src/scala/Tuple.scala | 8 ++++---- tests/pos/tuple-filter.scala | 3 +++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index e46d5c5348d8..fa598f3bd105 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -94,7 +94,7 @@ sealed trait Tuple extends Product: * for which the given type level predicate `P` reduces to the literal * constant `true`. */ - inline def filter[This >: this.type <: Tuple, P[_] <: Boolean]: Filter[This, P] = + inline def filter[This >: this.type <: Tuple, P[_ <: Union[This]] <: Boolean]: Filter[This, P] = val toInclude = constValueTuple[IndicesWhere[This, P]].toArray val arr = new Array[Object](toInclude.length) for i <- 0 until toInclude.length do @@ -216,7 +216,7 @@ object Tuple: * ``` * @syntax markdown */ - type Filter[X <: Tuple, P[_] <: Boolean] <: Tuple = X match + type Filter[X <: Tuple, P[_ <: Union[X]] <: Boolean] <: Tuple = X match case EmptyTuple => EmptyTuple case x *: xs => P[x] match case true => x *: Filter[xs, P] @@ -225,7 +225,7 @@ object Tuple: /** A tuple consisting of those indices `N` of tuple `X` where the predicate `P` * is true for `Elem[X, N]`. Indices are type level values <: Int. 
*/ - type IndicesWhere[X <: Tuple, P[_] <: Boolean] = + type IndicesWhere[X <: Tuple, P[_ <: Union[X]] <: Boolean] = helpers.IndicesWhereHelper[X, P, 0] /** The type of the tuple consisting of all element values of @@ -355,7 +355,7 @@ object Tuple: private object helpers: /** Used to implement IndicesWhere */ - type IndicesWhereHelper[X <: Tuple, P[_] <: Boolean, N <: Int] <: Tuple = X match + type IndicesWhereHelper[X <: Tuple, P[_ <: Union[X]] <: Boolean, N <: Int] <: Tuple = X match case EmptyTuple => EmptyTuple case h *: t => P[h] match case true => N *: IndicesWhereHelper[t, P, S[N]] diff --git a/tests/pos/tuple-filter.scala b/tests/pos/tuple-filter.scala index 2c9638b2e47b..0964d2e982d9 100644 --- a/tests/pos/tuple-filter.scala +++ b/tests/pos/tuple-filter.scala @@ -8,3 +8,6 @@ def Test = summon[Tuple.Filter[(1, 2, 3, 4), P] =:= (1, 2, 4)] summon[Tuple.Filter[(1, 2, 3, 4), RejectAll] =:= EmptyTuple] summon[Tuple.Filter[EmptyTuple, P] =:= EmptyTuple] + + import compiletime.ops.int.< + summon[Tuple.Filter[(1, 4, 7, 2, 10, 3, 4), [X <: Int] =>> X < 5] =:= (1, 4, 2, 3, 4)] From c58c8c24129097ea6d372e4c6e45bbe1722a84d7 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 13:44:15 +0200 Subject: [PATCH 312/465] Do `contains` runtime operation based on term equality --- library/src/scala/Tuple.scala | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index fa598f3bd105..21aa9dbf598f 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -338,9 +338,15 @@ object Tuple: */ transparent inline def indexOf(y: Any): Int = constValue[IndexOf[X, y.type]] - /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` - */ - transparent inline def contains(y: Any): Boolean = constValue[Contains[X, y.type]] + /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` */ + // Note this isn't 
equivalent to `constValue[Contains[X, y.type]]` + // since it also accepts cases unknown at compiletime. + // Also note it would be unsound to use a type parameter for `y` in the + // type level `Contains`, since it is rightfully not covariant in `Y`. + inline def contains(y: Any): Contains[X, y.type] = + x.productIterator.contains(y).asInstanceOf[Contains[X, y.type]] + + // TODO containsType ? end extension From 57b17accced114c61c0222099b69f044657c86df Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 13:58:58 +0200 Subject: [PATCH 313/465] Do `indexOf` runtime operation based on term equality and refine `type IndexOf` doc --- library/src/scala/Tuple.scala | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 21aa9dbf598f..7f6923c976dd 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -176,7 +176,7 @@ object Tuple: infix type ++[X <: Tuple, +Y <: Tuple] = Concat[X, Y] /** The index of `Y` in tuple `X` as a literal constant Int, - * or `Size[X]` if `Y` does not occur in `X` + * or `Size[X]` if `Y` is disjoint from all element types in `X`. */ type IndexOf[X <: Tuple, Y] <: Int = X match case Y *: _ => 0 @@ -332,21 +332,22 @@ object Tuple: runtime.Tuples.fromProduct(product) extension [X <: Tuple](inline x: X) + // Note the two methods are not equivalent to using `constValue`, + // since they also allow cases unknown at compiletime. + // Also note it would be unsound to use a type parameter for `y` in the type level + // operations, since they are rightfully not covariant in their second parameter. /** The index (starting at 0) of the first occurrence of y.type in the type `X` of `x` * or Size[X] if no such element exists. 
*/ - transparent inline def indexOf(y: Any): Int = constValue[IndexOf[X, y.type]] + inline def indexOf(y: Any): IndexOf[X, y.type] = + x.productIterator.indexOf(y).asInstanceOf[IndexOf[X, y.type]] /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` */ - // Note this isn't equivalent to `constValue[Contains[X, y.type]]` - // since it also accepts cases unknown at compiletime. - // Also note it would be unsound to use a type parameter for `y` in the - // type level `Contains`, since it is rightfully not covariant in `Y`. inline def contains(y: Any): Contains[X, y.type] = x.productIterator.contains(y).asInstanceOf[Contains[X, y.type]] - // TODO containsType ? + // TODO indexOfType & containsType ? end extension From f427ec96c2254ba34576482540dd759b3419ea0e Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 14:04:45 +0200 Subject: [PATCH 314/465] Do `filter` runtime operation based on a term level predicate --- library/src/scala/Tuple.scala | 13 ++++--------- tests/pos/named-tuples-strawman-2.scala | 5 ++++- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 7f6923c976dd..208f6b464286 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -90,15 +90,10 @@ sealed trait Tuple extends Product: inline def reverseOnto[This >: this.type <: Tuple, Acc <: Tuple](acc: Acc): ReverseOnto[This, Acc] = (this.reverse ++ acc).asInstanceOf[ReverseOnto[This, Acc]] - /** A tuple consisting of all elements of this tuple that have types - * for which the given type level predicate `P` reduces to the literal - * constant `true`. 
- */ - inline def filter[This >: this.type <: Tuple, P[_ <: Union[This]] <: Boolean]: Filter[This, P] = - val toInclude = constValueTuple[IndicesWhere[This, P]].toArray - val arr = new Array[Object](toInclude.length) - for i <- 0 until toInclude.length do - arr(i) = this.productElement(toInclude(i).asInstanceOf[Int]).asInstanceOf[Object] + /** A tuple consisting of all elements of this tuple that satisfy the predicate `p`. */ + inline def filter[This >: this.type <: Tuple, P[_ <: Union[This]] <: Boolean] + (p: (x: Union[This]) => P[x.type]): Filter[This, P] = + val arr = this.toArray.filter(x => p(x.asInstanceOf[Union[This]])) Tuple.fromArray(arr).asInstanceOf[Filter[This, P]] object Tuple: diff --git a/tests/pos/named-tuples-strawman-2.scala b/tests/pos/named-tuples-strawman-2.scala index 4b32dd83f2eb..7cd763bb7b00 100644 --- a/tests/pos/named-tuples-strawman-2.scala +++ b/tests/pos/named-tuples-strawman-2.scala @@ -60,7 +60,10 @@ object TupleOps: case EmptyTuple => X inline def concatDistinct[X <: Tuple, Y <: Tuple](xs: X, ys: Y): ConcatDistinct[X, Y] = - (xs ++ ys.filter[Y, [Elem] =>> ![Contains[X, Elem]]]).asInstanceOf[ConcatDistinct[X, Y]] + // Note the type parameter is needed due to the invariance of compiletime.ops.boolean.! 
+ extension [B <: Boolean](self: B) def negated: ![B] = (!self).asInstanceOf + val ysDistinct = ys.filter[Y, [y] =>> ![Contains[X, y]]](xs.contains(_).negated) + (xs ++ ysDistinct).asInstanceOf[ConcatDistinct[X, Y]] object NamedTupleDecomposition: import NamedTupleOps.* From 94b7c1f2f9f8996c8d5b68d28794349f3dcb2000 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 14:17:48 +0200 Subject: [PATCH 315/465] Mark `type Append` 2nd argument as covariant --- library/src/scala/Tuple.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 208f6b464286..e32a1bd8f124 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -154,12 +154,12 @@ object Tuple: type Split[X <: Tuple, N <: Int] = (Take[X, N], Drop[X, N]) /** Type of a tuple with an element appended */ - type Append[X <: Tuple, Y] <: NonEmptyTuple = X match + type Append[X <: Tuple, +Y] <: NonEmptyTuple = X match case EmptyTuple => Y *: EmptyTuple case x *: xs => x *: Append[xs, Y] /** An infix shorthand for `Append[X, Y]` */ - infix type :*[X <: Tuple, Y] = Append[X, Y] + infix type :*[X <: Tuple, +Y] = Append[X, Y] /** Type of the concatenation of two tuples `X` and `Y` */ // Can be covariant in `Y` since it never appears as a match type scrutinee. From 5df2120e841955ab2b30e5e7fd1c6bf5282ae3c7 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 14:52:42 +0200 Subject: [PATCH 316/465] Move `NonEmptyTuple` methods into `Tuple` This is for the same reason as we changed `type Head[X <: NonEmptyTuple] = ...` to `type Head[X <: Tuple] = ...` Also, this is no more unsafe than the other operations already defined for all tuples. `drop(1)` for example was always defined, even though `tail` wasn't. 
--- library/src/scala/Tuple.scala | 52 +++++++++++++------------- library/src/scala/runtime/Tuples.scala | 8 ++-- 2 files changed, 29 insertions(+), 31 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index e32a1bd8f124..1c009e4d65e6 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -31,6 +31,30 @@ sealed trait Tuple extends Product: inline def *: [H, This >: this.type <: Tuple](x: H): H *: This = runtime.Tuples.cons(x, this).asInstanceOf[H *: This] + /** Get the i-th element of this tuple. + * Equivalent to productElement but with a precise return type. + */ + inline def apply[This >: this.type <: Tuple](n: Int): Elem[This, n.type] = + runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] + + /** Get the head of this tuple */ + inline def head[This >: this.type <: Tuple]: Head[This] = + runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] + + /** Get the initial part of the tuple without its last element */ + inline def init[This >: this.type <: Tuple]: Init[This] = + runtime.Tuples.init(this).asInstanceOf[Init[This]] + + /** Get the last of this tuple */ + inline def last[This >: this.type <: Tuple]: Last[This] = + runtime.Tuples.last(this).asInstanceOf[Last[This]] + + /** Get the tail of this tuple. + * This operation is O(this.size) + */ + inline def tail[This >: this.type <: Tuple]: Tail[This] = + runtime.Tuples.tail(this).asInstanceOf[Tail[This]] + /** Return a new tuple by concatenating `this` tuple with `that` tuple. * This operation is O(this.size + that.size) */ @@ -375,33 +399,7 @@ case object EmptyTuple extends Tuple { } /** Tuple of arbitrary non-zero arity */ -sealed trait NonEmptyTuple extends Tuple { - import Tuple.* - - /** Get the i-th element of this tuple. - * Equivalent to productElement but with a precise return type. 
- */ - inline def apply[This >: this.type <: NonEmptyTuple](n: Int): Elem[This, n.type] = - runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] - - /** Get the head of this tuple */ - inline def head[This >: this.type <: NonEmptyTuple]: Head[This] = - runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] - - /** Get the initial part of the tuple without its last element */ - inline def init[This >: this.type <: NonEmptyTuple]: Init[This] = - runtime.Tuples.init(this).asInstanceOf[Init[This]] - - /** Get the last of this tuple */ - inline def last[This >: this.type <: NonEmptyTuple]: Last[This] = - runtime.Tuples.last(this).asInstanceOf[Last[This]] - - /** Get the tail of this tuple. - * This operation is O(this.size) - */ - inline def tail[This >: this.type <: NonEmptyTuple]: Tail[This] = - runtime.Tuples.tail(this).asInstanceOf[Tail[This]] -} +sealed trait NonEmptyTuple extends Tuple @showAsInfix sealed abstract class *:[+H, +T <: Tuple] extends NonEmptyTuple diff --git a/library/src/scala/runtime/Tuples.scala b/library/src/scala/runtime/Tuples.scala index be6904b9d1d0..8da21c777943 100644 --- a/library/src/scala/runtime/Tuples.scala +++ b/library/src/scala/runtime/Tuples.scala @@ -357,7 +357,7 @@ object Tuples { } } - def tail(self: NonEmptyTuple): Tuple = (self: Any) match { + def tail(self: Tuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlTail(xxl) case _ => specialCaseTail(self) } @@ -565,16 +565,16 @@ object Tuples { } } - def init(self: NonEmptyTuple): Tuple = (self: Any) match { + def init(self: Tuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlInit(xxl) case _ => specialCaseInit(self) } - def last(self: NonEmptyTuple): Any = (self: Any) match { + def last(self: Tuple): Any = (self: Any) match { case self: Product => self.productElement(self.productArity - 1) } - def apply(self: NonEmptyTuple, n: Int): Any = + def apply(self: Tuple, n: Int): Any = self.productElement(n) // Benchmarks showed that this is faster than doing 
(it1 zip it2).copyToArray(...) From 40c61388a9e7cae8777b67fbfd4e738c1374ced0 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 15:23:36 +0200 Subject: [PATCH 317/465] Reorder operations to be same between term and type level --- library/src/scala/Tuple.scala | 202 +++++++++++++++++----------------- 1 file changed, 101 insertions(+), 101 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 1c009e4d65e6..7f9e220b2bf2 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -21,18 +21,22 @@ sealed trait Tuple extends Product: inline def toIArray: IArray[Object] = runtime.Tuples.toIArray(this) - /** Return a copy of `this` tuple with an element appended */ - inline def :* [This >: this.type <: Tuple, L](x: L): This :* L = - runtime.Tuples.append(x, this).asInstanceOf[Append[This, L]] - /** Return a new tuple by prepending the element to `this` tuple. * This operation is O(this.size) */ inline def *: [H, This >: this.type <: Tuple](x: H): H *: This = runtime.Tuples.cons(x, this).asInstanceOf[H *: This] + /** Return a copy of `this` tuple with an element appended */ + inline def :* [This >: this.type <: Tuple, L](x: L): This :* L = + runtime.Tuples.append(x, this).asInstanceOf[Append[This, L]] + + /** Return the size (or arity) of the tuple */ + inline def size[This >: this.type <: Tuple]: Size[This] = + runtime.Tuples.size(this).asInstanceOf[Size[This]] + /** Get the i-th element of this tuple. - * Equivalent to productElement but with a precise return type. + * Equivalent to productElement but with a precise return type. 
*/ inline def apply[This >: this.type <: Tuple](n: Int): Elem[This, n.type] = runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] @@ -41,19 +45,38 @@ sealed trait Tuple extends Product: inline def head[This >: this.type <: Tuple]: Head[This] = runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] - /** Get the initial part of the tuple without its last element */ - inline def init[This >: this.type <: Tuple]: Init[This] = - runtime.Tuples.init(this).asInstanceOf[Init[This]] + /** Get the tail of this tuple. + * This operation is O(this.size) + */ + inline def tail[This >: this.type <: Tuple]: Tail[This] = + runtime.Tuples.tail(this).asInstanceOf[Tail[This]] /** Get the last of this tuple */ inline def last[This >: this.type <: Tuple]: Last[This] = runtime.Tuples.last(this).asInstanceOf[Last[This]] - /** Get the tail of this tuple. - * This operation is O(this.size) + /** Get the initial part of the tuple without its last element */ + inline def init[This >: this.type <: Tuple]: Init[This] = + runtime.Tuples.init(this).asInstanceOf[Init[This]] + + /** Given a tuple `(a1, ..., am)`, returns the tuple `(a1, ..., an)` consisting + * of its first n elements. */ - inline def tail[This >: this.type <: Tuple]: Tail[This] = - runtime.Tuples.tail(this).asInstanceOf[Tail[This]] + inline def take[This >: this.type <: Tuple](n: Int): Take[This, n.type] = + runtime.Tuples.take(this, n).asInstanceOf[Take[This, n.type]] + + /** Given a tuple `(a1, ..., am)`, returns the tuple `(an+1, ..., am)` consisting + * all its elements except the first n ones. + */ + inline def drop[This >: this.type <: Tuple](n: Int): Drop[This, n.type] = + runtime.Tuples.drop(this, n).asInstanceOf[Drop[This, n.type]] + + /** Given a tuple `(a1, ..., am)`, returns a pair of the tuple `(a1, ..., an)` + * consisting of the first n elements, and the tuple `(an+1, ..., am)` consisting + * of the remaining elements. 
+ */ + inline def splitAt[This >: this.type <: Tuple](n: Int): Split[This, n.type] = + runtime.Tuples.splitAt(this, n).asInstanceOf[Split[This, n.type]] /** Return a new tuple by concatenating `this` tuple with `that` tuple. * This operation is O(this.size + that.size) @@ -63,10 +86,6 @@ sealed trait Tuple extends Product: inline def ++ [This >: this.type <: Tuple](that: Tuple): This ++ that.type = runtime.Tuples.concat(this, that).asInstanceOf[Concat[This, that.type]] - /** Return the size (or arity) of the tuple */ - inline def size[This >: this.type <: Tuple]: Size[This] = - runtime.Tuples.size(this).asInstanceOf[Size[This]] - /** Given two tuples, `(a1, ..., an)` and `(a1, ..., an)`, returns a tuple * `((a1, b1), ..., (an, bn))`. If the two tuples have different sizes, * the extra elements of the larger tuple will be disregarded. @@ -85,24 +104,11 @@ sealed trait Tuple extends Product: inline def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] = runtime.Tuples.map(this, f).asInstanceOf[Map[this.type, F]] - /** Given a tuple `(a1, ..., am)`, returns the tuple `(a1, ..., an)` consisting - * of its first n elements. - */ - inline def take[This >: this.type <: Tuple](n: Int): Take[This, n.type] = - runtime.Tuples.take(this, n).asInstanceOf[Take[This, n.type]] - - /** Given a tuple `(a1, ..., am)`, returns the tuple `(an+1, ..., am)` consisting - * all its elements except the first n ones. - */ - inline def drop[This >: this.type <: Tuple](n: Int): Drop[This, n.type] = - runtime.Tuples.drop(this, n).asInstanceOf[Drop[This, n.type]] - - /** Given a tuple `(a1, ..., am)`, returns a pair of the tuple `(a1, ..., an)` - * consisting of the first n elements, and the tuple `(an+1, ..., am)` consisting - * of the remaining elements. - */ - inline def splitAt[This >: this.type <: Tuple](n: Int): Split[This, n.type] = - runtime.Tuples.splitAt(this, n).asInstanceOf[Split[This, n.type]] + /** A tuple consisting of all elements of this tuple that satisfy the predicate `p`. 
*/ + inline def filter[This >: this.type <: Tuple, P[_ <: Union[This]] <: Boolean] + (p: (x: Union[This]) => P[x.type]): Filter[This, P] = + val arr = this.toArray.filter(x => p(x.asInstanceOf[Union[This]])) + Tuple.fromArray(arr).asInstanceOf[Filter[This, P]] /** Given a tuple `(a1, ..., am)`, returns the reversed tuple `(am, ..., a1)` * consisting all its elements. @@ -114,14 +120,16 @@ sealed trait Tuple extends Product: inline def reverseOnto[This >: this.type <: Tuple, Acc <: Tuple](acc: Acc): ReverseOnto[This, Acc] = (this.reverse ++ acc).asInstanceOf[ReverseOnto[This, Acc]] - /** A tuple consisting of all elements of this tuple that satisfy the predicate `p`. */ - inline def filter[This >: this.type <: Tuple, P[_ <: Union[This]] <: Boolean] - (p: (x: Union[This]) => P[x.type]): Filter[This, P] = - val arr = this.toArray.filter(x => p(x.asInstanceOf[Union[This]])) - Tuple.fromArray(arr).asInstanceOf[Filter[This, P]] - object Tuple: + /** Type of a tuple with an element appended */ + type Append[X <: Tuple, +Y] <: NonEmptyTuple = X match + case EmptyTuple => Y *: EmptyTuple + case x *: xs => x *: Append[xs, Y] + + /** An infix shorthand for `Append[X, Y]` */ + infix type :*[X <: Tuple, +Y] = Append[X, Y] + /** The size of a tuple, represented as a literal constant subtype of Int */ type Size[X <: Tuple] <: Int = X match case EmptyTuple => 0 @@ -142,15 +150,15 @@ object Tuple: type Head[X <: Tuple] = X match case x *: _ => x + /** The type of a tuple consisting of all elements of tuple X except the first one */ + type Tail[X <: Tuple] <: Tuple = X match + case _ *: xs => xs + /** The type of the last element of a tuple */ type Last[X <: Tuple] = X match case x *: EmptyTuple => x case _ *: xs => Last[xs] - /** The type of a tuple consisting of all elements of tuple X except the first one */ - type Tail[X <: Tuple] <: Tuple = X match - case _ *: xs => xs - /** The type of the initial part of a tuple without its last element */ type Init[X <: Tuple] <: Tuple = X 
match case _ *: EmptyTuple => EmptyTuple @@ -177,14 +185,6 @@ object Tuple: /** The pair type `(Take(X, N), Drop[X, N]). */ type Split[X <: Tuple, N <: Int] = (Take[X, N], Drop[X, N]) - /** Type of a tuple with an element appended */ - type Append[X <: Tuple, +Y] <: NonEmptyTuple = X match - case EmptyTuple => Y *: EmptyTuple - case x *: xs => x *: Append[xs, Y] - - /** An infix shorthand for `Append[X, Y]` */ - infix type :*[X <: Tuple, +Y] = Append[X, Y] - /** Type of the concatenation of two tuples `X` and `Y` */ // Can be covariant in `Y` since it never appears as a match type scrutinee. type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match @@ -194,18 +194,25 @@ object Tuple: /** An infix shorthand for `Concat[X, Y]` */ infix type ++[X <: Tuple, +Y <: Tuple] = Concat[X, Y] - /** The index of `Y` in tuple `X` as a literal constant Int, - * or `Size[X]` if `Y` is disjoint from all element types in `X`. + /** The type of the tuple consisting of all element values of + * tuple `X` zipped with corresponding elements of tuple `Y`. + * If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * For example, if + * ``` + * X = (S1, ..., Si) + * Y = (T1, ..., Tj) where j >= i + * ``` + * then + * ``` + * Zip[X, Y] = ((S1, T1), ..., (Si, Ti)) + * ``` + * @syntax markdown */ - type IndexOf[X <: Tuple, Y] <: Int = X match - case Y *: _ => 0 - case _ *: xs => S[IndexOf[xs, Y]] - case EmptyTuple => 0 - - /** Fold a tuple `(T1, ..., Tn)` into `F[T1, F[... F[Tn, Z]...]]]` */ - type Fold[X <: Tuple, Z, F[_, _]] = X match - case EmptyTuple => Z - case x *: xs => F[x, Fold[xs, Z, F]] + type Zip[X <: Tuple, Y <: Tuple] <: Tuple = (X, Y) match + case (x *: xs, y *: ys) => (x, y) *: Zip[xs, ys] + case (EmptyTuple, _) => EmptyTuple + case (_, EmptyTuple) => EmptyTuple /** The type of tuple `X` mapped with the type-level function `F`. * If `X = (T1, ..., Ti)` then `Map[X, F] = `(F[T1], ..., F[Ti])`. 
@@ -214,6 +221,18 @@ object Tuple: case EmptyTuple => EmptyTuple case x *: xs => F[x] *: Map[xs, F] + /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... Tn)` */ + type InverseMap[X <: Tuple, F[_]] <: Tuple = X match + case F[x] *: xs => x *: InverseMap[xs, F] + case EmptyTuple => EmptyTuple + + /** Implicit evidence. IsMappedBy[F][X] is present in the implicit scope iff + * X is a tuple for which each element's type is constructed via `F`. E.g. + * (F[A1], ..., F[An]), but not `(F[A1], B2, ..., F[An])` where B2 does not + * have the shape of `F[A]`. + */ + type IsMappedBy[F[_]] = [X <: Tuple] =>> X =:= Map[InverseMap[X, F], F] + /** The type of tuple `X` flat-mapped with the type-level function `F`. * If `X = (T1, ..., Ti)` then `FlatMap[X, F] = `F[T1] ++ ... ++ F[Ti]` */ @@ -241,44 +260,6 @@ object Tuple: case true => x *: Filter[xs, P] case false => Filter[xs, P] - /** A tuple consisting of those indices `N` of tuple `X` where the predicate `P` - * is true for `Elem[X, N]`. Indices are type level values <: Int. - */ - type IndicesWhere[X <: Tuple, P[_ <: Union[X]] <: Boolean] = - helpers.IndicesWhereHelper[X, P, 0] - - /** The type of the tuple consisting of all element values of - * tuple `X` zipped with corresponding elements of tuple `Y`. - * If the two tuples have different sizes, - * the extra elements of the larger tuple will be disregarded. - * For example, if - * ``` - * X = (S1, ..., Si) - * Y = (T1, ..., Tj) where j >= i - * ``` - * then - * ``` - * Zip[X, Y] = ((S1, T1), ..., (Si, Ti)) - * ``` - * @syntax markdown - */ - type Zip[X <: Tuple, Y <: Tuple] <: Tuple = (X, Y) match - case (x *: xs, y *: ys) => (x, y) *: Zip[xs, ys] - case (EmptyTuple, _) => EmptyTuple - case (_, EmptyTuple) => EmptyTuple - - /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... Tn)` */ - type InverseMap[X <: Tuple, F[_]] <: Tuple = X match - case F[x] *: xs => x *: InverseMap[xs, F] - case EmptyTuple => EmptyTuple - - /** Implicit evidence. 
IsMappedBy[F][X] is present in the implicit scope iff - * X is a tuple for which each element's type is constructed via `F`. E.g. - * (F[A1], ..., F[An]), but not `(F[A1], B2, ..., F[An])` where B2 does not - * have the shape of `F[A]`. - */ - type IsMappedBy[F[_]] = [X <: Tuple] =>> X =:= Map[InverseMap[X, F], F] - /** A tuple with the elements of tuple `X` in reversed order */ type Reverse[X <: Tuple] = ReverseOnto[X, EmptyTuple] @@ -287,11 +268,30 @@ object Tuple: case x *: xs => ReverseOnto[xs, x *: Acc] case EmptyTuple => Acc + /** Fold a tuple `(T1, ..., Tn)` into `F[T1, F[... F[Tn, Z]...]]]` */ + type Fold[X <: Tuple, Z, F[_, _]] = X match + case EmptyTuple => Z + case x *: xs => F[x, Fold[xs, Z, F]] + /** Given a tuple `(T1, ..., Tn)`, returns a union of its * member types: `T1 | ... | Tn`. Returns `Nothing` if the tuple is empty. */ type Union[T <: Tuple] = Fold[T, Nothing, [x, y] =>> x | y] + /** The index of `Y` in tuple `X` as a literal constant Int, + * or `Size[X]` if `Y` is disjoint from all element types in `X`. + */ + type IndexOf[X <: Tuple, Y] <: Int = X match + case Y *: _ => 0 + case _ *: xs => S[IndexOf[xs, Y]] + case EmptyTuple => 0 + + /** A tuple consisting of those indices `N` of tuple `X` where the predicate `P` + * is true for `Elem[X, N]`. Indices are type level values <: Int. + */ + type IndicesWhere[X <: Tuple, P[_ <: Union[X]] <: Boolean] = + helpers.IndicesWhereHelper[X, P, 0] + /** A type level Boolean indicating whether the tuple `X` has an element * that matches `Y`. 
* @pre The elements of `X` are assumed to be singleton types From 03509b8c09c519cc9fc8e5d9479ce939d23642bc Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 15:34:25 +0200 Subject: [PATCH 318/465] Drop braces remaining at term level --- library/src/scala/Tuple.scala | 24 ++++++++---------------- 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 7f9e220b2bf2..a9401ea1be7e 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -246,10 +246,9 @@ object Tuple: * constant `true`. A predicate `P[X]` is a type that can be either `true` * or `false`. For example: * ```scala - * type IsString[x] <: Boolean = x match { + * type IsString[x] <: Boolean = x match * case String => true * case _ => false - * } * summon[Tuple.Filter[(1, "foo", 2, "bar"), IsString] =:= ("foo", "bar")] * ``` * @syntax markdown @@ -325,26 +324,21 @@ object Tuple: fromArray(xs, xs.length) /** Convert the first `n` elements of an array into a tuple of unknown arity and types */ - def fromArray[T](xs: Array[T], n: Int): Tuple = { - val xs2 = xs match { + def fromArray[T](xs: Array[T], n: Int): Tuple = + val xs2 = xs match case xs: Array[Object] => xs case xs => xs.map(_.asInstanceOf[Object]) - } runtime.Tuples.fromArray(xs2, n) - } /** Convert an immutable array into a tuple of unknown arity and types */ def fromIArray[T](xs: IArray[T]): Tuple = fromIArray(xs, xs.length) /** Convert the first `n` elements of an immutable array into a tuple of unknown arity and types */ - def fromIArray[T](xs: IArray[T], n: Int): Tuple = { - val xs2: IArray[Object] = xs match { + def fromIArray[T](xs: IArray[T], n: Int): Tuple = + val xs2: IArray[Object] = xs match case xs: IArray[Object] @unchecked => xs - case _ => - xs.map(_.asInstanceOf[Object]) - } + case _ => xs.map(_.asInstanceOf[Object]) runtime.Tuples.fromIArray(xs2, n) - } /** Convert a Product into a tuple of unknown arity and types 
*/ def fromProduct(product: Product): Tuple = @@ -394,9 +388,8 @@ end Tuple type EmptyTuple = EmptyTuple.type /** A tuple of 0 elements. */ -case object EmptyTuple extends Tuple { +case object EmptyTuple extends Tuple: override def toString(): String = "()" -} /** Tuple of arbitrary non-zero arity */ sealed trait NonEmptyTuple extends Tuple @@ -404,6 +397,5 @@ sealed trait NonEmptyTuple extends Tuple @showAsInfix sealed abstract class *:[+H, +T <: Tuple] extends NonEmptyTuple -object *: { +object `*:`: def unapply[H, T <: Tuple](x: H *: T): (H, T) = (x.head, x.tail) -} From 9047ac3d216b7231e4a3233de63a5afc4ce4e318 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 1 May 2024 14:09:50 +0200 Subject: [PATCH 319/465] Fix `def indexOf` to return the size instead of -1 --- library/src/scala/Tuple.scala | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index a9401ea1be7e..92460c078b54 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -350,13 +350,14 @@ object Tuple: // Also note it would be unsound to use a type parameter for `y` in the type level // operations, since they are rightfully not covariant in their second parameter. - /** The index (starting at 0) of the first occurrence of y.type in the type `X` of `x` - * or Size[X] if no such element exists. + /** The index (starting at 0) of the first occurrence of `y` in `x` + * or its size if no such element exists. 
*/ inline def indexOf(y: Any): IndexOf[X, y.type] = - x.productIterator.indexOf(y).asInstanceOf[IndexOf[X, y.type]] + val i = x.productIterator.indexOf(y) + (if i >= 0 then i else x.size).asInstanceOf[IndexOf[X, y.type]] - /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` */ + /** A boolean indicating whether `x` contains the element `y` */ inline def contains(y: Any): Contains[X, y.type] = x.productIterator.contains(y).asInstanceOf[Contains[X, y.type]] From 579e14ab8bdb1a7e522ca4a5eb9fe87751a3098f Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 1 May 2024 18:23:16 +0200 Subject: [PATCH 320/465] Revert doing tuple runtime operations based on a term level predicates --- library/src/scala/Tuple.scala | 32 +++++++++++-------------- tests/pos/named-tuples-strawman-2.scala | 5 +--- 2 files changed, 15 insertions(+), 22 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 92460c078b54..364124481db7 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -104,10 +104,15 @@ sealed trait Tuple extends Product: inline def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] = runtime.Tuples.map(this, f).asInstanceOf[Map[this.type, F]] - /** A tuple consisting of all elements of this tuple that satisfy the predicate `p`. */ - inline def filter[This >: this.type <: Tuple, P[_ <: Union[This]] <: Boolean] - (p: (x: Union[This]) => P[x.type]): Filter[This, P] = - val arr = this.toArray.filter(x => p(x.asInstanceOf[Union[This]])) + /** A tuple consisting of all elements of this tuple that have types + * for which the given type level predicate `P` reduces to the literal + * constant `true`. 
+ */ + inline def filter[This >: this.type <: Tuple, P[_ <: Union[This]] <: Boolean]: Filter[This, P] = + val toInclude = constValueTuple[IndicesWhere[This, P]].toArray + val arr = new Array[Object](toInclude.length) + for i <- toInclude.indices do + arr(i) = this.productElement(toInclude(i).asInstanceOf[Int]).asInstanceOf[Object] Tuple.fromArray(arr).asInstanceOf[Filter[This, P]] /** Given a tuple `(a1, ..., am)`, returns the reversed tuple `(am, ..., a1)` @@ -345,23 +350,14 @@ object Tuple: runtime.Tuples.fromProduct(product) extension [X <: Tuple](inline x: X) - // Note the two methods are not equivalent to using `constValue`, - // since they also allow cases unknown at compiletime. - // Also note it would be unsound to use a type parameter for `y` in the type level - // operations, since they are rightfully not covariant in their second parameter. - /** The index (starting at 0) of the first occurrence of `y` in `x` - * or its size if no such element exists. + /** The index (starting at 0) of the first occurrence of `y.type` in the type `X` of `x` + * or `Size[X]` if no such element exists. */ - inline def indexOf(y: Any): IndexOf[X, y.type] = - val i = x.productIterator.indexOf(y) - (if i >= 0 then i else x.size).asInstanceOf[IndexOf[X, y.type]] - - /** A boolean indicating whether `x` contains the element `y` */ - inline def contains(y: Any): Contains[X, y.type] = - x.productIterator.contains(y).asInstanceOf[Contains[X, y.type]] + inline def indexOf(y: Any): IndexOf[X, y.type] = constValue[IndexOf[X, y.type]] - // TODO indexOfType & containsType ? 
+ /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` */ + inline def contains(y: Any): Contains[X, y.type] = constValue[Contains[X, y.type]] end extension diff --git a/tests/pos/named-tuples-strawman-2.scala b/tests/pos/named-tuples-strawman-2.scala index 7cd763bb7b00..4b32dd83f2eb 100644 --- a/tests/pos/named-tuples-strawman-2.scala +++ b/tests/pos/named-tuples-strawman-2.scala @@ -60,10 +60,7 @@ object TupleOps: case EmptyTuple => X inline def concatDistinct[X <: Tuple, Y <: Tuple](xs: X, ys: Y): ConcatDistinct[X, Y] = - // Note the type parameter is needed due to the invariance of compiletime.ops.boolean.! - extension [B <: Boolean](self: B) def negated: ![B] = (!self).asInstanceOf - val ysDistinct = ys.filter[Y, [y] =>> ![Contains[X, y]]](xs.contains(_).negated) - (xs ++ ysDistinct).asInstanceOf[ConcatDistinct[X, Y]] + (xs ++ ys.filter[Y, [Elem] =>> ![Contains[X, Elem]]]).asInstanceOf[ConcatDistinct[X, Y]] object NamedTupleDecomposition: import NamedTupleOps.* From 8d6fa37248efb27920c4ac16e129c63dda0c46a4 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 1 May 2024 18:51:40 +0200 Subject: [PATCH 321/465] Make named-tuples-strawman-2.scala a run test The tuple term level definitions were not being tested before --- compiler/test/dotc/pos-test-pickling.blacklist | 1 - compiler/test/dotc/run-test-pickling.blacklist | 1 + tests/{pos => run}/named-tuples-strawman-2.scala | 0 3 files changed, 1 insertion(+), 1 deletion(-) rename tests/{pos => run}/named-tuples-strawman-2.scala (100%) diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 94e510e04396..a856a5b84d92 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -67,7 +67,6 @@ mt-redux-norm.perspective.scala i18211.scala 10867.scala named-tuples1.scala -named-tuples-strawman-2.scala # Opaque type i5720.scala diff --git 
a/compiler/test/dotc/run-test-pickling.blacklist b/compiler/test/dotc/run-test-pickling.blacklist index 954a64db1b66..dacbc63bb520 100644 --- a/compiler/test/dotc/run-test-pickling.blacklist +++ b/compiler/test/dotc/run-test-pickling.blacklist @@ -45,4 +45,5 @@ t6138-2 i12656.scala trait-static-forwarder i17255 +named-tuples-strawman-2.scala diff --git a/tests/pos/named-tuples-strawman-2.scala b/tests/run/named-tuples-strawman-2.scala similarity index 100% rename from tests/pos/named-tuples-strawman-2.scala rename to tests/run/named-tuples-strawman-2.scala From 1e29c4f3ffa2257eee7a691f7910edca97d3bda2 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 2 May 2024 16:59:05 +0200 Subject: [PATCH 322/465] import language.experimental.namedTuples in pos/fieldsOf.scala --- tests/pos/fieldsOf.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/pos/fieldsOf.scala b/tests/pos/fieldsOf.scala index 08f20a1f7e8e..2594dae2cbf7 100644 --- a/tests/pos/fieldsOf.scala +++ b/tests/pos/fieldsOf.scala @@ -1,3 +1,5 @@ +import language.experimental.namedTuples + case class Person(name: String, age: Int) type PF = NamedTuple.From[Person] From 1cdf99f5cf77fc4304bb2cf3901a18e7becb90c2 Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Tue, 7 May 2024 01:22:07 +0200 Subject: [PATCH 323/465] chore: Backport changes for presentation compiler (#20345) Changes from: - https://github.com/scalameta/metals/pull/6225 - https://github.com/scalameta/metals/pull/6236 - https://github.com/scalameta/metals/pull/6241 - https://github.com/scalameta/metals/pull/6303 - https://github.com/scalameta/metals/pull/6308 --- .../main/dotty/tools/pc/HoverProvider.scala | 19 +- .../dotty/tools/pc/PcInlayHintsProvider.scala | 160 +++++++++------- .../tools/pc/ScalaPresentationCompiler.scala | 2 +- .../pc/completions/MatchCaseCompletions.scala | 3 +- .../pc/utils/InteractiveEnrichments.scala | 4 +- .../tools/pc/base/BaseInlayHintsSuite.scala | 6 +- .../completion/CompletionCaseSuite.scala | 37 +++- 
.../pc/tests/hover/HoverPlainTextSuite.scala | 90 +++++++++ .../pc/tests/inlayHints/InlayHintsSuite.scala | 172 +++++++++++++++++- .../tools/pc/utils/MockSymbolSearch.scala | 7 + project/Build.scala | 2 +- 11 files changed, 409 insertions(+), 93 deletions(-) create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/hover/HoverPlainTextSuite.scala diff --git a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala index c0f2d49d341e..fd363dbd37a2 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala @@ -5,6 +5,7 @@ import java.util as ju import scala.meta.internal.metals.Report import scala.meta.internal.metals.ReportContext import scala.meta.internal.pc.ScalaHover +import scala.meta.pc.ContentType import scala.meta.pc.HoverSignature import scala.meta.pc.OffsetParams import scala.meta.pc.SymbolSearch @@ -30,7 +31,8 @@ object HoverProvider: def hover( params: OffsetParams, driver: InteractiveDriver, - search: SymbolSearch + search: SymbolSearch, + contentType: ContentType )(implicit reportContext: ReportContext): ju.Optional[HoverSignature] = val uri = params.uri().nn val text = params.text().nn @@ -101,10 +103,10 @@ object HoverProvider: skipCheckOnName ) match case Nil => - fallbackToDynamics(path, printer) + fallbackToDynamics(path, printer, contentType) case (symbol, tpe) :: _ if symbol.name == nme.selectDynamic || symbol.name == nme.applyDynamic => - fallbackToDynamics(path, printer) + fallbackToDynamics(path, printer, contentType) case symbolTpes @ ((symbol, tpe) :: _) => val exprTpw = tpe.widenTermRefExpr.deepDealias val hoverString = @@ -126,7 +128,7 @@ object HoverProvider: end hoverString val docString = symbolTpes - .flatMap(symTpe => search.symbolDocumentation(symTpe._1)) + .flatMap(symTpe => search.symbolDocumentation(symTpe._1, contentType)) .map(_.docstring()) 
.mkString("\n") printer.expressionType(exprTpw) match @@ -144,7 +146,8 @@ object HoverProvider: symbolSignature = Some(hoverString), docstring = Some(docString), forceExpressionType = forceExpressionType, - contextInfo = printer.getUsedRenamesInfo + contextInfo = printer.getUsedRenamesInfo, + contentType = contentType ) ).nn case _ => @@ -159,7 +162,8 @@ object HoverProvider: private def fallbackToDynamics( path: List[Tree], - printer: ShortenedTypePrinter + printer: ShortenedTypePrinter, + contentType: ContentType )(using Context): ju.Optional[HoverSignature] = path match case SelectDynamicExtractor(sel, n, name) => def findRefinement(tp: Type): Option[HoverSignature] = @@ -178,7 +182,8 @@ object HoverProvider: new ScalaHover( expressionType = Some(tpeString), symbolSignature = Some(s"$valOrDef $name$tpeString"), - contextInfo = printer.getUsedRenamesInfo + contextInfo = printer.getUsedRenamesInfo, + contentType = contentType ) ) case RefinedType(parent, _, _) => diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala index 70aaa82eae05..c4fdb97c0418 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala @@ -42,6 +42,7 @@ class PcInlayHintsProvider( val source = SourceFile.virtual(filePath.toString, sourceText) driver.run(uri, source) + given InlayHintsParams = params given InferredType.Text = InferredType.Text(text) given ctx: Context = driver.currentCtx @@ -65,7 +66,7 @@ class PcInlayHintsProvider( tree: Tree, ): InlayHints = tree match - case ImplicitConversion(symbol, range) if params.implicitConversions() => + case ImplicitConversion(symbol, range) => val adjusted = adjustPos(range) inlayHints .add( @@ -78,8 +79,7 @@ class PcInlayHintsProvider( LabelPart(")") :: Nil, InlayHintKind.Parameter, ) - case ImplicitParameters(symbols, pos, allImplicit) - 
if params.implicitParameters() => + case ImplicitParameters(symbols, pos, allImplicit) => val labelParts = symbols.map(s => List(labelPart(s, s.decodedName))) val label = if allImplicit then labelParts.separated("(using ", ", ", ")") @@ -89,14 +89,14 @@ class PcInlayHintsProvider( label, InlayHintKind.Parameter, ) - case ValueOf(label, pos) if params.implicitParameters() => + case ValueOf(label, pos) => inlayHints.add( adjustPos(pos).toLsp, LabelPart("(") :: LabelPart(label) :: List(LabelPart(")")), InlayHintKind.Parameter, ) case TypeParameters(tpes, pos, sel) - if params.typeParameters() && !syntheticTupleApply(sel) => + if !syntheticTupleApply(sel) => val label = tpes.map(toLabelParts(_, pos)).separated("[", ", ", "]") inlayHints.add( adjustPos(pos).endPos.toLsp, @@ -104,7 +104,7 @@ class PcInlayHintsProvider( InlayHintKind.Type, ) case InferredType(tpe, pos, defTree) - if params.inferredTypes() && !isErrorTpe(tpe) => + if !isErrorTpe(tpe) => val adjustedPos = adjustPos(pos).endPos if inlayHints.containsDef(adjustedPos.start) then inlayHints else @@ -191,14 +191,16 @@ class PcInlayHintsProvider( end PcInlayHintsProvider object ImplicitConversion: - def unapply(tree: Tree)(using Context) = - tree match - case Apply(fun: Ident, args) if isSynthetic(fun) => - implicitConversion(fun, args) - case Apply(Select(fun, name), args) - if name == nme.apply && isSynthetic(fun) => - implicitConversion(fun, args) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = + if (params.implicitConversions()) { + tree match + case Apply(fun: Ident, args) if isSynthetic(fun) => + implicitConversion(fun, args) + case Apply(Select(fun, name), args) + if name == nme.apply && isSynthetic(fun) => + implicitConversion(fun, args) + case _ => None + } else None private def isSynthetic(tree: Tree)(using Context) = tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) @@ -212,52 +214,64 @@ object ImplicitConversion: end ImplicitConversion 
object ImplicitParameters: - def unapply(tree: Tree)(using Context) = - tree match - case Apply(fun, args) - if args.exists(isSyntheticArg) && !tree.sourcePos.span.isZeroExtent => - val (implicitArgs, providedArgs) = args.partition(isSyntheticArg) - val allImplicit = providedArgs.isEmpty || providedArgs.forall { - case Ident(name) => name == nme.MISSING - case _ => false - } - val pos = implicitArgs.head.sourcePos - Some(implicitArgs.map(_.symbol), pos, allImplicit) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = + if (params.implicitParameters()) { + tree match + case Apply(fun, args) + if args.exists(isSyntheticArg) && !tree.sourcePos.span.isZeroExtent => + val (implicitArgs, providedArgs) = args.partition(isSyntheticArg) + val allImplicit = providedArgs.isEmpty || providedArgs.forall { + case Ident(name) => name == nme.MISSING + case _ => false + } + val pos = implicitArgs.head.sourcePos + Some(implicitArgs.map(_.symbol), pos, allImplicit) + case _ => None + } else None private def isSyntheticArg(tree: Tree)(using Context) = tree match case tree: Ident => - tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) + tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) && + !isQuotes(tree) case _ => false + + // Decorations for Quotes are rarely useful + private def isQuotes(tree: Tree)(using Context) = + tree.tpe.typeSymbol == defn.QuotesClass + end ImplicitParameters object ValueOf: - def unapply(tree: Tree)(using Context) = - tree match - case Apply(ta @ TypeApply(fun, _), _) - if fun.span.isSynthetic && isValueOf(fun) => - Some( - "new " + tpnme.valueOf.decoded.capitalize + "(...)", - fun.sourcePos, - ) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = + if (params.implicitParameters()) { + tree match + case Apply(ta @ TypeApply(fun, _), _) + if fun.span.isSynthetic && isValueOf(fun) => + Some( + "new " + tpnme.valueOf.decoded.capitalize + "(...)", + 
fun.sourcePos, + ) + case _ => None + } else None private def isValueOf(tree: Tree)(using Context) = val symbol = tree.symbol.maybeOwner symbol.name.decoded == tpnme.valueOf.decoded.capitalize end ValueOf object TypeParameters: - def unapply(tree: Tree)(using Context) = - tree match - case TypeApply(sel: Select, _) if sel.isForComprehensionMethod => None - case TypeApply(fun, args) if inferredTypeArgs(args) => - val pos = fun match - case sel: Select if sel.isInfix => - sel.sourcePos.withEnd(sel.nameSpan.end) - case _ => fun.sourcePos - val tpes = args.map(_.typeOpt.stripTypeVar.widen.finalResultType) - Some((tpes, pos.endPos, fun)) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = + if (params.typeParameters()) { + tree match + case TypeApply(sel: Select, _) + if sel.isForComprehensionMethod || sel.isInfix || + sel.symbol.name == nme.unapply => + None + case TypeApply(fun, args) if inferredTypeArgs(args) => + val tpes = args.map(_.tpe.stripTypeVar.widen.finalResultType) + Some((tpes, fun.sourcePos.endPos, fun)) + case _ => None + } else None + private def inferredTypeArgs(args: List[Tree]): Boolean = args.forall { case tt: TypeTree if tt.span.exists && !tt.span.isZeroExtent => true @@ -270,29 +284,35 @@ object InferredType: object Text: def apply(text: Array[Char]): Text = text - def unapply(tree: Tree)(using text: Text, cxt: Context) = - tree match - case vd @ ValDef(_, tpe, _) - if isValidSpan(tpe.span, vd.nameSpan) && - !vd.symbol.is(Flags.Enum) && - !isValDefBind(text, vd) => - if vd.symbol == vd.symbol.sourceSymbol then - Some(tpe.typeOpt, tpe.sourcePos.withSpan(vd.nameSpan), vd) - else None - case vd @ DefDef(_, _, tpe, _) - if isValidSpan(tpe.span, vd.nameSpan) && - tpe.span.start >= vd.nameSpan.end && - !vd.symbol.isConstructor && - !vd.symbol.is(Flags.Mutable) => - if vd.symbol == vd.symbol.sourceSymbol then - Some(tpe.typeOpt, tpe.sourcePos, vd) - else None - case bd @ Bind( - name, - Ident(nme.WILDCARD), - ) => - 
Some(bd.symbol.info, bd.namePos, bd) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, text: Text, ctx: Context) = + if (params.inferredTypes()) { + tree match + case vd @ ValDef(_, tpe, _) + if isValidSpan(tpe.span, vd.nameSpan) && + !vd.symbol.is(Flags.Enum) && + (isNotInUnapply(vd) || params.hintsInPatternMatch()) && + !isValDefBind(text, vd) => + if vd.symbol == vd.symbol.sourceSymbol then + Some(tpe.tpe, tpe.sourcePos.withSpan(vd.nameSpan), vd) + else None + case vd @ DefDef(_, _, tpe, _) + if isValidSpan(tpe.span, vd.nameSpan) && + tpe.span.start >= vd.nameSpan.end && + !vd.symbol.isConstructor && + !vd.symbol.is(Flags.Mutable) => + if vd.symbol == vd.symbol.sourceSymbol then + Some(tpe.tpe, tpe.sourcePos, vd) + else None + case bd @ Bind( + name, + Ident(nme.WILDCARD), + ) if !bd.span.isZeroExtent && bd.symbol.isTerm && params.hintsInPatternMatch() => + Some(bd.symbol.info, bd.namePos, bd) + case _ => None + } else None + + private def isNotInUnapply(vd: ValDef)(using Context) = + vd.rhs.span.exists && vd.rhs.span.start > vd.nameSpan.end private def isValidSpan(tpeSpan: Span, nameSpan: Span): Boolean = tpeSpan.isZeroExtent && diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index 378564d90bc1..86aa895cb4fc 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -360,7 +360,7 @@ case class ScalaPresentationCompiler( params.token() ) { access => val driver = access.compiler() - HoverProvider.hover(params, driver, search) + HoverProvider.hover(params, driver, search, config.hoverContentType()) } end hover diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala index 
3ca57fc39cdf..48c6bcfe8317 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala @@ -23,6 +23,7 @@ import dotty.tools.dotc.core.Symbols.NoSymbol import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.Types.AndType import dotty.tools.dotc.core.Types.ClassInfo +import dotty.tools.dotc.core.Types.NoType import dotty.tools.dotc.core.Types.OrType import dotty.tools.dotc.core.Types.Type import dotty.tools.dotc.core.Types.TypeRef @@ -94,7 +95,7 @@ object CaseKeywordCompletion: Some(sel.tpe.widen.deepDealias) selTpe - .map { selTpe => + .collect { case selTpe if selTpe != NoType => val selectorSym = selTpe.typeSymbol // Special handle case when selector is a tuple or `FunctionN`. if definitions.isTupleClass(selectorSym) || definitions.isFunctionClass( diff --git a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala index fb41e8859801..dd2fb3107c49 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala @@ -4,6 +4,7 @@ import scala.annotation.tailrec import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.mtags.CommonMtagsEnrichments import scala.meta.internal.mtags.KeywordWrapper +import scala.meta.pc.ContentType import scala.meta.pc.OffsetParams import scala.meta.pc.RangeParams import scala.meta.pc.SymbolDocumentation @@ -260,7 +261,7 @@ object InteractiveEnrichments extends CommonMtagsEnrichments: } extension (search: SymbolSearch) - def symbolDocumentation(symbol: Symbol)(using + def symbolDocumentation(symbol: Symbol, contentType: ContentType = ContentType.MARKDOWN)(using Context ): Option[SymbolDocumentation] = def toSemanticdbSymbol(symbol: Symbol) = @@ -280,6 
+281,7 @@ object InteractiveEnrichments extends CommonMtagsEnrichments: val documentation = search.documentation( sym, () => parentSymbols.iterator.map(toSemanticdbSymbol).toList.asJava, + contentType, ) documentation.nn.toScala end symbolDocumentation diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala index 94b00ca82aea..78635e540c43 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala @@ -18,6 +18,7 @@ class BaseInlayHintsSuite extends BasePCSuite { base: String, expected: String, kind: Option[Int] = None, + hintsInPatternMatch: Boolean = false ): Unit = def pkgWrap(text: String) = if (text.contains("package")) text @@ -35,7 +36,8 @@ class BaseInlayHintsSuite extends BasePCSuite { true, true, true, - true + true, + hintsInPatternMatch ) val inlayHints = presentationCompiler @@ -49,8 +51,8 @@ class BaseInlayHintsSuite extends BasePCSuite { val obtained = TestInlayHints.applyInlayHints(withPkg, inlayHints) assertNoDiff( + pkgWrap(expected), obtained, - pkgWrap(expected) ) } \ No newline at end of file diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCaseSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCaseSuite.scala index 521880b3a84b..e72ee5221d91 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCaseSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCaseSuite.scala @@ -763,14 +763,14 @@ class CompletionCaseSuite extends BaseCompletionSuite: | |object O { | val x: Foo | Bar = ??? - | val y = List(x).map{ ca@@ } + | val y = List(x).map{ca@@ } |}""".stripMargin, s"""|case class Foo(a: Int) |case class Bar(b: Int) | |object O { | val x: Foo | Bar = ??? 
- | val y = List(x).map{ + | val y = List(x).map{ |\tcase Foo(a) => $$0 |\tcase Bar(b) => | } @@ -779,3 +779,36 @@ class CompletionCaseSuite extends BaseCompletionSuite: filter = _.contains("exhaustive") ) + @Test def summonFrom = + check( + """ + |object A { + | import scala.compiletime.summonFrom + | class A + | + | inline def f: Any = summonFrom { + | case x@@: A => ??? // error: ambiguous givens + | } + |} + |""".stripMargin, + "" + ) + + @Test def summonFrom2 = + check( + """ + |object A { + | import scala.compiletime.summonFrom + | + | class A + | given a1: A = new A + | given a2: A = new A + | + | inline def f: Any = summonFrom { + | case x@@: A => ??? // error: ambiguous givens + | } + |} + |""".stripMargin, + "" + ) + diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverPlainTextSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverPlainTextSuite.scala new file mode 100644 index 000000000000..a69a1ff0f5da --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverPlainTextSuite.scala @@ -0,0 +1,90 @@ +package dotty.tools.pc.tests.hover + +import dotty.tools.pc.base.BaseHoverSuite + +import org.junit.Test +import dotty.tools.pc.utils.MockEntries +import scala.meta.pc.SymbolDocumentation +import scala.meta.internal.pc.PresentationCompilerConfigImpl +import scala.meta.pc.ContentType +import scala.meta.pc.PresentationCompilerConfig + +class HoverPlainTextSuite extends BaseHoverSuite: + + override protected def config: PresentationCompilerConfig = + PresentationCompilerConfigImpl().copy( + snippetAutoIndent = false, + hoverContentType = ContentType.PLAINTEXT + ) + + override protected def mockEntries: MockEntries = new MockEntries: + override def documentations: Set[SymbolDocumentation] = Set( + ScalaMockDocumentation("java/lang/String#substring().", "substring", List(), List(MockParam("beginIndex"))), + ScalaMockDocumentation("java/util/Collections#emptyList().", "emptyList"), + 
ScalaMockDocumentation("_empty_/Alpha.apply().", "apply", List(), List(MockParam("x"))), + ScalaMockDocumentation("_empty_/Alpha#", "init", List(), List(MockParam("x"))), + ScalaMockDocumentation("scala/collection/LinearSeqOps#headOption().", "headOption"), + ScalaMockDocumentation("scala/Option#fold().", "fold", List(MockParam("B"))), + ) + + @Test def `basic-plaintext` = + check( + """| + |/** + | * Some docstring + | */ + |case class Alpha(x: Int) { + |} + | + |object Main { + | val x = <> + |} + |""".stripMargin, + """|def apply(x: Int): Alpha + | + |Found documentation for _empty_/Alpha.apply(). + | + |""".stripMargin + ) + + + @Test def `head-plaintext` = + check( + """|object a { + | <> + |} + |""".stripMargin, + """|override def headOption: Option[Int] + | + |Found documentation for scala/collection/LinearSeqOps#headOption(). + |""".stripMargin + ) + + @Test def `trait-plaintext` = + check( + """|trait XX + |object Main extends <>{} + |""".stripMargin, + "trait XX: XX", + ) + + @Test def `function-chain4-plaintext` = + check( + """ + |trait Consumer { + | def subConsumer[T](i: T): T + | def consume(value: Int)(n: Int): Unit + |} + | + |object O { + | val consumer: Consumer = ??? 
+ | List(1).foreach(<>.consume(1)) + |} + |""".stripMargin, + """|Expression type: + |Consumer + | + |Symbol signature: + |def subConsumer[T](i: T): T + |""".stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala index da7601e3c746..e470f492657c 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala @@ -170,7 +170,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin, """|object O { - | def m/*: List<>[Int<>]*/ = 1 ::/*[Int<>]*/ List/*[Int<>]*/(1) + | def m/*: List<>[Int<>]*/ = 1 :: List/*[Int<>]*/(1) |} |""".stripMargin ) @@ -418,13 +418,16 @@ class InlayHintsSuite extends BaseInlayHintsSuite { @Test def `tuple-unapply` = check( """|object Main { + | val (local, _) = ("", 1.0) | val (fst, snd) = (1, 2) |} |""".stripMargin, """|object Main { + | val (local/*: String<>*/, _) = ("", 1.0) | val (fst/*: Int<>*/, snd/*: Int<>*/) = (1, 2) |} - |""".stripMargin + |""".stripMargin, + hintsInPatternMatch = true ) @Test def `list-unapply` = @@ -434,7 +437,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin, """|object Main { - | val hd/*: Int<>*/ ::/*[Int<>]*/ tail/*: List<>[Int<>]*/ = List/*[Int<>]*/(1, 2) + | val hd :: tail = List/*[Int<>]*/(1, 2) |} |""".stripMargin, ) @@ -449,7 +452,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |""".stripMargin, """|object Main { | val x/*: Int<>*/ = List/*[Int<>]*/(1, 2) match { - | case hd/*: Int<>*/ ::/*[Int<>]*/ tail/*: List<>[Int<>]*/ => hd + | case hd :: tail => hd | } |} |""".stripMargin, @@ -464,9 +467,10 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |""".stripMargin, """|object Main { |case class Foo[A](x: A, y: A) - | val Foo/*[Int<>]*/(fst/*: Int<>*/, snd/*: Int<>*/) = Foo/*[Int<>]*/(1, 2) + | val 
Foo(fst/*: Int<>*/, snd/*: Int<>*/) = Foo/*[Int<>]*/(1, 2) |} |""".stripMargin, + hintsInPatternMatch = true ) @Test def `valueOf` = @@ -517,7 +521,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin, """|object Main { - | List/*[Int<>]*/(1).collect/*[Int<>]*/ { case x/*: Int<>*/ => x } + | List/*[Int<>]*/(1).collect/*[Int<>]*/ { case x => x } | val x: PartialFunction[Int, Int] = { | case 1 => 2 | } @@ -532,7 +536,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin, """|object O { - | val tupleBound @ (one/*: String<>*/, two/*: String<>*/) = ("1", "2") + | val tupleBound @ (one, two) = ("1", "2") |} |""".stripMargin ) @@ -546,7 +550,8 @@ class InlayHintsSuite extends BaseInlayHintsSuite { """|object O { | val tupleBound /* comment */ @ (one/*: String<>*/, two/*: String<>*/) = ("1", "2") |} - |""".stripMargin + |""".stripMargin, + hintsInPatternMatch = true ) @Test def `complex` = @@ -764,4 +769,155 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin ) + + @Test def `pattern-match` = + check( + """|package example + |object O { + | val head :: tail = List(1) + | List(1) match { + | case head :: next => + | case Nil => + | } + | Option(Option(1)) match { + | case Some(Some(value)) => + | case None => + | } + | val (local, _) = ("", 1.0) + | val Some(x) = Option(1) + | for { + | x <- List((1,2)) + | (z, y) = x + | } yield { + | x + | } + |} + |""".stripMargin, + """|package example + |object O { + | val head :: tail = List/*[Int<>]*/(1) + | List/*[Int<>]*/(1) match { + | case head :: next => + | case Nil => + | } + | Option/*[Option<>[Int<>]]*/(Option/*[Int<>]*/(1)) match { + | case Some(Some(value)) => + | case None => + | } + | val (local, _) = ("", 1.0) + | val Some(x) = Option/*[Int<>]*/(1) + | for { + | x <- List/*[(Int<>, Int<>)]*/((1,2)) + | (z, y) = x + | } yield { + | x + | } + |} + |""".stripMargin + ) + + + @Test def `pattern-match1` = + check( + """|package example + |object O { + | 
val head :: tail = List(1) + | List(1) match { + | case head :: next => + | case Nil => + | } + | Option(Option(1)) match { + | case Some(Some(value)) => + | case None => + | } + | val (local, _) = ("", 1.0) + | val Some(x) = Option(1) + | for { + | x <- List((1,2)) + | (z, y) = x + | } yield { + | x + | } + |} + |""".stripMargin, + """|package example + |object O { + | val head/*: Int<>*/ :: tail/*: List<>[Int<>]*/ = List/*[Int<>]*/(1) + | List/*[Int<>]*/(1) match { + | case head/*: Int<>*/ :: next/*: List<>[Int<>]*/ => + | case Nil => + | } + | Option/*[Option<>[Int<>]]*/(Option/*[Int<>]*/(1)) match { + | case Some(Some(value/*: Int<>*/)) => + | case None => + | } + | val (local/*: String<>*/, _) = ("", 1.0) + | val Some(x/*: Int<>*/) = Option/*[Int<>]*/(1) + | for { + | x/*: (Int<>, Int<>)*/ <- List/*[(Int<>, Int<>)]*/((1,2)) + | (z/*: Int<>*/, y/*: Int<>*/) = x + | } yield { + | x + | } + |} + |""".stripMargin, + hintsInPatternMatch = true + ) + + @Test def quotes = + check( + """|package example + |import scala.quoted.* + |object O: + | inline def foo[T]: List[String] = ${fooImpl[T]} + | def fooImpl[T: Type](using Quotes): Expr[List[String]] = ??? + |""".stripMargin, + """|package example + |import scala.quoted.* + |object O: + | inline def foo[T]: List[String] = ${fooImpl[T]} + | def fooImpl[T: Type](using Quotes): Expr[List[String]] = ??? 
+ |""".stripMargin + ) + + @Test def quotes1 = + check( + """|package example + |import scala.quoted.* + |object O: + | def matchTypeImpl[T: Type](param1: Expr[T])(using Quotes) = + | import quotes.reflect.* + | Type.of[T] match + | case '[f] => + | val fr = TypeRepr.of[T] + |""".stripMargin, + """|package example + |import scala.quoted.* + |object O: + | def matchTypeImpl[T: Type](param1: Expr[T])(using Quotes)/*: Unit<>*/ = + | import quotes.reflect.* + | Type.of[T] match + | case '[f] => + | val fr/*: TypeRepr<>*/ = TypeRepr.of[T]/*(using evidence$1<<(3:21)>>)*/ + |""".stripMargin + ) + + + @Test def quotes2 = + check( + """|package example + |import scala.quoted.* + |object O: + | def rec[A : Type](using Quotes): List[String] = + | Type.of[A] match + | case '[field *: fields] => ??? + |""".stripMargin, + """|package example + |import scala.quoted.* + |object O: + | def rec[A : Type](using Quotes): List[String] = + | Type.of[A] match + | case '[field *: fields] => ??? + |""".stripMargin + ) } diff --git a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala index edd339a5e2ed..9015a39ba9e7 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala @@ -7,6 +7,7 @@ import java.util as ju import scala.jdk.CollectionConverters.* import scala.jdk.OptionConverters.* import scala.meta.internal.metals.{ClasspathSearch, WorkspaceSymbolQuery} +import scala.meta.pc.ContentType import scala.meta.pc.SymbolSearch.Result import scala.meta.pc.{ ParentSymbols, @@ -66,6 +67,12 @@ class MockSymbolSearch( override def documentation( symbol: String, parents: ParentSymbols + ) = documentation(symbol, parents, ContentType.MARKDOWN) + + override def documentation( + symbol: String, + parents: ParentSymbols, + contentType: ContentType ): Optional[SymbolDocumentation] = (symbol +: 
parents.parents().asScala).iterator .map(symbol => mockEntries.documentations.find(_.symbol == symbol)) diff --git a/project/Build.scala b/project/Build.scala index 52405660ef83..aaa16ce4bce4 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1335,7 +1335,7 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings lazy val presentationCompilerSettings = { - val mtagsVersion = "1.2.2+44-42e0515a-SNAPSHOT" + val mtagsVersion = "1.3.0+56-a06a024d-SNAPSHOT" Seq( resolvers ++= Resolver.sonatypeOssRepos("snapshots"), From 8f84df8a73573af18562ddcde7bf5e279ce85dc2 Mon Sep 17 00:00:00 2001 From: aherlihy Date: Mon, 29 Apr 2024 16:54:50 +0200 Subject: [PATCH 324/465] Add warning for anonymous inline classes (#16723) --- community-build/community-projects/scodec | 2 +- .../community-projects/shapeless-3 | 2 +- .../tools/dotc/printing/Formatting.scala | 4 +- .../tools/dotc/reporting/ErrorMessageID.scala | 1 + .../dotty/tools/dotc/reporting/messages.scala | 9 ++++ .../dotc/transform/PruneErasedDefs.scala | 12 +++++ tests/neg/i13044.check | 48 +++++++++---------- tests/neg/i13044.scala | 8 +--- tests/pos/i17314.scala | 3 +- tests/pos/not-looping-implicit.scala | 4 +- tests/run/i11050.scala | 12 +++-- tests/warn/i15503i.scala | 6 +-- tests/warn/i15503j.scala | 4 +- tests/warn/i16723.check | 6 +++ tests/warn/i16723.scala | 3 ++ tests/warn/i16723a.check | 6 +++ tests/warn/i16723a.scala | 17 +++++++ 17 files changed, 101 insertions(+), 46 deletions(-) create mode 100644 tests/warn/i16723.check create mode 100644 tests/warn/i16723.scala create mode 100644 tests/warn/i16723a.check create mode 100644 tests/warn/i16723a.scala diff --git a/community-build/community-projects/scodec b/community-build/community-projects/scodec index 96a77ecaaf91..b74f2085f071 160000 --- a/community-build/community-projects/scodec +++ b/community-build/community-projects/scodec @@ -1 +1 @@ -Subproject commit 96a77ecaaf913f195bb4079966a2e9fb41ce214e +Subproject commit 
b74f2085f07165d84b32c39eb214c9cc838711cc diff --git a/community-build/community-projects/shapeless-3 b/community-build/community-projects/shapeless-3 index 90f0c977b536..24e86dd290eb 160000 --- a/community-build/community-projects/shapeless-3 +++ b/community-build/community-projects/shapeless-3 @@ -1 +1 @@ -Subproject commit 90f0c977b536c06305496600b8b2014c9e8e3d86 +Subproject commit 24e86dd290eba9b27599936a7f338fac975f833d diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index ac13f0161c70..6f1c32beb822 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -42,7 +42,9 @@ object Formatting { trait CtxShow: def run(using Context): Shown - private inline def CtxShow(inline x: Context ?=> Shown) = new CtxShow { def run(using Context) = x(using ctx) } + private inline def CtxShow(inline x: Context ?=> Shown) = + class InlinedCtxShow extends CtxShow { def run(using Context) = x(using ctx) } + new InlinedCtxShow private def toStr[A: Show](x: A)(using Context): String = Shown.toStr(toShown(x)) private def toShown[A: Show](x: A)(using Context): Shown = Show[A].show(x).runCtxShow diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 33f5dcf1b1f5..e51f0a8b77ac 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -208,6 +208,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case UnstableInlineAccessorID // errorNumber: 192 case VolatileOnValID // errorNumber: 193 case ExtensionNullifiedByMemberID // errorNumber: 194 + case InlinedAnonClassWarningID // errorNumber: 195 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala 
b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 58692b1ee7d7..51556a5c93ac 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3113,6 +3113,15 @@ extends SyntaxMsg(InlineGivenShouldNotBeFunctionID): | inline def apply(x: A) = x.toB """ +class InlinedAnonClassWarning()(using Context) + extends Message(InlinedAnonClassWarningID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = "New anonymous class definition will be duplicated at each inline site" + def explain(using Context) = + i"""Anonymous class will be defined at each use site, which may lead to a larger number of classfiles. + | + |To inline class definitions, you may provide an explicit class name to avoid this warning.""" + class ValueDiscarding(tp: Type)(using Context) extends Message(ValueDiscardingID): def kind = MessageKind.PotentialIssue diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala index 9bb30926d45a..47eb70cb46d4 100644 --- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala @@ -10,6 +10,7 @@ import Symbols.* import typer.RefChecks import MegaPhase.MiniPhase import ast.tpd +import reporting.InlinedAnonClassWarning import config.Feature import Decorators.* @@ -51,6 +52,17 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => else cpy.ValDef(tree)(rhs = trivialErasedTree(tree.rhs)) override def transformDefDef(tree: DefDef)(using Context): Tree = + def checkNoInlineAnnoClasses(tree: DefDef)(using Context): Unit = + if tree.symbol.is(Inline) then + new TreeTraverser { + def traverse(tree: Tree)(using Context): Unit = + tree match + case tree: TypeDef if tree.symbol.isAnonymousClass => + report.warning(new InlinedAnonClassWarning(), tree.symbol.sourcePos) + case _ => traverseChildren(tree) + 
}.traverse(tree) + + checkNoInlineAnnoClasses(tree) checkErasedInExperimental(tree.symbol) if !tree.symbol.isEffectivelyErased || tree.rhs.isEmpty then tree else cpy.DefDef(tree)(rhs = trivialErasedTree(tree.rhs)) diff --git a/tests/neg/i13044.check b/tests/neg/i13044.check index 1fbe978a49b8..e504b14185da 100644 --- a/tests/neg/i13044.check +++ b/tests/neg/i13044.check @@ -1,5 +1,5 @@ --- Error: tests/neg/i13044.scala:65:40 --------------------------------------------------------------------------------- -65 | implicit def typeSchema: Schema[A] = Schema.gen // error // error +-- Error: tests/neg/i13044.scala:61:40 --------------------------------------------------------------------------------- +61 | implicit def typeSchema: Schema[A] = Schema.gen // error // error | ^^^^^^^^^^ | given instance gen is declared as `inline`, but was not inlined | @@ -12,11 +12,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 @@ -24,11 +24,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 @@ -36,11 +36,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 @@ -48,11 +48,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 @@ -64,15 +64,15 @@ | ^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ -------------------------------------------------------------------------------------------------------------------- --- Error: tests/neg/i13044.scala:65:40 --------------------------------------------------------------------------------- -65 | implicit def typeSchema: Schema[A] = Schema.gen // error // error +-- Error: tests/neg/i13044.scala:61:40 --------------------------------------------------------------------------------- +61 | implicit def typeSchema: Schema[A] = Schema.gen // error // error | ^^^^^^^^^^ | method recurse is declared as `inline`, but was not inlined | @@ -85,11 +85,11 @@ | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ 
|- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 @@ -97,11 +97,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 @@ -109,11 +109,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 @@ -121,11 +121,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that 
was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 @@ -137,10 +137,10 @@ | ^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i13044.scala b/tests/neg/i13044.scala index 42417a9096f9..2b00fc188f8c 100644 --- a/tests/neg/i13044.scala +++ b/tests/neg/i13044.scala @@ -23,15 +23,11 @@ trait SchemaDerivation { inline summonInline[Mirror.Of[A]] match { case m: Mirror.SumOf[A] => lazy val subTypes = recurse[m.MirroredElemTypes] - new Schema[A] { - def build: A = ??? - } + ??? case m: Mirror.ProductOf[A] => lazy val fields = recurse[m.MirroredElemTypes] - new Schema[A] { - def build: A = ??? - } + ??? 
} inline given gen[A]: Schema[A] = derived diff --git a/tests/pos/i17314.scala b/tests/pos/i17314.scala index 2d0c409ced10..8ece4a3bd7ac 100644 --- a/tests/pos/i17314.scala +++ b/tests/pos/i17314.scala @@ -13,8 +13,9 @@ object circelike { inline final def derived[A](using conf: Configuration)(using inline mirror: Mirror.Of[A] ): ConfiguredCodec[A] = - new ConfiguredCodec[A]: + class InlinedConfiguredCodec extends ConfiguredCodec[A]: val codec = summonInline[Codec[URI]] // simplification + new InlinedConfiguredCodec } object foo { diff --git a/tests/pos/not-looping-implicit.scala b/tests/pos/not-looping-implicit.scala index 90fba9f807a7..ebaf25e760f2 100644 --- a/tests/pos/not-looping-implicit.scala +++ b/tests/pos/not-looping-implicit.scala @@ -24,10 +24,10 @@ object Schema { inline summonInline[Mirror.Of[A]] match { case m: Mirror.SumOf[A] => lazy val members = recurse[m.MirroredElemLabels, m.MirroredElemTypes]() - new Schema[A] {} + ??? case m: Mirror.ProductOf[A] => lazy val fields = recurse[m.MirroredElemLabels, m.MirroredElemTypes]() - new Schema[A] {} + ??? 
} inline given gen[A]: Schema[A] = derived[A] diff --git a/tests/run/i11050.scala b/tests/run/i11050.scala index 90a6ec84df85..027812c013c4 100644 --- a/tests/run/i11050.scala +++ b/tests/run/i11050.scala @@ -113,12 +113,14 @@ object Show: inline def show[T](x: T): String = summonInline[Show[T]].show(x) - transparent inline def derived[T](implicit ev: Mirror.Of[T]): Show[T] = new { - def show(x: T): String = inline ev match { - case m: Mirror.ProductOf[T] => showProduct(x.asInstanceOf[Product], m) - case m: Mirror.SumOf[T] => showCases[m.MirroredElemTypes](0)(x, m.ordinal(x)) + transparent inline def derived[T](implicit ev: Mirror.Of[T]): Show[T] = + class InlinedShow extends Show[T] { // provide name to anonymous class + def show(x: T): String = inline ev match { + case m: Mirror.ProductOf[T] => showProduct(x.asInstanceOf[Product], m) + case m: Mirror.SumOf[T] => showCases[m.MirroredElemTypes](0)(x, m.ordinal(x)) + } } - } + new InlinedShow transparent inline def showProduct[T](x: Product, m: Mirror.ProductOf[T]): String = constValue[m.MirroredLabel] + showElems[m.MirroredElemTypes, m.MirroredElemLabels](0, Nil)(x) diff --git a/tests/warn/i15503i.scala b/tests/warn/i15503i.scala index 329b81327288..f4d41c53d981 100644 --- a/tests/warn/i15503i.scala +++ b/tests/warn/i15503i.scala @@ -247,7 +247,7 @@ package foo.test.i16679a: import scala.deriving.Mirror object CaseClassByStringName: inline final def derived[A](using inline A: Mirror.Of[A]): CaseClassByStringName[A] = - new CaseClassByStringName[A]: + new CaseClassByStringName[A]: // warn def name: String = A.toString object secondPackage: @@ -263,7 +263,7 @@ package foo.test.i16679b: object CaseClassName: import scala.deriving.Mirror inline final def derived[A](using inline A: Mirror.Of[A]): CaseClassName[A] = - new CaseClassName[A]: + new CaseClassName[A]: // warn def name: String = A.toString object Foo: @@ -279,7 +279,7 @@ package foo.test.i17156: package a: trait Foo[A] object Foo: - inline def derived[T]: 
Foo[T] = new Foo{} + inline def derived[T]: Foo[T] = new Foo{} // warn package b: import a.Foo diff --git a/tests/warn/i15503j.scala b/tests/warn/i15503j.scala index f5e15bb79f79..fa30601d8960 100644 --- a/tests/warn/i15503j.scala +++ b/tests/warn/i15503j.scala @@ -49,11 +49,11 @@ package foo.unused.summon.inlines: transparent inline given conflictInside: C = summonInline[A] - new {} + ??? transparent inline given potentialConflict: C = summonInline[B] - new {} + ??? val b: B = summon[B] val c: C = summon[C] \ No newline at end of file diff --git a/tests/warn/i16723.check b/tests/warn/i16723.check new file mode 100644 index 000000000000..ed8e55502a80 --- /dev/null +++ b/tests/warn/i16723.check @@ -0,0 +1,6 @@ +-- [E195] Potential Issue Warning: tests/warn/i16723.scala:3:2 --------------------------------------------------------- +3 | new Object {} // warn + | ^ + | New anonymous class definition will be duplicated at each inline site + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i16723.scala b/tests/warn/i16723.scala new file mode 100644 index 000000000000..32875f4edf36 --- /dev/null +++ b/tests/warn/i16723.scala @@ -0,0 +1,3 @@ +inline def foo = + class NotAnon + new Object {} // warn \ No newline at end of file diff --git a/tests/warn/i16723a.check b/tests/warn/i16723a.check new file mode 100644 index 000000000000..ba4794fac23e --- /dev/null +++ b/tests/warn/i16723a.check @@ -0,0 +1,6 @@ +-- [E195] Potential Issue Warning: tests/warn/i16723a.scala:5:38 ------------------------------------------------------- +5 |inline given Converter[Int, String] = new Converter { // warn + | ^ + | New anonymous class definition will be duplicated at each inline site + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i16723a.scala b/tests/warn/i16723a.scala new file mode 100644 index 000000000000..90aaeb868d4e --- /dev/null +++ b/tests/warn/i16723a.scala @@ -0,0 +1,17 @@ +trait Converter[A, B] 
{ + def convert: A => B +} + +inline given Converter[Int, String] = new Converter { // warn + def convert = _.toString() +} + +def foo(using bar: Converter[Int, String]) = + "foo" + +@main +def main = + foo + foo + foo + foo \ No newline at end of file From 0c89c9226b11923de22ae3922aef10275b1d3e04 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 7 May 2024 09:47:52 +0200 Subject: [PATCH 325/465] Undo all unnecessary changes to Tuples. Keep only the changes we need for making NamedTuple work properly. We still keep operations Contains and Disjoint in Tuple and make Reverse standard. We remove or amend tests that relied on the changes to Tuple. No tests about NamedTuple's are affected. --- library/src/scala/Tuple.scala | 414 ++++++++++-------------- library/src/scala/runtime/Tuples.scala | 21 +- tests/neg/print-tuple-union.check | 2 +- tests/neg/wildcard-match.check | 7 +- tests/pos/tuple-filter.scala | 3 - tests/run/named-tuples-strawman-2.scala | 60 +++- 6 files changed, 249 insertions(+), 258 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 364124481db7..8074fe3664e5 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -5,7 +5,7 @@ import compiletime.* import compiletime.ops.int.* /** Tuple of arbitrary arity */ -sealed trait Tuple extends Product: +sealed trait Tuple extends Product { import Tuple.* /** Create a copy of this tuple as an Array */ @@ -21,43 +21,43 @@ sealed trait Tuple extends Product: inline def toIArray: IArray[Object] = runtime.Tuples.toIArray(this) + /** Return a copy of `this` tuple with an element appended */ + inline def :* [This >: this.type <: Tuple, L] (x: L): Append[This, L] = + runtime.Tuples.append(x, this).asInstanceOf[Append[This, L]] + /** Return a new tuple by prepending the element to `this` tuple. 
* This operation is O(this.size) */ - inline def *: [H, This >: this.type <: Tuple](x: H): H *: This = + inline def *: [H, This >: this.type <: Tuple] (x: H): H *: This = runtime.Tuples.cons(x, this).asInstanceOf[H *: This] - /** Return a copy of `this` tuple with an element appended */ - inline def :* [This >: this.type <: Tuple, L](x: L): This :* L = - runtime.Tuples.append(x, this).asInstanceOf[Append[This, L]] + /** Return a new tuple by concatenating `this` tuple with `that` tuple. + * This operation is O(this.size + that.size) + */ + inline def ++ [This >: this.type <: Tuple](that: Tuple): Concat[This, that.type] = + runtime.Tuples.concat(this, that).asInstanceOf[Concat[This, that.type]] /** Return the size (or arity) of the tuple */ inline def size[This >: this.type <: Tuple]: Size[This] = runtime.Tuples.size(this).asInstanceOf[Size[This]] - /** Get the i-th element of this tuple. - * Equivalent to productElement but with a precise return type. + /** Given two tuples, `(a1, ..., an)` and `(a1, ..., an)`, returns a tuple + * `((a1, b1), ..., (an, bn))`. If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The result is typed as `((A1, B1), ..., (An, Bn))` if at least one of the + * tuple types has a `EmptyTuple` tail. Otherwise the result type is + * `(A1, B1) *: ... *: (Ai, Bi) *: Tuple` */ - inline def apply[This >: this.type <: Tuple](n: Int): Elem[This, n.type] = - runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] - - /** Get the head of this tuple */ - inline def head[This >: this.type <: Tuple]: Head[This] = - runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] + inline def zip[This >: this.type <: Tuple, T2 <: Tuple](t2: T2): Zip[This, T2] = + runtime.Tuples.zip(this, t2).asInstanceOf[Zip[This, T2]] - /** Get the tail of this tuple. - * This operation is O(this.size) + /** Called on a tuple `(a1, ..., an)`, returns a new tuple `(f(a1), ..., f(an))`. 
+ * The result is typed as `(F[A1], ..., F[An])` if the tuple type is fully known. + * If the tuple is of the form `a1 *: ... *: Tuple` (that is, the tail is not known + * to be the cons type. */ - inline def tail[This >: this.type <: Tuple]: Tail[This] = - runtime.Tuples.tail(this).asInstanceOf[Tail[This]] - - /** Get the last of this tuple */ - inline def last[This >: this.type <: Tuple]: Last[This] = - runtime.Tuples.last(this).asInstanceOf[Last[This]] - - /** Get the initial part of the tuple without its last element */ - inline def init[This >: this.type <: Tuple]: Init[This] = - runtime.Tuples.init(this).asInstanceOf[Init[This]] + inline def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] = + runtime.Tuples.map(this, f).asInstanceOf[Map[this.type, F]] /** Given a tuple `(a1, ..., am)`, returns the tuple `(a1, ..., an)` consisting * of its first n elements. @@ -65,6 +65,7 @@ sealed trait Tuple extends Product: inline def take[This >: this.type <: Tuple](n: Int): Take[This, n.type] = runtime.Tuples.take(this, n).asInstanceOf[Take[This, n.type]] + /** Given a tuple `(a1, ..., am)`, returns the tuple `(an+1, ..., am)` consisting * all its elements except the first n ones. */ @@ -78,158 +79,118 @@ sealed trait Tuple extends Product: inline def splitAt[This >: this.type <: Tuple](n: Int): Split[This, n.type] = runtime.Tuples.splitAt(this, n).asInstanceOf[Split[This, n.type]] - /** Return a new tuple by concatenating `this` tuple with `that` tuple. - * This operation is O(this.size + that.size) - */ - // Contrarily to `this`, `that` does not need a type parameter - // since `++` is covariant in its second argument. - inline def ++ [This >: this.type <: Tuple](that: Tuple): This ++ that.type = - runtime.Tuples.concat(this, that).asInstanceOf[Concat[This, that.type]] - - /** Given two tuples, `(a1, ..., an)` and `(a1, ..., an)`, returns a tuple - * `((a1, b1), ..., (an, bn))`. 
If the two tuples have different sizes, - * the extra elements of the larger tuple will be disregarded. - * The result is typed as `((A1, B1), ..., (An, Bn))` if at least one of the - * tuple types has a `EmptyTuple` tail. Otherwise the result type is - * `(A1, B1) *: ... *: (Ai, Bi) *: Tuple` - */ - inline def zip[This >: this.type <: Tuple, T2 <: Tuple](t2: T2): Zip[This, T2] = - runtime.Tuples.zip(this, t2).asInstanceOf[Zip[This, T2]] - - /** Called on a tuple `(a1, ..., an)`, returns a new tuple `(f(a1), ..., f(an))`. - * The result is typed as `(F[A1], ..., F[An])` if the tuple type is fully known. - * If the tuple is of the form `a1 *: ... *: Tuple` (that is, the tail is not known - * to be the cons type. - */ - inline def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] = - runtime.Tuples.map(this, f).asInstanceOf[Map[this.type, F]] - - /** A tuple consisting of all elements of this tuple that have types - * for which the given type level predicate `P` reduces to the literal - * constant `true`. - */ - inline def filter[This >: this.type <: Tuple, P[_ <: Union[This]] <: Boolean]: Filter[This, P] = - val toInclude = constValueTuple[IndicesWhere[This, P]].toArray - val arr = new Array[Object](toInclude.length) - for i <- toInclude.indices do - arr(i) = this.productElement(toInclude(i).asInstanceOf[Int]).asInstanceOf[Object] - Tuple.fromArray(arr).asInstanceOf[Filter[This, P]] - /** Given a tuple `(a1, ..., am)`, returns the reversed tuple `(am, ..., a1)` * consisting all its elements. 
*/ inline def reverse[This >: this.type <: Tuple]: Reverse[This] = runtime.Tuples.reverse(this).asInstanceOf[Reverse[This]] +} - /** A tuple with the elements of this tuple in reversed order added in front of `acc` */ - inline def reverseOnto[This >: this.type <: Tuple, Acc <: Tuple](acc: Acc): ReverseOnto[This, Acc] = - (this.reverse ++ acc).asInstanceOf[ReverseOnto[This, Acc]] - -object Tuple: +object Tuple { /** Type of a tuple with an element appended */ - type Append[X <: Tuple, +Y] <: NonEmptyTuple = X match + type Append[X <: Tuple, Y] <: NonEmptyTuple = X match { case EmptyTuple => Y *: EmptyTuple case x *: xs => x *: Append[xs, Y] + } - /** An infix shorthand for `Append[X, Y]` */ - infix type :*[X <: Tuple, +Y] = Append[X, Y] - - /** The size of a tuple, represented as a literal constant subtype of Int */ - type Size[X <: Tuple] <: Int = X match - case EmptyTuple => 0 - case _ *: xs => S[Size[xs]] - - /** The type of the element at position N in the tuple X */ - type Elem[X <: Tuple, N <: Int] = X match - case x *: xs => N match - case 0 => x - case S[n1] => Elem[xs, n1] - - /** The type of the first element of a tuple */ - // Only bounded by `<: Tuple` not `<: NonEmptyTuple` - // even though it only matches non-empty tuples. - // Avoids bounds check failures from an irreducible type - // like `Tuple.Head[Tuple.Tail[X]]` - // Other types that don't reduce for empty tuples follow the same principle. 
- type Head[X <: Tuple] = X match + /** Type of the head of a tuple */ + type Head[X <: Tuple] = X match { case x *: _ => x + } - /** The type of a tuple consisting of all elements of tuple X except the first one */ - type Tail[X <: Tuple] <: Tuple = X match + /** Type of the initial part of the tuple without its last element */ + type Init[X <: Tuple] <: Tuple = X match { + case _ *: EmptyTuple => EmptyTuple + case x *: xs => + x *: Init[xs] + } + + /** Type of the tail of a tuple */ + type Tail[X <: Tuple] <: Tuple = X match { case _ *: xs => xs + } - /** The type of the last element of a tuple */ - type Last[X <: Tuple] = X match + /** Type of the last element of a tuple */ + type Last[X <: Tuple] = X match { case x *: EmptyTuple => x case _ *: xs => Last[xs] + } - /** The type of the initial part of a tuple without its last element */ - type Init[X <: Tuple] <: Tuple = X match - case _ *: EmptyTuple => EmptyTuple - case x *: xs => x *: Init[xs] - - /** The type of the tuple consisting of the first `N` elements of `X`, - * or all elements if `N` exceeds `Size[X]`. - */ - type Take[X <: Tuple, N <: Int] <: Tuple = N match - case 0 => EmptyTuple - case S[n1] => X match - case EmptyTuple => EmptyTuple - case x *: xs => x *: Take[xs, n1] - - /** The type of the tuple consisting of all elements of `X` except the first `N` ones, - * or no elements if `N` exceeds `Size[X]`. 
- */ - type Drop[X <: Tuple, N <: Int] <: Tuple = N match - case 0 => X - case S[n1] => X match - case EmptyTuple => EmptyTuple - case _ *: xs => Drop[xs, n1] + /** Type of the concatenation of two tuples */ + type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match { + case EmptyTuple => Y + case x1 *: xs1 => x1 *: Concat[xs1, Y] + } + + /** Type of the element at position N in the tuple X */ + type Elem[X <: Tuple, N <: Int] = X match { + case x *: xs => + N match { + case 0 => x + case S[n1] => Elem[xs, n1] + } + } + + /** Literal constant Int size of a tuple */ + type Size[X <: Tuple] <: Int = X match { + case EmptyTuple => 0 + case x *: xs => S[Size[xs]] + } - /** The pair type `(Take(X, N), Drop[X, N]). */ - type Split[X <: Tuple, N <: Int] = (Take[X, N], Drop[X, N]) + /** Fold a tuple `(T1, ..., Tn)` into `F[T1, F[... F[Tn, Z]...]]]` */ + type Fold[Tup <: Tuple, Z, F[_, _]] = Tup match + case EmptyTuple => Z + case h *: t => F[h, Fold[t, Z, F]] - /** Type of the concatenation of two tuples `X` and `Y` */ - // Can be covariant in `Y` since it never appears as a match type scrutinee. - type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match - case EmptyTuple => Y - case x *: xs => x *: Concat[xs, Y] + /** Converts a tuple `(T1, ..., Tn)` to `(F[T1], ..., F[Tn])` */ + type Map[Tup <: Tuple, F[_ <: Union[Tup]]] <: Tuple = Tup match { + case EmptyTuple => EmptyTuple + case h *: t => F[h] *: Map[t, F] + } - /** An infix shorthand for `Concat[X, Y]` */ - infix type ++[X <: Tuple, +Y <: Tuple] = Concat[X, Y] + /** Converts a tuple `(T1, ..., Tn)` to a flattened `(..F[T1], ..., ..F[Tn])` */ + type FlatMap[Tup <: Tuple, F[_ <: Union[Tup]] <: Tuple] <: Tuple = Tup match { + case EmptyTuple => EmptyTuple + case h *: t => Concat[F[h], FlatMap[t, F]] + } - /** The type of the tuple consisting of all element values of - * tuple `X` zipped with corresponding elements of tuple `Y`. 
- * If the two tuples have different sizes, - * the extra elements of the larger tuple will be disregarded. - * For example, if - * ``` - * X = (S1, ..., Si) - * Y = (T1, ..., Tj) where j >= i - * ``` - * then - * ``` - * Zip[X, Y] = ((S1, T1), ..., (Si, Ti)) + /** Filters out those members of the tuple for which the predicate `P` returns `false`. + * A predicate `P[X]` is a type that can be either `true` or `false`. For example: + * ```scala + * type IsString[x] <: Boolean = x match { + * case String => true + * case _ => false + * } + * summon[Tuple.Filter[(1, "foo", 2, "bar"), IsString] =:= ("foo", "bar")] * ``` * @syntax markdown */ - type Zip[X <: Tuple, Y <: Tuple] <: Tuple = (X, Y) match - case (x *: xs, y *: ys) => (x, y) *: Zip[xs, ys] + type Filter[Tup <: Tuple, P[_] <: Boolean] <: Tuple = Tup match { + case EmptyTuple => EmptyTuple + case h *: t => P[h] match { + case true => h *: Filter[t, P] + case false => Filter[t, P] + } + } + + /** Given two tuples, `A1 *: ... *: An * At` and `B1 *: ... *: Bn *: Bt` + * where at least one of `At` or `Bt` is `EmptyTuple` or `Tuple`, + * returns the tuple type `(A1, B1) *: ... *: (An, Bn) *: Ct` + * where `Ct` is `EmptyTuple` if `At` or `Bt` is `EmptyTuple`, otherwise `Ct` is `Tuple`. + */ + type Zip[T1 <: Tuple, T2 <: Tuple] <: Tuple = (T1, T2) match { + case (h1 *: t1, h2 *: t2) => (h1, h2) *: Zip[t1, t2] case (EmptyTuple, _) => EmptyTuple case (_, EmptyTuple) => EmptyTuple - - /** The type of tuple `X` mapped with the type-level function `F`. - * If `X = (T1, ..., Ti)` then `Map[X, F] = `(F[T1], ..., F[Ti])`. - */ - type Map[X <: Tuple, F[_ <: Union[X]]] <: Tuple = X match - case EmptyTuple => EmptyTuple - case x *: xs => F[x] *: Map[xs, F] + case _ => Tuple + } /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... 
Tn)` */ - type InverseMap[X <: Tuple, F[_]] <: Tuple = X match - case F[x] *: xs => x *: InverseMap[xs, F] + type InverseMap[X <: Tuple, F[_]] <: Tuple = X match { + case F[x] *: t => x *: InverseMap[t, F] case EmptyTuple => EmptyTuple + } /** Implicit evidence. IsMappedBy[F][X] is present in the implicit scope iff * X is a tuple for which each element's type is constructed via `F`. E.g. @@ -238,64 +199,42 @@ object Tuple: */ type IsMappedBy[F[_]] = [X <: Tuple] =>> X =:= Map[InverseMap[X, F], F] - /** The type of tuple `X` flat-mapped with the type-level function `F`. - * If `X = (T1, ..., Ti)` then `FlatMap[X, F] = `F[T1] ++ ... ++ F[Ti]` - */ - type FlatMap[X <: Tuple, F[_ <: Union[X]] <: Tuple] <: Tuple = X match - case EmptyTuple => EmptyTuple - case x *: xs => Concat[F[x], FlatMap[xs, F]] - // TODO: implement term level analogue + /** Type of the reversed tuple */ + type Reverse[X <: Tuple] = ReverseOnto[X, EmptyTuple] - /** The type of the tuple consisting of all elements of tuple `X` that have types - * for which the given type level predicate `P` reduces to the literal - * constant `true`. A predicate `P[X]` is a type that can be either `true` - * or `false`. 
For example: - * ```scala - * type IsString[x] <: Boolean = x match - * case String => true - * case _ => false - * summon[Tuple.Filter[(1, "foo", 2, "bar"), IsString] =:= ("foo", "bar")] - * ``` - * @syntax markdown - */ - type Filter[X <: Tuple, P[_ <: Union[X]] <: Boolean] <: Tuple = X match - case EmptyTuple => EmptyTuple - case x *: xs => P[x] match - case true => x *: Filter[xs, P] - case false => Filter[xs, P] + /** Prepends all elements of a tuple in reverse order onto the other tuple */ + type ReverseOnto[From <: Tuple, +To <: Tuple] <: Tuple = From match + case x *: xs => ReverseOnto[xs, x *: To] + case EmptyTuple => To - /** A tuple with the elements of tuple `X` in reversed order */ - type Reverse[X <: Tuple] = ReverseOnto[X, EmptyTuple] + /** Transforms a tuple `(T1, ..., Tn)` into `(T1, ..., Ti)`. */ + type Take[T <: Tuple, N <: Int] <: Tuple = N match { + case 0 => EmptyTuple + case S[n1] => T match { + case EmptyTuple => EmptyTuple + case x *: xs => x *: Take[xs, n1] + } + } - /** A tuple with the elements of tuple `X` in reversed order added in front of `Acc` */ - type ReverseOnto[X <: Tuple, Acc <: Tuple] <: Tuple = X match - case x *: xs => ReverseOnto[xs, x *: Acc] - case EmptyTuple => Acc + /** Transforms a tuple `(T1, ..., Tn)` into `(Ti+1, ..., Tn)`. */ + type Drop[T <: Tuple, N <: Int] <: Tuple = N match { + case 0 => T + case S[n1] => T match { + case EmptyTuple => EmptyTuple + case x *: xs => Drop[xs, n1] + } + } - /** Fold a tuple `(T1, ..., Tn)` into `F[T1, F[... F[Tn, Z]...]]]` */ - type Fold[X <: Tuple, Z, F[_, _]] = X match - case EmptyTuple => Z - case x *: xs => F[x, Fold[xs, Z, F]] + /** Splits a tuple (T1, ..., Tn) into a pair of two tuples `(T1, ..., Ti)` and + * `(Ti+1, ..., Tn)`. + */ + type Split[T <: Tuple, N <: Int] = (Take[T, N], Drop[T, N]) /** Given a tuple `(T1, ..., Tn)`, returns a union of its * member types: `T1 | ... | Tn`. Returns `Nothing` if the tuple is empty. 
*/ type Union[T <: Tuple] = Fold[T, Nothing, [x, y] =>> x | y] - /** The index of `Y` in tuple `X` as a literal constant Int, - * or `Size[X]` if `Y` is disjoint from all element types in `X`. - */ - type IndexOf[X <: Tuple, Y] <: Int = X match - case Y *: _ => 0 - case _ *: xs => S[IndexOf[xs, Y]] - case EmptyTuple => 0 - - /** A tuple consisting of those indices `N` of tuple `X` where the predicate `P` - * is true for `Elem[X, N]`. Indices are type level values <: Int. - */ - type IndicesWhere[X <: Tuple, P[_ <: Union[X]] <: Boolean] = - helpers.IndicesWhereHelper[X, P, 0] - /** A type level Boolean indicating whether the tuple `X` has an element * that matches `Y`. * @pre The elements of `X` are assumed to be singleton types @@ -325,42 +264,28 @@ object Tuple: def unapply(x: EmptyTuple): true = true /** Convert an array into a tuple of unknown arity and types */ - def fromArray[T](xs: Array[T]): Tuple = - fromArray(xs, xs.length) - - /** Convert the first `n` elements of an array into a tuple of unknown arity and types */ - def fromArray[T](xs: Array[T], n: Int): Tuple = - val xs2 = xs match + def fromArray[T](xs: Array[T]): Tuple = { + val xs2 = xs match { case xs: Array[Object] => xs case xs => xs.map(_.asInstanceOf[Object]) - runtime.Tuples.fromArray(xs2, n) + } + runtime.Tuples.fromArray(xs2) + } /** Convert an immutable array into a tuple of unknown arity and types */ - def fromIArray[T](xs: IArray[T]): Tuple = fromIArray(xs, xs.length) - - /** Convert the first `n` elements of an immutable array into a tuple of unknown arity and types */ - def fromIArray[T](xs: IArray[T], n: Int): Tuple = - val xs2: IArray[Object] = xs match + def fromIArray[T](xs: IArray[T]): Tuple = { + val xs2: IArray[Object] = xs match { case xs: IArray[Object] @unchecked => xs - case _ => xs.map(_.asInstanceOf[Object]) - runtime.Tuples.fromIArray(xs2, n) + case _ => + xs.map(_.asInstanceOf[Object]) + } + runtime.Tuples.fromIArray(xs2) + } /** Convert a Product into a tuple of unknown 
arity and types */ def fromProduct(product: Product): Tuple = runtime.Tuples.fromProduct(product) - extension [X <: Tuple](inline x: X) - - /** The index (starting at 0) of the first occurrence of `y.type` in the type `X` of `x` - * or `Size[X]` if no such element exists. - */ - inline def indexOf(y: Any): IndexOf[X, y.type] = constValue[IndexOf[X, y.type]] - - /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` */ - inline def contains(y: Any): Contains[X, y.type] = constValue[Contains[X, y.type]] - - end extension - def fromProductTyped[P <: Product](p: P)(using m: scala.deriving.Mirror.ProductOf[P]): m.MirroredElemTypes = runtime.Tuples.fromProduct(p).asInstanceOf[m.MirroredElemTypes] @@ -368,31 +293,48 @@ object Tuple: given canEqualTuple[H1, T1 <: Tuple, H2, T2 <: Tuple]( using eqHead: CanEqual[H1, H2], eqTail: CanEqual[T1, T2] ): CanEqual[H1 *: T1, H2 *: T2] = CanEqual.derived - - private object helpers: - - /** Used to implement IndicesWhere */ - type IndicesWhereHelper[X <: Tuple, P[_ <: Union[X]] <: Boolean, N <: Int] <: Tuple = X match - case EmptyTuple => EmptyTuple - case h *: t => P[h] match - case true => N *: IndicesWhereHelper[t, P, S[N]] - case false => IndicesWhereHelper[t, P, S[N]] - - end helpers -end Tuple +} /** A tuple of 0 elements */ type EmptyTuple = EmptyTuple.type /** A tuple of 0 elements. */ -case object EmptyTuple extends Tuple: +case object EmptyTuple extends Tuple { override def toString(): String = "()" +} /** Tuple of arbitrary non-zero arity */ -sealed trait NonEmptyTuple extends Tuple +sealed trait NonEmptyTuple extends Tuple { + import Tuple.* + + /** Get the i-th element of this tuple. + * Equivalent to productElement but with a precise return type. 
+ */ + inline def apply[This >: this.type <: NonEmptyTuple](n: Int): Elem[This, n.type] = + runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] + + /** Get the head of this tuple */ + inline def head[This >: this.type <: NonEmptyTuple]: Head[This] = + runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] + + /** Get the initial part of the tuple without its last element */ + inline def init[This >: this.type <: NonEmptyTuple]: Init[This] = + runtime.Tuples.init(this).asInstanceOf[Init[This]] + + /** Get the last of this tuple */ + inline def last[This >: this.type <: NonEmptyTuple]: Last[This] = + runtime.Tuples.last(this).asInstanceOf[Last[This]] + + /** Get the tail of this tuple. + * This operation is O(this.size) + */ + inline def tail[This >: this.type <: NonEmptyTuple]: Tail[This] = + runtime.Tuples.tail(this).asInstanceOf[Tail[This]] +} @showAsInfix sealed abstract class *:[+H, +T <: Tuple] extends NonEmptyTuple -object `*:`: +object *: { def unapply[H, T <: Tuple](x: H *: T): (H, T) = (x.head, x.tail) +} diff --git a/library/src/scala/runtime/Tuples.scala b/library/src/scala/runtime/Tuples.scala index 8da21c777943..efb54c54d50b 100644 --- a/library/src/scala/runtime/Tuples.scala +++ b/library/src/scala/runtime/Tuples.scala @@ -1,7 +1,5 @@ package scala.runtime -import scala.annotation.experimental - object Tuples { inline val MaxSpecialized = 22 @@ -28,7 +26,7 @@ object Tuples { arr } - def fromArray(xs: Array[Object], n: Int): Tuple = n match { + def fromArray(xs: Array[Object]): Tuple = xs.length match { case 0 => EmptyTuple case 1 => Tuple1(xs(0)) case 2 => Tuple2(xs(0), xs(1)) @@ -55,15 +53,10 @@ object Tuples { case _ => TupleXXL.fromIArray(xs.clone().asInstanceOf[IArray[Object]]).asInstanceOf[Tuple] } - def fromArray(xs: Array[Object]): Tuple = fromArray(xs, xs.length) - - def fromIArray(xs: IArray[Object], n: Int): Tuple = - if n <= 22 || n != xs.length - then fromArray(xs.asInstanceOf[Array[Object]], n) + def fromIArray(xs: 
IArray[Object]): Tuple = + if (xs.length <= 22) fromArray(xs.asInstanceOf[Array[Object]]) else TupleXXL.fromIArray(xs).asInstanceOf[Tuple] - def fromIArray(xs: IArray[Object]): Tuple = fromIArray(xs, xs.length) - def fromProduct(xs: Product): Tuple = (xs.productArity match { case 0 => EmptyTuple case 1 => @@ -357,7 +350,7 @@ object Tuples { } } - def tail(self: Tuple): Tuple = (self: Any) match { + def tail(self: NonEmptyTuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlTail(xxl) case _ => specialCaseTail(self) } @@ -565,16 +558,16 @@ object Tuples { } } - def init(self: Tuple): Tuple = (self: Any) match { + def init(self: NonEmptyTuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlInit(xxl) case _ => specialCaseInit(self) } - def last(self: Tuple): Any = (self: Any) match { + def last(self: NonEmptyTuple): Any = (self: Any) match { case self: Product => self.productElement(self.productArity - 1) } - def apply(self: Tuple, n: Int): Any = + def apply(self: NonEmptyTuple, n: Int): Any = self.productElement(n) // Benchmarks showed that this is faster than doing (it1 zip it2).copyToArray(...) diff --git a/tests/neg/print-tuple-union.check b/tests/neg/print-tuple-union.check index 7d2c019de5a6..f3754aa5b17e 100644 --- a/tests/neg/print-tuple-union.check +++ b/tests/neg/print-tuple-union.check @@ -13,6 +13,6 @@ | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining case | - | case x *: xs => x | Tuple.Fold[xs, Nothing, [x, y] =>> x | y] + | case h *: t => h | Tuple.Fold[t, Nothing, [x, y] =>> x | y] | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/wildcard-match.check b/tests/neg/wildcard-match.check index fd20443c0a9f..d405326c3d2b 100644 --- a/tests/neg/wildcard-match.check +++ b/tests/neg/wildcard-match.check @@ -87,7 +87,8 @@ | trying to reduce shapeless.tuples.length[T2] | trying to reduce Tuple.Size[shapeless.tuples.to[T2]] | failed since selector shapeless.tuples.to[T2] - | does not uniquely determine parameter xs in - | case _ *: xs => scala.compiletime.ops.int.S[Tuple.Size[xs]] - | The computed bounds for the parameter are: + | does not uniquely determine parameters x, xs in + | case x *: xs => scala.compiletime.ops.int.S[Tuple.Size[xs]] + | The computed bounds for the parameters are: + | x <: Int | xs <: (Int, Int) diff --git a/tests/pos/tuple-filter.scala b/tests/pos/tuple-filter.scala index 0964d2e982d9..2c9638b2e47b 100644 --- a/tests/pos/tuple-filter.scala +++ b/tests/pos/tuple-filter.scala @@ -8,6 +8,3 @@ def Test = summon[Tuple.Filter[(1, 2, 3, 4), P] =:= (1, 2, 4)] summon[Tuple.Filter[(1, 2, 3, 4), RejectAll] =:= EmptyTuple] summon[Tuple.Filter[EmptyTuple, P] =:= EmptyTuple] - - import compiletime.ops.int.< - summon[Tuple.Filter[(1, 4, 7, 2, 10, 3, 4), [X <: Int] =>> X < 5] =:= (1, 4, 2, 3, 4)] diff --git a/tests/run/named-tuples-strawman-2.scala b/tests/run/named-tuples-strawman-2.scala index 4b32dd83f2eb..95f37ad23a93 100644 --- a/tests/run/named-tuples-strawman-2.scala +++ b/tests/run/named-tuples-strawman-2.scala @@ -5,6 +5,53 @@ import Tuple.* object TupleOps: + private object helpers: + + /** Used to implement IndicesWhere */ + type IndicesWhereHelper[X <: Tuple, P[_ <: Union[X]] <: Boolean, N <: Int] <: Tuple = X match + case EmptyTuple => EmptyTuple + case h *: t => P[h] match + case true => N *: 
IndicesWhereHelper[t, P, S[N]] + case false => IndicesWhereHelper[t, P, S[N]] + + end helpers + + /** A type level Boolean indicating whether the tuple `X` has an element + * that matches `Y`. + * @pre The elements of `X` are assumed to be singleton types + */ + type Contains[X <: Tuple, Y] <: Boolean = X match + case Y *: _ => true + case _ *: xs => Contains[xs, Y] + case EmptyTuple => false + + /** The index of `Y` in tuple `X` as a literal constant Int, + * or `Size[X]` if `Y` is disjoint from all element types in `X`. + */ + type IndexOf[X <: Tuple, Y] <: Int = X match + case Y *: _ => 0 + case _ *: xs => S[IndexOf[xs, Y]] + case EmptyTuple => 0 + + /** A tuple consisting of those indices `N` of tuple `X` where the predicate `P` + * is true for `Elem[X, N]`. Indices are type level values <: Int. + */ + type IndicesWhere[X <: Tuple, P[_ <: Union[X]] <: Boolean] = + helpers.IndicesWhereHelper[X, P, 0] + + extension [X <: Tuple](inline x: X) + + /** The index (starting at 0) of the first occurrence of `y.type` in the type `X` of `x` + * or `Size[X]` if no such element exists. + */ + inline def indexOf(y: Any): IndexOf[X, y.type] = constValue[IndexOf[X, y.type]] + + /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` */ + inline def contains(y: Any): Contains[X, y.type] = constValue[Contains[X, y.type]] + + end extension + + /** The `X` tuple, with its element at index `N` replaced by `Y`. 
* If `N` is equal to `Size[X]`, the element `Y` is appended instead */ @@ -60,7 +107,18 @@ object TupleOps: case EmptyTuple => X inline def concatDistinct[X <: Tuple, Y <: Tuple](xs: X, ys: Y): ConcatDistinct[X, Y] = - (xs ++ ys.filter[Y, [Elem] =>> ![Contains[X, Elem]]]).asInstanceOf[ConcatDistinct[X, Y]] + (xs ++ filter[Y, [Elem] =>> ![Contains[X, Elem]]](ys)).asInstanceOf[ConcatDistinct[X, Y]] + + /** A tuple consisting of all elements of this tuple that have types + * for which the given type level predicate `P` reduces to the literal + * constant `true`. + */ + inline def filter[X <: Tuple, P[_] <: Boolean](xs: X): Filter[X, P] = + val toInclude = constValueTuple[IndicesWhere[X, P]].toArray + val arr = new Array[Object](toInclude.length) + for i <- toInclude.indices do + arr(i) = xs.productElement(toInclude(i).asInstanceOf[Int]).asInstanceOf[Object] + Tuple.fromArray(arr).asInstanceOf[Filter[X, P]] object NamedTupleDecomposition: import NamedTupleOps.* From f80a8ddfa1afb5a4a81f33e8297f8aac068161ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 6 May 2024 18:19:49 +0200 Subject: [PATCH 326/465] Revert the addition of `type Fields` in `Selectable`. If we add it as is now, we will *not* be able to add the bound in a future release. It is best to leave it completely undefined. The typer is happy to ignore the case where it does not exist at all. --- library/src/scala/Selectable.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/library/src/scala/Selectable.scala b/library/src/scala/Selectable.scala index 93c799dd124b..74004a350679 100644 --- a/library/src/scala/Selectable.scala +++ b/library/src/scala/Selectable.scala @@ -22,8 +22,7 @@ package scala * In this case the call will synthesize `Class` arguments for the erasure of * all formal parameter types of the method in the structural type. 
*/ -trait Selectable extends Any: - type Fields // TODO: add <: NamedTyple.AnyNamedTuple when NamedTuple is no longer experimental +trait Selectable extends Any object Selectable: /* Scala 2 compat + allowing for cross-compilation: From 975df4a136910fe0451514ef0dc34aa29a60aef7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 3 May 2024 12:06:27 +0200 Subject: [PATCH 327/465] Simplify the logic for checking unused imports. Instead of dealing with entire `tpd.Import`s at the end of the scope, we eagerly flatten them into individual `ImportSelector`s. We store them along with some data, including a mutable flag for whether a selector has been used. This allows to dramatically simplify `isInImport`, as well as more aggressively cache the resolution of selectors. We also get rid of the `IdentityHashMap`. The algorithm is still O(n*m) where n is the number of imports in a scope, and m the number of references found in that scope. It is not entirely clear to me whether the previous logic was already O(n*m) or worse (it may have included an additional p factor for the number of possible selections from a given qualifier). Regardless, it is already quite a bit faster than before, thanks to smaller constant factors. 
--- .../tools/dotc/transform/CheckUnused.scala | 200 ++++++++++-------- tests/warn/i15503i.scala | 4 +- 2 files changed, 112 insertions(+), 92 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 15a73f18f323..84bf705905b1 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -2,6 +2,7 @@ package dotty.tools.dotc.transform import scala.annotation.tailrec +import dotty.tools.uncheckedNN import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.ast.tpd.{Inlined, TreeTraverser} @@ -109,8 +110,8 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke ctx override def prepareForSelect(tree: tpd.Select)(using Context): Context = - val name = tree.removeAttachment(OriginalName).orElse(Some(tree.name)) - unusedDataApply(_.registerUsed(tree.symbol, name)) + val name = tree.removeAttachment(OriginalName) + unusedDataApply(_.registerUsed(tree.symbol, name, includeForImport = tree.qualifier.span.isSynthetic)) override def prepareForBlock(tree: tpd.Block)(using Context): Context = pushInBlockTemplatePackageDef(tree) @@ -128,7 +129,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke if !tree.symbol.is(Module) then ud.registerDef(tree) if tree.name.startsWith("derived$") && tree.typeOpt != NoType then - ud.registerUsed(tree.typeOpt.typeSymbol, None, true) + ud.registerUsed(tree.typeOpt.typeSymbol, None, isDerived = true) ud.addIgnoredUsage(tree.symbol) } @@ -359,7 +360,7 @@ object CheckUnused: var unusedAggregate: Option[UnusedResult] = None /* IMPORTS */ - private val impInScope = MutStack(MutList[tpd.Import]()) + private val impInScope = MutStack(MutList[ImportSelectorData]()) /** * We store the symbol along with their accessibility without import. 
* Accessibility to their definition in outer context/scope @@ -369,7 +370,7 @@ object CheckUnused: private val usedInScope = MutStack(MutSet[(Symbol,Boolean, Option[Name], Boolean)]()) private val usedInPosition = MutMap.empty[Name, MutSet[Symbol]] /* unused import collected during traversal */ - private val unusedImport = new java.util.IdentityHashMap[ImportSelector, Unit] + private val unusedImport = MutList.empty[ImportSelectorData] /* LOCAL DEF OR VAL / Private Def or Val / Pattern variables */ private val localDefInScope = MutList.empty[tpd.MemberDef] @@ -409,16 +410,17 @@ object CheckUnused: * The optional name will be used to target the right import * as the same element can be imported with different renaming */ - def registerUsed(sym: Symbol, name: Option[Name], isDerived: Boolean = false)(using Context): Unit = + def registerUsed(sym: Symbol, name: Option[Name], includeForImport: Boolean = true, isDerived: Boolean = false)(using Context): Unit = if sym.exists && !isConstructorOfSynth(sym) && !doNotRegister(sym) then if sym.isConstructor then - registerUsed(sym.owner, None) // constructor are "implicitly" imported with the class + registerUsed(sym.owner, None, includeForImport) // constructor are "implicitly" imported with the class else val accessibleAsIdent = sym.isAccessibleAsIdent def addIfExists(sym: Symbol): Unit = if sym.exists then usedDef += sym - usedInScope.top += ((sym, accessibleAsIdent, name, isDerived)) + if includeForImport then + usedInScope.top += ((sym, accessibleAsIdent, name, isDerived)) addIfExists(sym) addIfExists(sym.companionModule) addIfExists(sym.companionClass) @@ -439,12 +441,27 @@ object CheckUnused: /** Register an import */ def registerImport(imp: tpd.Import)(using Context): Unit = - if !tpd.languageImport(imp.expr).nonEmpty && !imp.isGeneratedByEnum && !isTransparentAndInline(imp) then - impInScope.top += imp - if currScopeType.top != ScopeType.ReplWrapper then // #18383 Do not report top-level import's in the repl as 
unused - for s <- imp.selectors do - if !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) && !isImportIgnored(imp, s) then - unusedImport.put(s, ()) + if + !tpd.languageImport(imp.expr).nonEmpty + && !imp.isGeneratedByEnum + && !isTransparentAndInline(imp) + && currScopeType.top != ScopeType.ReplWrapper // #18383 Do not report top-level import's in the repl as unused + then + val qualTpe = imp.expr.tpe + + // Put wildcard imports at the end, because they have lower priority within one Import + val reorderdSelectors = + val (wildcardSels, nonWildcardSels) = imp.selectors.partition(_.isWildcard) + nonWildcardSels ::: wildcardSels + + val newDataInScope = + for sel <- reorderdSelectors yield + val data = new ImportSelectorData(qualTpe, sel) + if shouldSelectorBeReported(imp, sel) || isImportExclusion(sel) || isImportIgnored(imp, sel) then + // Immediately mark the selector as used + data.markUsed() + data + impInScope.top.prependAll(newDataInScope) end registerImport /** Register (or not) some `val` or `def` according to the context, scope and flags */ @@ -482,40 +499,27 @@ object CheckUnused: * - If there are imports in this scope check for unused ones */ def popScope()(using Context): Unit = - // used symbol in this scope - val used = usedInScope.pop().toSet - // used imports in this scope - val imports = impInScope.pop() - val kept = used.filterNot { (sym, isAccessible, optName, isDerived) => - // keep the symbol for outer scope, if it matches **no** import - // This is the first matching wildcard selector - var selWildCard: Option[ImportSelector] = None - - val matchedExplicitImport = imports.exists { imp => - sym.isInImport(imp, isAccessible, optName, isDerived) match - case None => false - case optSel@Some(sel) if sel.isWildcard => - if selWildCard.isEmpty then selWildCard = optSel - // We keep wildcard symbol for the end as they have the least precedence - false - case Some(sel) => - unusedImport.remove(sel) - true + currScopeType.pop() + val usedInfos 
= usedInScope.pop() + val selDatas = impInScope.pop() + + for usedInfo <- usedInfos do + val (sym, isAccessible, optName, isDerived) = usedInfo + val usedData = selDatas.find { selData => + sym.isInImport(selData, isAccessible, optName, isDerived) } - if !matchedExplicitImport && selWildCard.isDefined then - unusedImport.remove(selWildCard.get) - true // a matching import exists so the symbol won't be kept for outer scope - else - matchedExplicitImport - } - - // if there's an outer scope - if usedInScope.nonEmpty then - // we keep the symbols not referencing an import in this scope - // as it can be the only reference to an outer import - usedInScope.top ++= kept - // retrieve previous scope type - currScopeType.pop + usedData match + case Some(data) => + data.markUsed() + case None => + // Propagate the symbol one level up + if usedInScope.nonEmpty then + usedInScope.top += usedInfo + end for // each in `used` + + for selData <- selDatas do + if !selData.isUsed then + unusedImport += selData end popScope /** @@ -534,9 +538,8 @@ object CheckUnused: val sortedImp = if ctx.settings.WunusedHas.imports || ctx.settings.WunusedHas.strictNoImplicitWarn then - import scala.jdk.CollectionConverters.* - unusedImport.keySet().nn.iterator().nn.asScala - .map(d => UnusedSymbol(d.srcPos, d.name, WarnTypes.Imports)).toList + unusedImport.toList + .map(d => UnusedSymbol(d.selector.srcPos, d.selector.name, WarnTypes.Imports)) else Nil // Partition to extract unset local variables from usedLocalDefs @@ -697,52 +700,40 @@ object CheckUnused: } /** Given an import and accessibility, return selector that matches import<->symbol */ - private def isInImport(imp: tpd.Import, isAccessible: Boolean, altName: Option[Name], isDerived: Boolean)(using Context): Option[ImportSelector] = + private def isInImport(selData: ImportSelectorData, isAccessible: Boolean, altName: Option[Name], isDerived: Boolean)(using Context): Boolean = assert(sym.exists) - val tpd.Import(qual, sels) = imp - val 
qualTpe = qual.tpe - val dealiasedSym = sym.dealias - - val selectionsToDealias: List[SingleDenotation] = - val typeSelections = sels.flatMap(n => qualTpe.member(n.name.toTypeName).alternatives) - val termSelections = sels.flatMap(n => qualTpe.member(n.name.toTermName).alternatives) - typeSelections ::: termSelections - - val qualHasSymbol: Boolean = - val simpleSelections = qualTpe.member(sym.name).alternatives - simpleSelections.exists(d => d.symbol == sym || d.symbol.dealias == dealiasedSym) - || selectionsToDealias.exists(d => d.symbol.dealias == dealiasedSym) + val selector = selData.selector - def selector: Option[ImportSelector] = - sels.find(sel => sym.name.toTermName == sel.name && altName.forall(n => n.toTermName == sel.rename)) - - def dealiasedSelector: Option[ImportSelector] = - if isDerived then - sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collectFirst { - case (sel, sym) if sym.dealias == dealiasedSym => sel - } - else None - - def givenSelector: Option[ImportSelector] = - if sym.is(Given) || sym.is(Implicit) then - sels.filter(sel => sel.isGiven && !sel.bound.isEmpty).find(sel => sel.boundTpe =:= sym.info) - else None - - def wildcard: Option[ImportSelector] = - sels.find(sel => sel.isWildcard && ((sym.is(Given) == sel.isGiven && sel.bound.isEmpty) || sym.is(Implicit))) - - if qualHasSymbol && (!isAccessible || altName.exists(_.toSimpleName != sym.name.toSimpleName)) then - selector.orElse(dealiasedSelector).orElse(givenSelector).orElse(wildcard) // selector with name or wildcard (or given) + if isAccessible && !altName.exists(_.toTermName != sym.name.toTermName) then + // Even if this import matches, it is pointless because the symbol would be accessible anyway + false + else if !selector.isWildcard then + if altName.exists(explicitName => selector.rename != explicitName.toTermName) then + // if there is an explicit name, it must match + false + else + if isDerived then + // See i15503i.scala, grep for "package 
foo.test.i17156" + selData.allSymbolsDealiasedForNamed.contains(dealias(sym)) + else + selData.allSymbolsForNamed.contains(sym) else - None + // Wildcard + if !selData.qualTpe.member(sym.name).hasAltWith(_.symbol == sym) then + // The qualifier does not have the target symbol as a member + false + else + if selector.isGiven then + // Further check that the symbol is a given or implicit and conforms to the bound + sym.isOneOf(Given | Implicit) + && (selector.bound.isEmpty || sym.info <:< selector.boundTpe) + else + // Normal wildcard, check that the symbol is not a given (but can be implicit) + !sym.is(Given) + end if end isInImport - private def dealias(using Context): Symbol = - if sym.isType && sym.asType.denot.isAliasType then - sym.asType.typeRef.dealias.typeSymbol - else sym - /** Annotated with @unused */ private def isUnusedAnnot(using Context): Boolean = sym.annotations.exists(a => a.symbol == ctx.definitions.UnusedAnnot) @@ -840,6 +831,29 @@ object CheckUnused: case _:tpd.Block => Local case _ => Other + final class ImportSelectorData(val qualTpe: Type, val selector: ImportSelector): + private var myUsed: Boolean = false + + def markUsed(): Unit = myUsed = true + + def isUsed: Boolean = myUsed + + private var myAllSymbols: Set[Symbol] | Null = null + + def allSymbolsForNamed(using Context): Set[Symbol] = + if myAllSymbols == null then + val allDenots = qualTpe.member(selector.name).alternatives ::: qualTpe.member(selector.name.toTypeName).alternatives + myAllSymbols = allDenots.map(_.symbol).toSet + myAllSymbols.uncheckedNN + + private var myAllSymbolsDealiased: Set[Symbol] | Null = null + + def allSymbolsDealiasedForNamed(using Context): Set[Symbol] = + if myAllSymbolsDealiased == null then + myAllSymbolsDealiased = allSymbolsForNamed.map(sym => dealias(sym)) + myAllSymbolsDealiased.uncheckedNN + end ImportSelectorData + case class UnusedSymbol(pos: SrcPos, name: Name, warnType: WarnTypes) /** A container for the results of the used elements analysis */ 
case class UnusedResult(warnings: Set[UnusedSymbol]) @@ -847,4 +861,10 @@ object CheckUnused: val Empty = UnusedResult(Set.empty) end UnusedData + private def dealias(symbol: Symbol)(using Context): Symbol = + if symbol.isType && symbol.asType.denot.isAliasType then + symbol.asType.typeRef.dealias.typeSymbol + else + symbol + end CheckUnused diff --git a/tests/warn/i15503i.scala b/tests/warn/i15503i.scala index 329b81327288..8a8ed487477a 100644 --- a/tests/warn/i15503i.scala +++ b/tests/warn/i15503i.scala @@ -270,7 +270,7 @@ package foo.test.i16679b: given x: myPackage.CaseClassName[secondPackage.CoolClass] = null object secondPackage: - import myPackage.CaseClassName // OK + import myPackage.CaseClassName // warn import Foo.x case class CoolClass(i: Int) println(summon[myPackage.CaseClassName[CoolClass]]) @@ -312,4 +312,4 @@ package foo.test.i17117: val test = t1.test } - } \ No newline at end of file + } From 8553bfcc2dc17073b49b3fbed82fa1b7079abcc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 3 May 2024 17:40:12 +0200 Subject: [PATCH 328/465] Move the isAccessible test up to registerSym, instead of isInImport. That test does not rely on any information dependent on the import selectors. It only relies on information at the use site. Eagerly checking it means we do not put as many symbols into the `usedInScope` set, which is good because it is one of the complexity factors of the unused-imports analysis. 
--- .../tools/dotc/transform/CheckUnused.scala | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 84bf705905b1..bd4ef73d6eea 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -367,7 +367,7 @@ object CheckUnused: * * See the `isAccessibleAsIdent` extension method below in the file */ - private val usedInScope = MutStack(MutSet[(Symbol,Boolean, Option[Name], Boolean)]()) + private val usedInScope = MutStack(MutSet[(Symbol, Option[Name], Boolean)]()) private val usedInPosition = MutMap.empty[Name, MutSet[Symbol]] /* unused import collected during traversal */ private val unusedImport = MutList.empty[ImportSelectorData] @@ -415,12 +415,16 @@ object CheckUnused: if sym.isConstructor then registerUsed(sym.owner, None, includeForImport) // constructor are "implicitly" imported with the class else - val accessibleAsIdent = sym.isAccessibleAsIdent + // If the symbol is accessible in this scope without an import, do not register it for unused import analysis + val includeForImport1 = + includeForImport + && (name.exists(_.toTermName != sym.name.toTermName) || !sym.isAccessibleAsIdent) + def addIfExists(sym: Symbol): Unit = if sym.exists then usedDef += sym - if includeForImport then - usedInScope.top += ((sym, accessibleAsIdent, name, isDerived)) + if includeForImport1 then + usedInScope.top += ((sym, name, isDerived)) addIfExists(sym) addIfExists(sym.companionModule) addIfExists(sym.companionClass) @@ -504,9 +508,9 @@ object CheckUnused: val selDatas = impInScope.pop() for usedInfo <- usedInfos do - val (sym, isAccessible, optName, isDerived) = usedInfo + val (sym, optName, isDerived) = usedInfo val usedData = selDatas.find { selData => - sym.isInImport(selData, isAccessible, optName, isDerived) + sym.isInImport(selData, optName, 
isDerived) } usedData match case Some(data) => @@ -700,15 +704,12 @@ object CheckUnused: } /** Given an import and accessibility, return selector that matches import<->symbol */ - private def isInImport(selData: ImportSelectorData, isAccessible: Boolean, altName: Option[Name], isDerived: Boolean)(using Context): Boolean = + private def isInImport(selData: ImportSelectorData, altName: Option[Name], isDerived: Boolean)(using Context): Boolean = assert(sym.exists) val selector = selData.selector - if isAccessible && !altName.exists(_.toTermName != sym.name.toTermName) then - // Even if this import matches, it is pointless because the symbol would be accessible anyway - false - else if !selector.isWildcard then + if !selector.isWildcard then if altName.exists(explicitName => selector.rename != explicitName.toTermName) then // if there is an explicit name, it must match false From 836a25a105eb38f130a575779cbaca4bd20d021d Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Tue, 7 May 2024 11:58:43 +0200 Subject: [PATCH 329/465] Relax font-weight reset (#20348) --- scaladoc/resources/dotty_res/styles/theme/typography.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scaladoc/resources/dotty_res/styles/theme/typography.css b/scaladoc/resources/dotty_res/styles/theme/typography.css index cd8730f31dc2..3e75847a3e28 100644 --- a/scaladoc/resources/dotty_res/styles/theme/typography.css +++ b/scaladoc/resources/dotty_res/styles/theme/typography.css @@ -1,4 +1,4 @@ -* { +h1, h2, h3, h4, h5, h6 { /*text-rendering: geometricPrecision;*/ font-weight: initial; } From 1a62d0a5758aa0900187176dc086bf098a971672 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ola=20Flisb=C3=A4ck?= Date: Tue, 7 May 2024 12:24:53 +0200 Subject: [PATCH 330/465] Fix three dead links in spec Types doc chapter (#20337) This PR most likely fixes three dead links currently served at: [https://scala-lang.org/files/archive/spec/3.4/03-types.html 
](https://scala-lang.org/files/archive/spec/3.4/03-types.html) I tried `sbt sbt scaladoc/generateScalaDocumentation` and `sbt scaladoc/generateReferenceDocumentation` to generate the updated documentation but I think some other rule is responsible for that, so I haven't checked that the updated links actually work, but I think they will. --- docs/_spec/03-types.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/_spec/03-types.md b/docs/_spec/03-types.md index 407a69b8c8c5..6bc7886c5677 100644 --- a/docs/_spec/03-types.md +++ b/docs/_spec/03-types.md @@ -197,7 +197,7 @@ An argument type of the form ´\Rightarrow T´ represents a [call-by-name parame Function types associate to the right, e.g. ´S \Rightarrow T \Rightarrow R´ is the same as ´S \Rightarrow (T \Rightarrow R)´. -Function types are [covariant](04-basic-definitions.md#variance-annotations) in their result type and [contravariant](04-basic-definitions.md#variance-annotations) in their argument types. +Function types are [covariant](04-basic-definitions.html#variance-annotations) in their result type and [contravariant](04-basic-definitions.html#variance-annotations) in their argument types. Function types translate into internal class types that define an `apply` method. Specifically, the ´n´-ary function type ´(T_1, ..., T_n) \Rightarrow R´ translates to the internal class type `scala.Function´_n´[´T_1´, ..., ´T_n´, ´R´]`. @@ -210,7 +210,7 @@ trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]: def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´ ``` -Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.md#the-function-classes) of the standard library page in this document. +Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.html#the-function-classes) of the standard library page in this document. 
_Dependent function types_ are function types whose parameters are named and can referred to in result types. In the concrete type ´(x_1: T_1, ..., x_n: T_n) \Rightarrow R´, ´R´ can refer to the parameters ´x_i´, notably to form path-dependent types. From 58549596db6f1dd4714c44855a8d5fa978b17251 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Tue, 7 May 2024 14:30:29 +0200 Subject: [PATCH 331/465] Lift Scala Settings from experimental to stabilized (#20199) Fixes: https://github.com/scala/scala3/issues/19693 This is the tracking list from the issue and the status of migraton. - [x] lift to `-X` - [x] `-Yresolve-term-conflict` - [x] `-YnoGenericSig` - [x] `-Ydumpclasses` - [x] `-YjarCompressionLevel` - [x] `-YkindProjector` - [x] `-YdropComments` - [x] `-YcookComments` - [x] `-YreadComments` - [x] `-YnoDecodeStacktraces` - [x] `-YnoEnrichErrorMessages` - [x] `-YdebugMacros` - [ ] `-YjavaTasty`- deprecate in 3.7.0 - [ ] `-YjavaTastyOutput`- deprecate in 3.7.0 - [ ] `-YallowOutlineFromTasty`- deprecate in 3.7.0 - [x] lift to `-W` - [x] `-Wsafe-init` - [x] to be removed - [x] `--Yforce-inline-while-typing` // Note: this was already removed by https://github.com/scala/scala3/commit/05eb7f7ae514d1db5e0bece30d63cf5a30164ee8 - [ ] TBD - [ ] `-YfromTastyIgnoreList` // this flag was not decided - [x] `-YoutputOnlyTasty` I suppose 2 last flags that have to be decided are maintained by @bishabosha Do you know their status ? Other than that, this PR implemented a deprecation mechanism for the future e.g. ```scala Deprecation( msg = "message", replacedBy = Some("newMessage"), ) ``` EDIT: As it was decided, options responsible for pipelining will be retained until 3.7.0. The code is commented and following PR which will deprecate them will be created after merging this one. EDIT2: `-Xlint` is a very problematic flag. It was added into the compiler in 3.4 bypassing argument verification. 
Then in the future, when it was deprecated in favour of `-Wshadow` we couldn't exactly map them 1-1, and it was deprecated in following way: - Scala 2: `-Xlint` is valid flag, - Scala pre 3.4: `-Xlint` was skipped as it was not implemented, - Scala post 3.4 pre flag refactor: `-Xlint` was implemented allowing for `all`, `private-shadow` and `type-parameter-shadow` options. It also bypassed argument verification allowing standalone `-Xlint` to be ignored, not reported as error, - Scala post 3.4 post flag refactor: This flag was then deprecated in favour of `-Wshadow`. We didn't want to commit this mistake again and allow standalone `-Wshadow` thus when it was deprecated, we didn't handle it as we should (deprecated option should trigger replacement when possible). That's why I'm retaining the previous behaviour of `-Xlint`. It will be removed in the future versions as it is already deprecated. --- community-build/community-projects/AsyncFile | 2 +- community-build/community-projects/Equal | 2 +- community-build/community-projects/FingerTree | 2 +- community-build/community-projects/Log | 2 +- community-build/community-projects/Lucre | 2 +- community-build/community-projects/Model | 2 +- community-build/community-projects/Numbers | 2 +- community-build/community-projects/Serial | 2 +- community-build/community-projects/Span | 2 +- community-build/community-projects/discipline | 2 +- .../community-projects/discipline-munit | 2 +- .../community-projects/discipline-specs2 | 2 +- community-build/community-projects/fs2 | 2 +- community-build/community-projects/scala-stm | 2 +- community-build/community-projects/scodec | 2 +- community-build/community-projects/verify | 2 +- .../scala/dotty/communitybuild/projects.scala | 19 +- .../tools/backend/jvm/BCodeHelpers.scala | 2 +- .../jvm/PostProcessorFrontendAccess.scala | 6 +- compiler/src/dotty/tools/dotc/Driver.scala | 2 +- compiler/src/dotty/tools/dotc/Run.scala | 2 +- .../dotty/tools/dotc/config/CliCommand.scala | 3 +- 
.../tools/dotc/config/ScalaSettings.scala | 85 +++++-- .../dotty/tools/dotc/config/Settings.scala | 238 ++++++++++-------- .../src/dotty/tools/dotc/core/Phases.scala | 6 +- .../dotty/tools/dotc/core/SymbolLoaders.scala | 6 +- .../src/dotty/tools/dotc/core/Symbols.scala | 2 +- .../dotty/tools/dotc/core/TypeErrors.scala | 4 +- .../tools/dotc/core/tasty/TreePickler.scala | 4 +- .../tools/dotc/core/tasty/TreeUnpickler.scala | 4 +- .../dotc/decompiler/IDEDecompilerDriver.scala | 2 +- .../tools/dotc/fromtasty/ReadTasty.scala | 8 +- .../dotc/interactive/InteractiveDriver.scala | 4 +- .../dotty/tools/dotc/parsing/Parsers.scala | 14 +- .../dotty/tools/dotc/parsing/Scanners.scala | 2 +- compiler/src/dotty/tools/dotc/report.scala | 4 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 8 +- .../tools/dotc/sbt/ExtractDependencies.scala | 2 +- .../tools/dotc/transform/CookComments.scala | 2 +- .../dotc/transform/GenericSignatures.scala | 2 +- .../dotty/tools/dotc/transform/Pickler.scala | 12 +- .../tools/dotc/transform/init/Checker.scala | 4 +- .../dotty/tools/dotc/typer/TyperPhase.scala | 2 +- compiler/src/dotty/tools/io/FileWriters.scala | 2 +- .../src/dotty/tools/repl/ReplDriver.scala | 4 +- .../quoted/runtime/impl/QuotesImpl.scala | 2 +- .../dotty/tools/dotc/CompilationTests.scala | 18 +- .../test/dotty/tools/dotc/SettingsTests.scala | 128 +++++++++- .../dotc/config/ScalaSettingsTests.scala | 101 ++++++++ .../dotc/core/tasty/CommentPicklingTest.scala | 2 +- .../reference/experimental/explicit-nulls.md | 2 +- .../other-new-features/safe-initialization.md | 2 +- .../other-new-features/safe-initialization.md | 2 +- project/Build.scala | 16 +- .../a/src/main/scala/a/A.scala | 0 .../b/src/main/scala/b/B.scala | 0 .../build.sbt | 2 +- .../project/DottyInjectedPlugin.scala | 0 .../test | 0 .../a/src/main/scala/a/A.scala | 0 .../b-early-out/.keep | 0 .../b/src/main/scala/b/B.scala | 0 .../build.sbt | 4 +- .../c/src/main/scala/c/C.scala | 0 .../project/DottyInjectedPlugin.scala | 0 
.../test | 0 .../a/src/main/scala/a/A.java | 0 .../a/src/main/scala/a/package.scala | 0 .../b-alt/.keep | 0 .../b/src/main/scala/b/B.scala | 0 .../build.sbt | 4 +- .../project/DottyInjectedPlugin.scala | 0 .../test | 0 .../a/src/main/scala/a/A.java | 0 .../a/src/main/scala/a/package.scala | 0 .../b-alt/.keep | 0 .../b/src/main/scala/b/B.scala | 0 .../build.sbt | 4 +- .../project/DottyInjectedPlugin.scala | 0 .../test | 0 .../a/src/main/scala/a/A.java | 0 .../a/src/main/scala/a/package.scala | 0 .../a_from_tasty/.keep | 0 .../b-alt/.keep | 0 .../b/src/main/scala/b/B.scala | 0 .../build.sbt | 12 +- .../project/DottyInjectedPlugin.scala | 0 .../test | 0 .../a-check/.keep | 0 .../a/src/main/scala/a/A.java | 0 .../a/src/main/scala/a/AImport.java | 0 .../a/src/main/scala/a/package.scala | 0 .../b-alt/.keep | 0 .../b/src/main/scala/b/B.scala | 0 .../b/src/main/scala/b/BImport.scala | 0 .../build.sbt | 8 +- .../c-alt/.keep | 0 .../c/src/main/scala/c/C.scala | 0 .../c/src/main/scala/c/CImport.scala | 0 .../project/DottyInjectedPlugin.scala | 0 .../test | 0 .../a/src/main/scala/a/A.java | 0 .../a/src/main/scala/a/package.scala | 0 .../b-alt/.keep | 0 .../b/src/main/scala/b/B.scala | 0 .../build.sbt | 4 +- .../project/DottyInjectedPlugin.scala | 0 .../test | 0 .../a/InnerClass.java | 0 .../a/InnerClassGen.java | 0 .../a/InnerClassSub.java | 0 .../a/RawTypes.java | 0 .../a/package.scala | 0 .../b/Test.scala | 0 .../build.sbt | 4 +- .../project/DottyInjectedPlugin.scala | 0 .../test | 0 .../a/src/main/scala/a/A.java | 0 .../a/src/main/scala/a/package.scala | 0 .../b-alt/.keep | 0 .../b/src/main/scala/b/B.scala | 0 .../build.sbt | 4 +- .../project/DottyInjectedPlugin.scala | 0 .../test | 0 sbt-test/pipelining/pipelining-test/test | 10 +- .../scaladoc/snippets/SnippetCompiler.scala | 4 +- .../tasty/inspector/TastyInspector.scala | 2 +- .../scala/quoted/staging/QuoteDriver.scala | 8 +- .../tasty/inspector/TastyInspector.scala | 2 +- tests/explicit-nulls/pos/i14682.scala | 4 +- 
tests/neg/i16438.scala | 2 +- tests/neg/i2887b.check | 2 +- tests/neg/kind-projector-underscores.scala | 2 +- tests/neg/kind-projector.scala | 2 +- tests/pos/i16777.scala | 2 +- tests/pos/i19806/Module.scala | 2 +- tests/pos/kind-projector-underscores.scala | 2 +- tests/pos/kind-projector.scala | 2 +- tests/run-macros/i12351/Test_2.scala | 2 +- tests/run-macros/i12352/Main.scala | 2 +- tests/untried/neg/choices.check | 2 +- tests/untried/neg/choices.flags | 2 +- 142 files changed, 581 insertions(+), 279 deletions(-) rename sbt-test/pipelining/{Yearly-tasty-output-inline => Xearly-tasty-output-inline}/a/src/main/scala/a/A.scala (100%) rename sbt-test/pipelining/{Yearly-tasty-output-inline => Xearly-tasty-output-inline}/b/src/main/scala/b/B.scala (100%) rename sbt-test/pipelining/{Yearly-tasty-output-inline => Xearly-tasty-output-inline}/build.sbt (90%) rename sbt-test/pipelining/{Yearly-tasty-output-inline => Xearly-tasty-output-inline}/project/DottyInjectedPlugin.scala (100%) rename sbt-test/pipelining/{Yearly-tasty-output-inline => Xearly-tasty-output-inline}/test (100%) rename sbt-test/pipelining/{Yearly-tasty-output => Xearly-tasty-output}/a/src/main/scala/a/A.scala (100%) rename sbt-test/pipelining/{Yearly-tasty-output => Xearly-tasty-output}/b-early-out/.keep (100%) rename sbt-test/pipelining/{Yearly-tasty-output => Xearly-tasty-output}/b/src/main/scala/b/B.scala (100%) rename sbt-test/pipelining/{Yearly-tasty-output => Xearly-tasty-output}/build.sbt (87%) rename sbt-test/pipelining/{Yearly-tasty-output => Xearly-tasty-output}/c/src/main/scala/c/C.scala (100%) rename sbt-test/pipelining/{Yearly-tasty-output => Xearly-tasty-output}/project/DottyInjectedPlugin.scala (100%) rename sbt-test/pipelining/{Yearly-tasty-output => Xearly-tasty-output}/test (100%) rename sbt-test/pipelining/{Yjava-tasty-annotation => Xjava-tasty-annotation}/a/src/main/scala/a/A.java (100%) rename sbt-test/pipelining/{Yjava-tasty-annotation => 
Xjava-tasty-annotation}/a/src/main/scala/a/package.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-annotation => Xjava-tasty-annotation}/b-alt/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-annotation => Xjava-tasty-annotation}/b/src/main/scala/b/B.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-annotation => Xjava-tasty-annotation}/build.sbt (87%) rename sbt-test/pipelining/{Yjava-tasty-annotation => Xjava-tasty-annotation}/project/DottyInjectedPlugin.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-annotation => Xjava-tasty-annotation}/test (100%) rename sbt-test/pipelining/{Yjava-tasty-enum => Xjava-tasty-enum}/a/src/main/scala/a/A.java (100%) rename sbt-test/pipelining/{Yjava-tasty-enum => Xjava-tasty-enum}/a/src/main/scala/a/package.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-enum => Xjava-tasty-enum}/b-alt/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-enum => Xjava-tasty-enum}/b/src/main/scala/b/B.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-enum => Xjava-tasty-enum}/build.sbt (92%) rename sbt-test/pipelining/{Yjava-tasty-enum => Xjava-tasty-enum}/project/DottyInjectedPlugin.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-enum => Xjava-tasty-enum}/test (100%) rename sbt-test/pipelining/{Yjava-tasty-from-tasty => Xjava-tasty-from-tasty}/a/src/main/scala/a/A.java (100%) rename sbt-test/pipelining/{Yjava-tasty-from-tasty => Xjava-tasty-from-tasty}/a/src/main/scala/a/package.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-from-tasty => Xjava-tasty-from-tasty}/a_from_tasty/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-from-tasty => Xjava-tasty-from-tasty}/b-alt/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-from-tasty => Xjava-tasty-from-tasty}/b/src/main/scala/b/B.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-from-tasty => Xjava-tasty-from-tasty}/build.sbt (87%) rename sbt-test/pipelining/{Yjava-tasty-from-tasty => Xjava-tasty-from-tasty}/project/DottyInjectedPlugin.scala (100%) rename 
sbt-test/pipelining/{Yjava-tasty-from-tasty => Xjava-tasty-from-tasty}/test (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/a-check/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/a/src/main/scala/a/A.java (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/a/src/main/scala/a/AImport.java (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/a/src/main/scala/a/package.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/b-alt/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/b/src/main/scala/b/B.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/b/src/main/scala/b/BImport.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/build.sbt (91%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/c-alt/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/c/src/main/scala/c/C.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/c/src/main/scala/c/CImport.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/project/DottyInjectedPlugin.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-fromjavaobject => Xjava-tasty-fromjavaobject}/test (100%) rename sbt-test/pipelining/{Yjava-tasty-generic => Xjava-tasty-generic}/a/src/main/scala/a/A.java (100%) rename sbt-test/pipelining/{Yjava-tasty-generic => Xjava-tasty-generic}/a/src/main/scala/a/package.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-generic => Xjava-tasty-generic}/b-alt/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-generic => Xjava-tasty-generic}/b/src/main/scala/b/B.scala 
(100%) rename sbt-test/pipelining/{Yjava-tasty-generic => Xjava-tasty-generic}/build.sbt (91%) rename sbt-test/pipelining/{Yjava-tasty-generic => Xjava-tasty-generic}/project/DottyInjectedPlugin.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-generic => Xjava-tasty-generic}/test (100%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/a/InnerClass.java (100%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/a/InnerClassGen.java (100%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/a/InnerClassSub.java (100%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/a/RawTypes.java (100%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/a/package.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/b/Test.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/build.sbt (92%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/project/DottyInjectedPlugin.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-paths => Xjava-tasty-paths}/test (100%) rename sbt-test/pipelining/{Yjava-tasty-result-types => Xjava-tasty-result-types}/a/src/main/scala/a/A.java (100%) rename sbt-test/pipelining/{Yjava-tasty-result-types => Xjava-tasty-result-types}/a/src/main/scala/a/package.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-result-types => Xjava-tasty-result-types}/b-alt/.keep (100%) rename sbt-test/pipelining/{Yjava-tasty-result-types => Xjava-tasty-result-types}/b/src/main/scala/b/B.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-result-types => Xjava-tasty-result-types}/build.sbt (92%) rename sbt-test/pipelining/{Yjava-tasty-result-types => Xjava-tasty-result-types}/project/DottyInjectedPlugin.scala (100%) rename sbt-test/pipelining/{Yjava-tasty-result-types => Xjava-tasty-result-types}/test (100%) diff --git a/community-build/community-projects/AsyncFile b/community-build/community-projects/AsyncFile index 
d72a5279e4b0..7bdd11657e3e 160000 --- a/community-build/community-projects/AsyncFile +++ b/community-build/community-projects/AsyncFile @@ -1 +1 @@ -Subproject commit d72a5279e4b055ad13d1c19d75939b9bd9d014a0 +Subproject commit 7bdd11657e3e840c31f27140942c8dc9e07ea7b8 diff --git a/community-build/community-projects/Equal b/community-build/community-projects/Equal index 4340d3a6b503..4f357131b4ea 160000 --- a/community-build/community-projects/Equal +++ b/community-build/community-projects/Equal @@ -1 +1 @@ -Subproject commit 4340d3a6b503aad4fbea89ee6026923127e8b1ab +Subproject commit 4f357131b4ea719ba5d09a62dd577a8aab204c4d diff --git a/community-build/community-projects/FingerTree b/community-build/community-projects/FingerTree index 0ed006549a78..eaadc6e0c660 160000 --- a/community-build/community-projects/FingerTree +++ b/community-build/community-projects/FingerTree @@ -1 +1 @@ -Subproject commit 0ed006549a78036aac0b09375e90237e72f599fe +Subproject commit eaadc6e0c660d27fc77a2cb4ab6a265a8e746f97 diff --git a/community-build/community-projects/Log b/community-build/community-projects/Log index 1839754549ed..c55ac4b7a822 160000 --- a/community-build/community-projects/Log +++ b/community-build/community-projects/Log @@ -1 +1 @@ -Subproject commit 1839754549ed690fbba2ea1b220e3f70f8d2ba91 +Subproject commit c55ac4b7a82278f896f25b291e129440d2436fb4 diff --git a/community-build/community-projects/Lucre b/community-build/community-projects/Lucre index 0def1dcb1aa6..1008f0b7f513 160000 --- a/community-build/community-projects/Lucre +++ b/community-build/community-projects/Lucre @@ -1 +1 @@ -Subproject commit 0def1dcb1aa63ba8f398428c000cb2a2f166cca4 +Subproject commit 1008f0b7f51374ddbc947e677c505fa97677b7d4 diff --git a/community-build/community-projects/Model b/community-build/community-projects/Model index d797f70e9d17..fb73627f37d7 160000 --- a/community-build/community-projects/Model +++ b/community-build/community-projects/Model @@ -1 +1 @@ -Subproject commit 
d797f70e9d178fa6a70d6aa2d3c3324bc1c27b48 +Subproject commit fb73627f37d77d97892a4a0eebe5bd7406559366 diff --git a/community-build/community-projects/Numbers b/community-build/community-projects/Numbers index 656dfd3f7c0a..e19972adb794 160000 --- a/community-build/community-projects/Numbers +++ b/community-build/community-projects/Numbers @@ -1 +1 @@ -Subproject commit 656dfd3f7c0a541b243e2d0f5aabbd20fc8bcea6 +Subproject commit e19972adb7941871e8b1b4a76de0e0c2b4d9fbfc diff --git a/community-build/community-projects/Serial b/community-build/community-projects/Serial index c161cc36e68c..e69b44086955 160000 --- a/community-build/community-projects/Serial +++ b/community-build/community-projects/Serial @@ -1 +1 @@ -Subproject commit c161cc36e68c0d24a508fc9a52a44551c779c682 +Subproject commit e69b44086955023b8747ac10791ad10baad0c5cc diff --git a/community-build/community-projects/Span b/community-build/community-projects/Span index da4c4a9c335c..8d9b4575482e 160000 --- a/community-build/community-projects/Span +++ b/community-build/community-projects/Span @@ -1 +1 @@ -Subproject commit da4c4a9c335c114dbda829150d6476aec830cb84 +Subproject commit 8d9b4575482e103117b3fd2b016c7aaad5962789 diff --git a/community-build/community-projects/discipline b/community-build/community-projects/discipline index 09c975b18dc0..27016c356287 160000 --- a/community-build/community-projects/discipline +++ b/community-build/community-projects/discipline @@ -1 +1 @@ -Subproject commit 09c975b18dc0b4e10499fb2922abac82ea8b5252 +Subproject commit 27016c3562871c136e88cc13ffa64a02380265df diff --git a/community-build/community-projects/discipline-munit b/community-build/community-projects/discipline-munit index 4e61f1861956..975ae3efaddd 160000 --- a/community-build/community-projects/discipline-munit +++ b/community-build/community-projects/discipline-munit @@ -1 +1 @@ -Subproject commit 4e61f186195660529e7a6f7461b939477735e3f4 +Subproject commit 975ae3efadddaa558435c4c8326628618048fdad diff --git 
a/community-build/community-projects/discipline-specs2 b/community-build/community-projects/discipline-specs2 index e689c3e809a8..eb9427335a30 160000 --- a/community-build/community-projects/discipline-specs2 +++ b/community-build/community-projects/discipline-specs2 @@ -1 +1 @@ -Subproject commit e689c3e809a89a03cdbbb3a1771e33148715f6c7 +Subproject commit eb9427335a309d6dd1e82632298529ca6a0920fa diff --git a/community-build/community-projects/fs2 b/community-build/community-projects/fs2 index 6d7c6d6924cb..e91c54621b76 160000 --- a/community-build/community-projects/fs2 +++ b/community-build/community-projects/fs2 @@ -1 +1 @@ -Subproject commit 6d7c6d6924cb055028458ac8236622190acf66d1 +Subproject commit e91c54621b762a58c942b6576c42dcd94ba0fc0a diff --git a/community-build/community-projects/scala-stm b/community-build/community-projects/scala-stm index 3244edf13c41..cf204977752a 160000 --- a/community-build/community-projects/scala-stm +++ b/community-build/community-projects/scala-stm @@ -1 +1 @@ -Subproject commit 3244edf13c41f22ff8b45143186745e9eb469220 +Subproject commit cf204977752af7ec2ca3b50c43f27daa6a628f49 diff --git a/community-build/community-projects/scodec b/community-build/community-projects/scodec index b74f2085f071..9b0423b90de9 160000 --- a/community-build/community-projects/scodec +++ b/community-build/community-projects/scodec @@ -1 +1 @@ -Subproject commit b74f2085f07165d84b32c39eb214c9cc838711cc +Subproject commit 9b0423b90de95fc968fafe4543e6b16ef9f81d08 diff --git a/community-build/community-projects/verify b/community-build/community-projects/verify index ae37d7e153fc..f82bb3f52623 160000 --- a/community-build/community-projects/verify +++ b/community-build/community-projects/verify @@ -1 +1 @@ -Subproject commit ae37d7e153fc62d64c40a72c45f810511aef2e01 +Subproject commit f82bb3f52623e44f02b4b43f8bdf27f4f0a7d3d4 diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala 
b/community-build/src/scala/dotty/communitybuild/projects.scala index cc0f0ac1f20f..a0444505801a 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -149,7 +149,7 @@ final case class SbtCommunityProject( object SbtCommunityProject: def scalacOptions = List( "-Xcheck-macros", - "-Ysafe-init", + "-Wsafe-init", ) object projects: @@ -362,7 +362,7 @@ object projects: project = "shapeless-3", sbtTestCommand = "testJVM; testJS", sbtDocCommand = forceDoc("typeable", "deriving"), - scalacOptions = "-source" :: "3.3" :: SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), // due to -Xfatal-warnings + scalacOptions = "-source" :: "3.3" :: SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), // due to -Xfatal-warnings ) lazy val xmlInterpolator = SbtCommunityProject( @@ -429,7 +429,7 @@ object projects: sbtTestCommand = "unitTests/test", // Adds package sbtDocCommand = "coreJVM/doc", - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(munit, scodecBits), ) @@ -510,7 +510,7 @@ object projects: project = "discipline", sbtTestCommand = "coreJVM/test;coreJS/test", sbtPublishCommand = "set every credentials := Nil;coreJVM/publishLocal;coreJS/publishLocal", - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(scalacheck) ) @@ -526,7 +526,7 @@ object projects: sbtTestCommand = "test", sbtPublishCommand = "coreJVM/publishLocal;coreJS/publishLocal", dependencies = List(discipline), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init") + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init") ) lazy val simulacrumScalafixAnnotations = SbtCommunityProject( @@ -540,8 +540,7 @@ 
object projects: sbtTestCommand = "set Global/scalaJSStage := FastOptStage;rootJVM/test;rootJS/test", sbtPublishCommand = "rootJVM/publishLocal;rootJS/publishLocal", dependencies = List(discipline, disciplineMunit, scalacheck, simulacrumScalafixAnnotations), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init") // disable -Ysafe-init, due to -Xfatal-warning - + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init") // disable -Ysafe-init or -Wsafe-init, due to -Xfatal-warning ) lazy val catsMtl = SbtCommunityProject( @@ -656,7 +655,7 @@ object projects: """set actorTests/Compile/scalacOptions -= "-Xfatal-warnings"""", "akka-actor-tests/Test/compile", ).mkString("; "), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(scalatest, scalatestplusJunit, scalatestplusScalacheck) ) @@ -707,7 +706,7 @@ object projects: project = "fs2", sbtTestCommand = "coreJVM/test; coreJS/test", // io/test requires JDK9+ sbtPublishCommand = "coreJVM/publishLocal; coreJS/publishLocal", - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(cats, catsEffect3, munitCatsEffect, scalacheckEffect, scodecBits) ) @@ -744,7 +743,7 @@ object projects: project = "http4s", sbtTestCommand = """set ThisBuild / tlFatalWarnings := false; rootJVM/test""", sbtPublishCommand = "publishLocal", - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(cats, catsEffect3, fs2, disciplineMunit, scalacheckEffect) ) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index 385521e2785f..f8866f40d9d4 100644 --- 
a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -823,7 +823,7 @@ trait BCodeHelpers extends BCodeIdiomatic { // without it. This is particularly bad because the availability of // generic information could disappear as a consequence of a seemingly // unrelated change. - ctx.base.settings.YnoGenericSig.value + ctx.base.settings.XnoGenericSig.value || sym.is(Artifact) || sym.isAllOf(LiftedMethod) || sym.is(Bridge) diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala index f7955cbb350c..e1b2120fa848 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala @@ -121,12 +121,14 @@ object PostProcessorFrontendAccess { case (None, None) => "8" // least supported version by default override val debug: Boolean = ctx.debug - override val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser + override val dumpClassesDirectory: Option[String] = s.Xdumpclasses.valueSetByUser override val outputDirectory: AbstractFile = s.outputDir.value override val mainClass: Option[String] = s.XmainClass.valueSetByUser - override val jarCompressionLevel: Int = s.YjarCompressionLevel.value + override val jarCompressionLevel: Int = s.XjarCompressionLevel.value override val backendParallelism: Int = s.YbackendParallelism.value override val backendMaxWorkerQueue: Option[Int] = s.YbackendWorkerQueue.valueSetByUser + + @annotation.nowarn("cat=deprecation") override val outputOnlyTasty: Boolean = s.YoutputOnlyTasty.value } diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 6625b5ca6ea2..98abe2ac6c38 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -84,7 +84,7 @@ class Driver { 
Positioned.init(using ictx) inContext(ictx) { - if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then + if !ctx.settings.XdropComments.value || ctx.settings.XreadComments.value then ictx.setProperty(ContextDoc, new ContextDocstrings) val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) fileNamesOrNone.map { fileNames => diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index fa827432460a..11a0430480d9 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -377,7 +377,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint runCtx.withProgressCallback: cb => _progress = Progress(cb, this, fusedPhases.map(_.traversals).sum) val cancelAsyncTasty: () => Unit = - if !myAsyncTastyWritten && Phases.picklerPhase.exists && !ctx.settings.YearlyTastyOutput.isDefault then + if !myAsyncTastyWritten && Phases.picklerPhase.exists && !ctx.settings.XearlyTastyOutput.isDefault then initializeAsyncTasty() else () => {} diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index be97297218fa..5ac6b772df95 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -60,7 +60,8 @@ trait CliCommand: def defaultValue = s.default match case _: Int | _: String => s.default.toString case _ => "" - val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices : ${s.legalChoices}" else "") + val deprecationMessage = s.deprecation.map(d => s"Option deprecated.\n${d.msg}").getOrElse("") + val info = List(deprecationMessage, shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices : ${s.legalChoices}" else "") (s.name, 
info.filter(_.nonEmpty).mkString("\n")) end help diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 94b6d8023c34..86b657ddf00d 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -3,7 +3,7 @@ package config import scala.language.unsafeNulls import dotty.tools.dotc.config.PathResolver.Defaults -import dotty.tools.dotc.config.Settings.{Setting, SettingGroup, SettingCategory} +import dotty.tools.dotc.config.Settings.{Setting, SettingGroup, SettingCategory, Deprecation} import dotty.tools.dotc.config.SourceVersion import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.rewrites.Rewrites @@ -296,6 +296,7 @@ private sealed trait WarningSettings: def typeParameterShadow(using Context) = allOr("type-parameter-shadow") + val WcheckInit: Setting[Boolean] = BooleanSetting(WarningSetting, "Wsafe-init", "Ensure safe initialization of objects.") /** -X "Extended" or "Advanced" settings */ private sealed trait XSettings: @@ -322,6 +323,27 @@ private sealed trait XSettings: val XmainClass: Setting[String] = StringSetting(AdvancedSetting, "Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") val XimplicitSearchLimit: Setting[Int] = IntSetting(AdvancedSetting, "Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000) + val XtermConflict: Setting[String] = ChoiceSetting(AdvancedSetting, "Xresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") + val XnoGenericSig: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xno-generic-signatures", "Suppress generation of generic signatures for Java.") + val Xdumpclasses: Setting[String] = StringSetting(AdvancedSetting, "Xdump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes 
in-memory classloaders).", "") + val XjarCompressionLevel: Setting[Int] = IntChoiceSetting(AdvancedSetting, "Xjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION) + val XkindProjector: Setting[String] = ChoiceSetting(AdvancedSetting, "Xkind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. When invoked as -Xkind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable", legacyArgs = true) + + /** Documentation related settings */ + val XdropComments: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xdrop-docs", "Drop documentation when scanning source files.", aliases = List("-Xdrop-comments")) + val XcookComments: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xcook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Xcook-comments")) + val XreadComments: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xread-docs", "Read documentation from tasty.") + + /** Area-specific debug output */ + val XnoDecodeStacktraces: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") + val XnoEnrichErrorMessages: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xno-enrich-error-messages", "Show raw error messages, instead of enriching them with contextual information.") + val XdebugMacros: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xdebug-macros", "Show debug info when quote pattern match fails") + + /** Pipeline compilation options */ + val XjavaTasty: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = 
List("-Xpickle-java", "-Yjava-tasty", "-Ypickle-java"), preferPrevious = true) + val XearlyTastyOutput: Setting[AbstractFile] = OutputSetting(AdvancedSetting, "Xearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Xpickle-write", "-Yearly-tasty-output", "-Ypickle-write"), preferPrevious = true) + val XallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.", aliases = List("-Yallow-outline-from-tasty")) + val XmixinForceForwarders = ChoiceSetting( AdvancedSetting, name = "Xmixin-force-forwarders", @@ -337,8 +359,8 @@ private sealed trait XSettings: val XmacroSettings: Setting[List[String]] = MultiStringSetting(AdvancedSetting, "Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") - // Deprecated - val Xlint: Setting[_] = DeprecatedSetting(AdvancedSetting, "Xlint", "Enable or disable specific warnings", "Use -Wshadow to enable shadowing lints.") + @deprecated(message = "Superseded by -Wshadow, Scheduled for removal", since = "3.5.0") + val Xlint: Setting[_] = BooleanSetting(AdvancedSetting, "Xlint", "Enable or disable specific warnings", deprecation = Some(Deprecation("Use -Wshadow to enable shadowing lints. 
Scheduled for removal.")), ignoreInvalidArgs = true) end XSettings @@ -360,7 +382,6 @@ private sealed trait YSettings: val YdebugError: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-error", "Print the stack trace when any error is caught.", false) val YdebugUnpickling: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) val YdebugCyclic: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-cyclic", "Print the stack trace when a cyclic reference error occurs.", false) - val YtermConflict: Setting[String] = ChoiceSetting(ForkSetting, "Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") val Ylog: Setting[List[String]] = PhasesSetting(ForkSetting, "Ylog", "Log operations during") val YlogClasspath: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylog-classpath", "Output information about what classpath is being applied.") val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting(ForkSetting, "YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") @@ -369,18 +390,14 @@ private sealed trait YSettings: val YnoImports: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") val Yimports: Setting[List[String]] = MultiStringSetting(ForkSetting, "Yimports", helpArg="", "Custom root imports. 
If set, none of scala.*, java.lang.*, or Predef.* will be imported unless explicitly included.") - val YnoGenericSig: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-generic-signatures", "Suppress generation of generic signatures for Java.") val YnoPredef: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-predef", "Compile without importing Predef.") val Yskip: Setting[List[String]] = PhasesSetting(ForkSetting, "Yskip", "Skip") - val Ydumpclasses: Setting[String] = StringSetting(ForkSetting, "Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") - val YjarCompressionLevel: Setting[Int] = IntChoiceSetting(ForkSetting, "Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION) val YbackendParallelism: Setting[Int] = IntChoiceSetting(ForkSetting, "Ybackend-parallelism", "maximum worker threads for backend", 1 to 16, 1) val YbackendWorkerQueue: Setting[Int] = IntChoiceSetting(ForkSetting, "Ybackend-worker-queue", "backend threads worker queue size", 0 to 1000, 0) val YstopAfter: Setting[List[String]] = PhasesSetting(ForkSetting, "Ystop-after", "Stop after", aliases = List("-stop")) // backward compat val YstopBefore: Setting[List[String]] = PhasesSetting(ForkSetting, "Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") val YdetailedStats: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") - val YkindProjector: Setting[String] = ChoiceSetting(ForkSetting, "Ykind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be 
compatible with kind projector. When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable", legacyArgs = true) val YprintPos: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos", "Show tree positions.") val YprintPosSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos-syms", "Show symbol definitions positions.") val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") @@ -396,9 +413,6 @@ private sealed trait YSettings: val YtestPickler: Setting[Boolean] = BooleanSetting(ForkSetting, "Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") val YtestPicklerCheck: Setting[Boolean] = BooleanSetting(ForkSetting, "Ytest-pickler-check", "Self-test for pickling -print-tasty output; should be used with -Ytest-pickler.") val YcheckReentrant: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") - val YdropComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) - val YcookComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) - val YreadComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Yread-docs", "Read documentation from tasty.") val YforceSbtPhases: Setting[Boolean] = BooleanSetting(ForkSetting, "Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydump-sbt-inc", "For every compiled 
foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") val YcheckAllPatmat: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") @@ -408,7 +422,6 @@ private sealed trait YSettings: val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting(ForkSetting, "Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty.") val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") - val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles") val YprofileEnabled: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprofile-enabled", "Enable profiling.") val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") //.withPostSetHook( _ => YprofileEnabled.value = true ) @@ -424,7 +437,6 @@ private sealed trait YSettings: val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-kind-polymorphism", "Disable kind polymorphism.") val YexplicitNulls: Setting[Boolean] = BooleanSetting(ForkSetting, "Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") val YnoFlexibleTypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-flexible-types", "Disable turning nullable Java return types and parameter types into flexible types, which behave like abstract types with a nullable lower bound and non-nullable upper bound.") - val YcheckInit: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init", "Ensure safe initialization of objects.") val YcheckInitGlobal: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init-global", "Check safe initialization of global objects.") val YrequireTargetName: Setting[Boolean] = BooleanSetting(ForkSetting, "Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation.") val YrecheckTest: Setting[Boolean] = BooleanSetting(ForkSetting, "Yrecheck-test", "Run basic rechecking (internal test only).") @@ -438,16 +450,45 @@ private sealed trait YSettings: val YnoDoubleBindings: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).") val YshowVarBounds: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-var-bounds", "Print type variables with their bounds.") - val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") - val YnoEnrichErrorMessages: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-enrich-error-messages", "Show raw error messages, instead of enriching them with contextual information.") - val Yinstrument: Setting[Boolean] = BooleanSetting(ForkSetting, "Yinstrument", "Add instrumentation code that counts allocations and closure creations.") val YinstrumentDefs: Setting[Boolean] = BooleanSetting(ForkSetting, "Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") - val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", 
"Show debug info when quote pattern match fails") - - // Pipeline compilation options - val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java"), preferPrevious = true) - val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Ypickle-write"), preferPrevious = true) - val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") + // Deprecated: lifted from -Y to -X + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YtermConflict: Setting[String] = ChoiceSetting(ForkSetting, "Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error", deprecation = Deprecation.renamed("-Xresolve-term-conflict")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YnoGenericSig: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-generic-signatures", "Suppress generation of generic signatures for Java.", deprecation = Deprecation.renamed("-Xno-generic-signatures")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val Ydumpclasses: Setting[String] = StringSetting(ForkSetting, "Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", default = "", deprecation = Deprecation.renamed("-Xdump-classes")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YjarCompressionLevel: Setting[Int] = IntChoiceSetting(ForkSetting, "Yjar-compression-level", "compression level to use when writing jar files", 
Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION, deprecation = Deprecation.renamed("-Xjar-compression-level")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YkindProjector: Setting[String] = ChoiceSetting(ForkSetting, "Ykind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable", legacyArgs = true, deprecation = Deprecation.renamed("-Xkind-projector")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YdropComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments"), deprecation = Deprecation.renamed("-Xdrop-docs")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YcookComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments"), deprecation = Deprecation.renamed("-Xcook-docs")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YreadComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Yread-docs", "Read documentation from tasty.", deprecation = Deprecation.renamed("-Xread-docs")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.", deprecation = Deprecation.renamed("-Xno-decode-stacktraces")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YnoEnrichErrorMessages: 
Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-enrich-error-messages", "Show raw error messages, instead of enriching them with contextual information.", deprecation = Deprecation.renamed("-Xno-enrich-error-messages")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", "Show debug info when quote pattern match fails", deprecation = Deprecation.renamed("-Xdebug-macros")) + + // @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.7.0") + // val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java"), preferPrevious = true, deprecation = Deprecation.lifted("-Xjava-tasty")) + // @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.7.0") + // val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Ypickle-write"), preferPrevious = true, deprecation = Deprecation.lifted("-Xearly-tasty-output")) + // @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.7.0") + // val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.", deprecation = Deprecation.lifted("-Xallow-outline-from-tasty")) + + // Deprecated: lifted from -Y to -W + @deprecated(message = "Lifted to -W, Scheduled for removal.", since = "3.5.0") + val YcheckInit: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init", "Ensure safe initialization of objects.", deprecation = Deprecation.renamed("-Wsafe-init")) + + // Deprecated: Scheduled for removal + @deprecated(message = "Scheduled for removal.", since = "3.5.0") + val 
YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles", deprecation = Deprecation.removed()) end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 241ab34052a1..1e2ced4d65a7 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -53,14 +53,16 @@ object Settings: sstate: SettingsState, arguments: List[String], errors: List[String], - warnings: List[String]) { + warnings: List[String]): def fail(msg: String): Settings.ArgsSummary = ArgsSummary(sstate, arguments.tail, errors :+ msg, warnings) def warn(msg: String): Settings.ArgsSummary = ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) - } + + def deprecated(msg: String, extraArgs: List[String] = Nil): Settings.ArgsSummary = + ArgsSummary(sstate, extraArgs ++ arguments.tail, errors, warnings :+ msg) @unshared val settingCharacters = "[a-zA-Z0-9_\\-]*".r @@ -81,9 +83,9 @@ object Settings: ignoreInvalidArgs: Boolean = false, preferPrevious: Boolean = false, propertyClass: Option[Class[?]] = None, - deprecationMsg: Option[String] = None, - // kept only for -Ykind-projector option compatibility - legacyArgs: Boolean = false)(private[Settings] val idx: Int) { + deprecation: Option[Deprecation] = None, + // kept only for -Xkind-projector option compatibility + legacyArgs: Boolean = false)(private[Settings] val idx: Int): validateSettingString(prefix.getOrElse(name)) aliases.foreach(validateSettingString) @@ -108,104 +110,114 @@ object Settings: def acceptsNoArg: Boolean = summon[ClassTag[T]] == BooleanTag || summon[ClassTag[T]] == OptionTag || choices.exists(_.contains("")) def legalChoices: String = - choices match { + choices match case Some(xs) if xs.isEmpty => "" case Some(r: Range) => s"${r.head}..${r.last}" case Some(xs) => xs.mkString(", ") case None => "" - } 
- def tryToSet(state: ArgsSummary): ArgsSummary = { + def tryToSet(state: ArgsSummary): ArgsSummary = val ArgsSummary(sstate, arg :: args, errors, warnings) = state: @unchecked - def update(value: Any, args: List[String]): ArgsSummary = - var dangers = warnings - val valueNew = - if sstate.wasChanged(idx) && isMultivalue then - val valueList = value.asInstanceOf[List[String]] - val current = valueIn(sstate).asInstanceOf[List[String]] - valueList.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") - current ++ valueList - else - if sstate.wasChanged(idx) then - assert(!preferPrevious, "should have shortcutted with ignoreValue, side-effect may be present!") - dangers :+= s"Flag $name set repeatedly" - value - ArgsSummary(updateIn(sstate, valueNew), args, errors, dangers) + + /** + * Updates the value in state + * + * @param getValue it is crucial that this argument is passed by name, as [setOutput] has side effects. + * @param argStringValue string value of currently processed argument that will be used to set deprecation replacement + * @param args remaining arguments to process + * @return new argument state + */ + def update(getValue: => Any, argStringValue: String, args: List[String]): ArgsSummary = + deprecation match + case Some(Deprecation(msg, Some(replacedBy))) => + val deprecatedMsg = s"Option $name is deprecated: $msg" + if argStringValue.isEmpty then state.deprecated(deprecatedMsg, List(replacedBy)) + else state.deprecated(deprecatedMsg, List(s"$replacedBy:$argStringValue")) + + case Some(Deprecation(msg, _)) => + state.deprecated(s"Option $name is deprecated: $msg") + + case None => + val value = getValue + var dangers = warnings + val valueNew = + if sstate.wasChanged(idx) && isMultivalue then + val valueList = value.asInstanceOf[List[String]] + val current = valueIn(sstate).asInstanceOf[List[String]] + valueList.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") + current 
++ valueList + else + if sstate.wasChanged(idx) then + assert(!preferPrevious, "should have shortcutted with ignoreValue, side-effect may be present!") + dangers :+= s"Flag $name set repeatedly" + value + ArgsSummary(updateIn(sstate, valueNew), args, errors, dangers) end update def ignoreValue(args: List[String]): ArgsSummary = ArgsSummary(sstate, args, errors, warnings) - def fail(msg: String, args: List[String]) = - ArgsSummary(sstate, args, errors :+ msg, warnings) - - def warn(msg: String, args: List[String]) = - ArgsSummary(sstate, args, errors, warnings :+ msg) - def missingArg = val msg = s"missing argument for option $name" - if ignoreInvalidArgs then warn(msg + ", the tag was ignored", args) else fail(msg, args) + if ignoreInvalidArgs then state.warn(msg + ", the tag was ignored") else state.fail(msg) def invalidChoices(invalid: List[String]) = val msg = s"invalid choice(s) for $name: ${invalid.mkString(",")}" - if ignoreInvalidArgs then warn(msg + ", the tag was ignored", args) else fail(msg, args) + if ignoreInvalidArgs then state.warn(msg + ", the tag was ignored") else state.fail(msg) def setBoolean(argValue: String, args: List[String]) = - if argValue.equalsIgnoreCase("true") || argValue.isEmpty then update(true, args) - else if argValue.equalsIgnoreCase("false") then update(false, args) - else fail(s"$argValue is not a valid choice for boolean setting $name", args) + if argValue.equalsIgnoreCase("true") || argValue.isEmpty then update(true, argValue, args) + else if argValue.equalsIgnoreCase("false") then update(false, argValue, args) + else state.fail(s"$argValue is not a valid choice for boolean setting $name") def setString(argValue: String, args: List[String]) = choices match case Some(xs) if !xs.contains(argValue) => - fail(s"$argValue is not a valid choice for $name", args) + state.fail(s"$argValue is not a valid choice for $name") case _ => - update(argValue, args) + update(argValue, argValue, args) def setInt(argValue: String, args: 
List[String]) = - try - val x = argValue.toInt + argValue.toIntOption.map: intValue => choices match - case Some(r: Range) if x < r.head || r.last < x => - fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name", args) - case Some(xs) if !xs.contains(x) => - fail(s"$argValue is not a valid choice for $name", args) + case Some(r: Range) if intValue < r.head || r.last < intValue => + state.fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name") + case Some(xs) if !xs.contains(intValue) => + state.fail(s"$argValue is not a valid choice for $name") case _ => - update(x, args) - catch case _: NumberFormatException => - fail(s"$argValue is not an integer argument for $name", args) + update(intValue, argValue, args) + .getOrElse: + state.fail(s"$argValue is not an integer argument for $name") def setOutput(argValue: String, args: List[String]) = val path = Directory(argValue) val isJar = path.ext.isJar - if (!isJar && !path.isDirectory) - fail(s"'$argValue' does not exist or is not a directory or .jar file", args) - else { - val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) - update(output, args) - } + if (!isJar && !path.isDirectory) then + state.fail(s"'$argValue' does not exist or is not a directory or .jar file") + else + /* Side effect, do not change this method to evaluate eagerly */ + def output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) + update(output, argValue, args) def setVersion(argValue: String, args: List[String]) = - ScalaVersion.parse(argValue) match { - case Success(v) => update(v, args) - case Failure(ex) => fail(ex.getMessage, args) - } + ScalaVersion.parse(argValue) match + case Success(v) => update(v, argValue, args) + case Failure(ex) => state.fail(ex.getMessage) - def appendList(strings: List[String], args: List[String]) = + def appendList(strings: List[String], argValue: String, args: List[String]) = choices match case Some(valid) => 
strings.filterNot(valid.contains) match - case Nil => update(strings, args) + case Nil => update(strings, argValue, args) case invalid => invalidChoices(invalid) - case _ => update(strings, args) - + case _ => update(strings, argValue, args) def doSet(argRest: String) = - ((summon[ClassTag[T]], args): @unchecked) match { + ((summon[ClassTag[T]], args): @unchecked) match case (BooleanTag, _) => if sstate.wasChanged(idx) && preferPrevious then ignoreValue(args) else setBoolean(argRest, args) case (OptionTag, _) => - update(Some(propertyClass.get.getConstructor().newInstance()), args) + update(Some(propertyClass.get.getConstructor().newInstance()), "", args) case (ct, args) => val argInArgRest = !argRest.isEmpty || legacyArgs val argAfterParam = !argInArgRest && args.nonEmpty && (ct == IntTag || !args.head.startsWith("-")) @@ -214,12 +226,11 @@ object Settings: else if argAfterParam then doSetArg(args.head, args.tail) else missingArg - } def doSetArg(arg: String, argsLeft: List[String]) = summon[ClassTag[T]] match case ListTag => val strings = arg.split(",").toList - appendList(strings, argsLeft) + appendList(strings, arg, argsLeft) case StringTag => setString(arg, argsLeft) case OutputTag => @@ -241,14 +252,33 @@ object Settings: if(prefix.isEmpty) arg.dropWhile(_ != ':').drop(1) else arg.drop(prefix.get.length) if matches(arg) then - if deprecationMsg.isDefined then - warn(s"Option $name is deprecated: ${deprecationMsg.get}", args) - else - doSet(argValRest) - else - state - } - } + deprecation match + case Some(Deprecation(msg, _)) if ignoreInvalidArgs => // a special case for Xlint + state.deprecated(s"Option $name is deprecated: $msg") + case _ => doSet(argValRest) + else state + + end tryToSet + end Setting + + /** + * Class used for deprecating purposes. + * It contains all necessary information to deprecate given option. + * Scala Settings are considered deprecated when this object is present at their creation site. 
+ * + * @param msg deprecation message that will be displayed in following format: s"Option $name is deprecated: $msg" + * @param replacedBy option that is substituting current option + */ + case class Deprecation( + msg: String, + replacedBy: Option[String] = None, + ) + + object Deprecation: + def renamed(replacement: String) = Some(Deprecation(s"Use $replacement instead.", Some(replacement))) + def removed(removedVersion: Option[String] = None) = + val msg = removedVersion.map(" in " + _).getOrElse(".") + Some(Deprecation(s"Scheduled for removal$msg", None)) object Setting: extension [T](setting: Setting[T]) @@ -269,7 +299,7 @@ object Settings: s"\n- $name${if description.isEmpty() then "" else s" :\n\t${description.replace("\n","\n\t")}"}" end Setting - class SettingGroup { + class SettingGroup: @unshared private val _allSettings = new ArrayBuffer[Setting[?]] @@ -287,11 +317,10 @@ object Settings: userSetSettings(state.sstate).foldLeft(state)(checkDependenciesOfSetting) private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[?]) = - setting.depends.foldLeft(state) { (s, dep) => + setting.depends.foldLeft(state): (s, dep) => val (depSetting, reqValue) = dep if (depSetting.valueIn(state.sstate) == reqValue) s else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})") - } /** Iterates over the arguments applying them to settings where applicable. * Then verifies setting dependencies are met. 
@@ -333,60 +362,57 @@ object Settings: def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) - def publish[T](settingf: Int => Setting[T]): Setting[T] = { + def publish[T](settingf: Int => Setting[T]): Setting[T] = val setting = settingf(_allSettings.length) _allSettings += setting setting - } def prependName(name: String): String = assert(!name.startsWith("-"), s"Setting $name cannot start with -") "-" + name - def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false): Setting[Boolean] = - publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious)) + def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false, deprecation: Option[Deprecation] = None, ignoreInvalidArgs: Boolean = false): Setting[Boolean] = + publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious, deprecation = deprecation, ignoreInvalidArgs = ignoreInvalidArgs)) - def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) + def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation)) - def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, 
choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false): Setting[String] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs)) + def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs, deprecation = deprecation)) - def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases)) + def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, deprecation = deprecation)) - def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases)) + def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[ChoiceWithHelp[String]]] = + 
publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, deprecation = deprecation)) - def IntSetting(category: SettingCategory, name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = - publish(Setting(category, prependName(name), descr, default, aliases = aliases)) + def IntSetting(category: SettingCategory, name: String, descr: String, default: Int, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[Int] = + publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) - def IntChoiceSetting(category: SettingCategory, name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = - publish(Setting(category, prependName(name), descr, default, choices = Some(choices))) + def IntChoiceSetting(category: SettingCategory, name: String, descr: String, choices: Seq[Int], default: Int, deprecation: Option[Deprecation] = None): Setting[Int] = + publish(Setting(category, prependName(name), descr, default, choices = Some(choices), deprecation = deprecation)) - def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) + def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation)) - def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile, aliases: List[String] = Nil, preferPrevious: Boolean = false): Setting[AbstractFile] = - publish(Setting(category, prependName(name), 
descr, default, helpArg, aliases = aliases, preferPrevious = preferPrevious)) + def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile, aliases: List[String] = Nil, preferPrevious: Boolean = false, deprecation: Option[Deprecation] = None): Setting[AbstractFile] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, preferPrevious = preferPrevious, deprecation = deprecation)) - def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(category, prependName(name), descr, default, aliases = aliases)) + def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = + publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) - def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) + def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation)) - def PrefixSetting(category: SettingCategory, name: String, descr: String): Setting[List[String]] = + def PrefixSetting(category: SettingCategory, name: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = val prefix = name.takeWhile(_ != '<') - publish(Setting(category, "-" + name, descr, Nil, prefix = Some(prefix))) + publish(Setting(category, "-" + name, descr, 
Nil, prefix = Some(prefix), deprecation = deprecation)) - def VersionSetting(category: SettingCategory, name: String, descr: String, default: ScalaVersion = NoScalaVersion, legacyArgs: Boolean = false): Setting[ScalaVersion] = - publish(Setting(category, prependName(name), descr, default, legacyArgs = legacyArgs)) + def VersionSetting(category: SettingCategory, name: String, descr: String, default: ScalaVersion = NoScalaVersion, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[ScalaVersion] = + publish(Setting(category, prependName(name), descr, default, legacyArgs = legacyArgs, deprecation = deprecation)) - def OptionSetting[T: ClassTag](category: SettingCategory, name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = - publish(Setting(category, prependName(name), descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) + def OptionSetting[T: ClassTag](category: SettingCategory, name: String, descr: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[Option[T]] = + publish(Setting(category, prependName(name), descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases, deprecation = deprecation)) - def DeprecatedSetting(category: SettingCategory, name: String, descr: String, deprecationMsg: String): Setting[Boolean] = - publish(Setting(category, prependName(name), descr, false, deprecationMsg = Some(deprecationMsg))) - } + end SettingGroup end Settings diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 89ea4dd6aa50..7f925b0fc322 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -349,15 +349,15 @@ object Phases { def subPhases: List[Run.SubPhase] = Nil final def traversals: Int = if subPhases.isEmpty then 1 else subPhases.length - /** skip the phase for a Java compilation unit, may 
depend on -Yjava-tasty */ + /** skip the phase for a Java compilation unit, may depend on -Xjava-tasty */ def skipIfJava(using Context): Boolean = true final def isAfterLastJavaPhase(using Context): Boolean = - // With `-Yjava-tasty` nominally the final phase is expected be ExtractAPI, + // With `-Xjava-tasty` nominally the final phase is expected be ExtractAPI, // otherwise drop Java sources at the end of TyperPhase. // Checks if the last Java phase is before this phase, // which always fails if the terminal phase is before lastJavaPhase. - val lastJavaPhase = if ctx.settings.YjavaTasty.value then sbtExtractAPIPhase else typerPhase + val lastJavaPhase = if ctx.settings.XjavaTasty.value then sbtExtractAPIPhase else typerPhase lastJavaPhase <= this /** @pre `isRunnable` returns true */ diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index fdc1ba9697d0..51e6a5e6138a 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -80,12 +80,12 @@ object SymbolLoaders { // offer a setting to resolve the conflict one way or the other. // This was motivated by the desire to use YourKit probes, which // require yjp.jar at runtime. See SI-2089. - if (ctx.settings.YtermConflict.value == "package" || ctx.mode.is(Mode.Interactive)) { + if (ctx.settings.XtermConflict.value == "package" || ctx.mode.is(Mode.Interactive)) { report.warning( s"Resolving package/object name conflict in favor of package ${preExisting.fullName}. The object will be inaccessible.") owner.asClass.delete(preExisting) } - else if (ctx.settings.YtermConflict.value == "object") { + else if (ctx.settings.XtermConflict.value == "object") { report.warning( s"Resolving package/object name conflict in favor of object ${preExisting.fullName}. 
The package will be inaccessible.") return NoSymbol @@ -470,7 +470,7 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { new ClassfileTastyUUIDParser(classfile)(ctx).checkTastyUUID(tastyUUID) else // This will be the case in any of our tests that compile with `-Youtput-only-tasty`, or when - // tasty file compiled by `-Yearly-tasty-output-write` comes from an early output jar. + // tasty file compiled by `-Xearly-tasty-output-write` comes from an early output jar. report.inform(s"No classfiles found for $tastyFile when checking TASTy UUID") private def checkBeTastyUUID(tastyFile: AbstractFile, tastyBytes: Array[Byte])(using Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 7f6f13585efd..0020efa5018d 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -84,7 +84,7 @@ object Symbols extends SymUtils { ctx.settings.YretainTrees.value || denot.owner.isTerm || // no risk of leaking memory after a run for these denot.isOneOf(InlineOrProxy) || // need to keep inline info - ctx.settings.YcheckInit.value || // initialization check + ctx.settings.WcheckInit.value || // initialization check ctx.settings.YcheckInitGlobal.value /** The last denotation of this symbol */ diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index c71c20a38eb9..5b19fe0e7bdd 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -102,7 +102,7 @@ extends TypeError: em"""Recursion limit exceeded. |Maybe there is an illegal cyclic reference? |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. - |For the unprocessed stack trace, compile with -Yno-decode-stacktraces. + |For the unprocessed stack trace, compile with -Xno-decode-stacktraces. 
|A recurring operation is (inner to outer): |${opsString(mostCommon).stripMargin}""" @@ -122,7 +122,7 @@ object handleRecursive: e def apply(op: String, details: => String, exc: Throwable, weight: Int = 1)(using Context): Nothing = - if ctx.settings.YnoDecodeStacktraces.value then + if ctx.settings.XnoDecodeStacktraces.value then throw exc else exc match case _: RecursionOverflow => diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 4b2b218edb58..186e039c4d74 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -385,7 +385,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { } catch case ex: Throwable => - if !ctx.settings.YnoDecodeStacktraces.value + if !ctx.settings.XnoDecodeStacktraces.value && handleRecursive.underlyingStackOverflowOrNull(ex) != null then throw StackSizeExceeded(mdef) else @@ -924,7 +924,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { em"""Recursion limit exceeded while pickling ${ex.mdef} |in ${ex.mdef.symbol.showLocated}. |You could try to increase the stacksize using the -Xss JVM option. 
- |For the unprocessed stack trace, compile with -Yno-decode-stacktraces.""", + |For the unprocessed stack trace, compile with -Xno-decode-stacktraces.""", ex.mdef.srcPos) def missing = forwardSymRefs.keysIterator diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 19c22c6bb3d0..04d19f2f8821 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -986,8 +986,8 @@ class TreeUnpickler(reader: TastyReader, if !sym.isType && !sym.is(ParamAccessor) then sym.info = ta.avoidPrivateLeaks(sym) - if (ctx.settings.YreadComments.value) { - assert(ctx.docCtx.isDefined, "`-Yread-docs` enabled, but no `docCtx` is set.") + if (ctx.settings.XreadComments.value) { + assert(ctx.docCtx.isDefined, "`-Xread-docs` enabled, but no `docCtx` is set.") commentUnpicklerOpt.foreach { commentUnpickler => val comment = commentUnpickler.commentAt(start) ctx.docCtx.get.addDocstring(tree.symbol, comment) diff --git a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala index c1bd6b6778fd..18c5ceb5f346 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala @@ -19,7 +19,7 @@ class IDEDecompilerDriver(val settings: List[String]) extends dotc.Driver { private val myInitCtx: Context = { val rootCtx = initCtx.fresh.addMode(Mode.Interactive | Mode.ReadPositions) - rootCtx.setSetting(rootCtx.settings.YreadComments, true) + rootCtx.setSetting(rootCtx.settings.XreadComments, true) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) rootCtx.setSetting(rootCtx.settings.fromTasty, true) val ctx = setup(settings.toArray :+ "dummy.scala", rootCtx).get._2 diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala 
b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala index 74010b3f64d1..f13bcdf00b34 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala @@ -48,11 +48,11 @@ class ReadTasty extends Phase { if (cls.rootTree.isEmpty) None else { val attributes = unpickler.tastyAttributes - if attributes.isJava && !ctx.settings.YjavaTasty.value then - // filter out Java compilation units if -Yjava-tasty is not set + if attributes.isJava && !ctx.settings.XjavaTasty.value then + // filter out Java compilation units if -Xjava-tasty is not set None - else if attributes.isOutline && !ctx.settings.YallowOutlineFromTasty.value then - cannotUnpickle("it contains outline signatures and -Yallow-outline-from-tasty is not set.") + else if attributes.isOutline && !ctx.settings.XallowOutlineFromTasty.value then + cannotUnpickle("it contains outline signatures and -Xallow-outline-from-tasty is not set.") else val unit = CompilationUnit(cls, cls.rootTree, forceTrees = true) unit.pickled += (cls -> (() => unpickler.unpickler.bytes)) diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala index 8f42c62cb3b0..673874ae2769 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala @@ -33,8 +33,8 @@ class InteractiveDriver(val settings: List[String]) extends Driver { private val myInitCtx: Context = { val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) - rootCtx.setSetting(rootCtx.settings.YcookComments, true) - rootCtx.setSetting(rootCtx.settings.YreadComments, true) + rootCtx.setSetting(rootCtx.settings.XcookComments, true) + rootCtx.setSetting(rootCtx.settings.XreadComments, true) val ctx = setup(settings.toArray, rootCtx) match case Some((_, ctx)) => ctx 
case None => rootCtx diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 2f3daa79fb07..916fd2b30ed7 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1614,7 +1614,7 @@ object Parsers { imods &~= Given syntaxError(em"context function types require at least one parameter", paramSpan) FunctionWithMods(params, resultType, imods, erasedArgs.toList) - else if !ctx.settings.YkindProjector.isDefault then + else if !ctx.settings.XkindProjector.isDefault then val (newParams :+ newResultType, tparams) = replaceKindProjectorPlaceholders(params :+ resultType): @unchecked lambdaAbstract(tparams, Function(newParams, newResultType)) else @@ -1741,7 +1741,7 @@ object Parsers { val isVarianceAnnotated = name.startsWith("+") || name.startsWith("-") // We remove the variance marker from the name without passing along the specified variance at all // The real variance will be inferred at a later stage but may contradict the variance specified, - // This is ok, because `-Ykind-projector` is for cross-compiling existing Scala 2 code, not for writing new code, + // This is ok, because `-Xkind-projector` is for cross-compiling existing Scala 2 code, not for writing new code, // we may assume that variance annotations have already been checked by the Scala 2 compiler. 
val unannotatedName = if (isVarianceAnnotated) name.mapLast(_.drop(1)) else name TypeDef(unannotatedName, WildcardTypeBoundsTree()).withFlags(Param) @@ -1758,7 +1758,7 @@ object Parsers { Ident(name) } - val uscores = ctx.settings.YkindProjector.value == "underscores" + val uscores = ctx.settings.XkindProjector.value == "underscores" val newParams = params.mapConserve { case param @ Ident(tpnme.raw.STAR | tpnme.raw.MINUS_STAR | tpnme.raw.PLUS_STAR) => addParam() case param @ Ident(tpnme.USCOREkw | tpnme.raw.MINUS_USCORE | tpnme.raw.PLUS_USCORE) if uscores => addParam() @@ -1944,7 +1944,7 @@ object Parsers { if isSimpleLiteral then SingletonTypeTree(simpleLiteral()) else if in.token == USCORE then - if ctx.settings.YkindProjector.value == "underscores" then + if ctx.settings.XkindProjector.value == "underscores" then val start = in.skipToken() Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start)) else @@ -1960,7 +1960,7 @@ object Parsers { typeBounds().withSpan(Span(start, in.lastOffset, start)) // Allow symbols -_ and +_ through for compatibility with code written using kind-projector in Scala 3 underscore mode. // While these signify variant type parameters in Scala 2 + kind-projector, we ignore their variance markers since variance is inferred. - else if (isIdent(nme.MINUS) || isIdent(nme.PLUS)) && in.lookahead.token == USCORE && ctx.settings.YkindProjector.value == "underscores" then + else if (isIdent(nme.MINUS) || isIdent(nme.PLUS)) && in.lookahead.token == USCORE && ctx.settings.XkindProjector.value == "underscores" then val identName = in.name.toTypeName ++ nme.USCOREkw val start = in.skipToken() in.nextToken() @@ -2012,7 +2012,7 @@ object Parsers { val applied = rejectWildcardType(t) val args = typeArgs(namedOK = false, wildOK = true) - if (!ctx.settings.YkindProjector.isDefault) { + if (!ctx.settings.XkindProjector.isDefault) { def fail(): Tree = { syntaxError( em"λ requires a single argument of the form X => ... 
or (X, Y) => ...", @@ -2044,7 +2044,7 @@ object Parsers { } }) case _ => - if (!ctx.settings.YkindProjector.isDefault) { + if (!ctx.settings.XkindProjector.isDefault) { t match { case Tuple(params) => val (newParams, tparams) = replaceKindProjectorPlaceholders(params) diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 3f9e8ca6532e..831d31d6fa6e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -174,7 +174,7 @@ object Scanners { } class Scanner(source: SourceFile, override val startFrom: Offset = 0, profile: Profile = NoProfile, allowIndent: Boolean = true)(using Context) extends ScannerCommon(source) { - val keepComments = !ctx.settings.YdropComments.value + val keepComments = !ctx.settings.XdropComments.value /** A switch whether operators at the start of lines can be infix operators */ private[Scanners] var allowLeadingInfixOperators = true diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 10b0023992fe..1d8ca5f208fa 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -146,7 +146,7 @@ object report: // Should only be called from Run#enrichErrorMessage. def enrichErrorMessage(errorMessage: String)(using Context): String = - if ctx.settings.YnoEnrichErrorMessages.value then errorMessage + if ctx.settings.XnoEnrichErrorMessages.value then errorMessage else try enrichErrorMessage1(errorMessage) catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions @@ -171,7 +171,7 @@ object report: | An unhandled exception was thrown in the compiler. | Please file a crash report here: | https://github.com/scala/scala3/issues/new/choose - | For non-enriched exceptions, compile with -Yno-enrich-error-messages. 
+ | For non-enriched exceptions, compile with -Xno-enrich-error-messages. | |$info1 |""".stripMargin diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 9bbac381efab..75f04908ac55 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -52,13 +52,13 @@ class ExtractAPI extends Phase { override def description: String = ExtractAPI.description override def isRunnable(using Context): Boolean = { - super.isRunnable && (ctx.runZincPhases || ctx.settings.YjavaTasty.value) + super.isRunnable && (ctx.runZincPhases || ctx.settings.XjavaTasty.value) } // Check no needed. Does not transform trees override def isCheckable: Boolean = false - // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + // when `-Xjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false // SuperAccessors need to be part of the API (see the scripted test @@ -79,8 +79,8 @@ class ExtractAPI extends Phase { units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output) if doZincCallback then ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) - if ctx.settings.YjavaTasty.value then - units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Yjava-tasty` is set + if ctx.settings.XjavaTasty.value then + units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Xjava-tasty` is set else units0 end runOn diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 352636f681c3..dfff5971889e 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -64,7 +64,7 @@ class ExtractDependencies extends Phase { // 
Check no needed. Does not transform trees override def isCheckable: Boolean = false - // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + // when `-Xjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false // This phase should be run directly after `Frontend`, if it is run after diff --git a/compiler/src/dotty/tools/dotc/transform/CookComments.scala b/compiler/src/dotty/tools/dotc/transform/CookComments.scala index d443e31fdc39..5dec0fff1e39 100644 --- a/compiler/src/dotty/tools/dotc/transform/CookComments.scala +++ b/compiler/src/dotty/tools/dotc/transform/CookComments.scala @@ -12,7 +12,7 @@ class CookComments extends MegaPhase.MiniPhase { override def description: String = CookComments.description override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = { - if (ctx.settings.YcookComments.value && tree.isClassDef) { + if (ctx.settings.XcookComments.value && tree.isClassDef) { val cls = tree.symbol val cookingCtx = ctx.localContext(tree, cls).setNewScope val template = tree.rhs.asInstanceOf[tpd.Template] diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index c75ac9982317..217c843c4e50 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -445,7 +445,7 @@ object GenericSignatures { } } - private def needsJavaSig(tp: Type, throwsArgs: List[Type])(using Context): Boolean = !ctx.settings.YnoGenericSig.value && { + private def needsJavaSig(tp: Type, throwsArgs: List[Type])(using Context): Boolean = !ctx.settings.XnoGenericSig.value && { def needs(tp: Type) = (new NeedsSigCollector).apply(false, tp) needs(tp) || throwsArgs.exists(needs) } diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 
8bb396ca4081..6c3dcc669877 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -131,7 +131,7 @@ object Pickler { * that API and Dependency phases are complete. */ def init(using Context, ExecutionContext): AsyncTastyHolder = - AsyncTastyHolder(ctx.settings.YearlyTastyOutput.value, ctx.incCallback) + AsyncTastyHolder(ctx.settings.XearlyTastyOutput.value, ctx.incCallback) /** Asynchronously writes TASTy files to the destination -Yearly-tasty-output. @@ -198,7 +198,7 @@ class Pickler extends Phase { // we do not want to pickle `.betasty` if do not plan to actually create the // betasty file (as signified by the -Ybest-effort option) - // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + // when `-Xjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false private def output(name: String, msg: String) = { @@ -286,8 +286,8 @@ class Pickler extends Phase { util.SourceFile.relativePath(unit.source, reference) val isJavaAttr = unit.isJava // we must always set JAVAattr when pickling Java sources if isJavaAttr then - // assert that Java sources didn't reach Pickler without `-Yjava-tasty`. - assert(ctx.settings.YjavaTasty.value, "unexpected Java source file without -Yjava-tasty") + // assert that Java sources didn't reach Pickler without `-Xjava-tasty`. 
+ assert(ctx.settings.XjavaTasty.value, "unexpected Java source file without -Xjava-tasty") val isOutline = isJavaAttr // TODO: later we may want outline for Scala sources too val attributes = Attributes( sourceFile = sourceRelativePath, @@ -326,7 +326,7 @@ class Pickler extends Phase { unit.source, tree :: Nil, positionWarnings, scratch.positionBuffer, scratch.pickledIndices) - if !ctx.settings.YdropComments.value then + if !ctx.settings.XdropComments.value then CommentPickler.pickleComments( pickler, treePkl.buf.addrOfTree, treePkl.docString, tree, scratch.commentBuffer) @@ -403,7 +403,7 @@ class Pickler extends Phase { runPhase(_()) if ctx.settings.YtestPickler.value then val ctx2 = ctx.fresh - .setSetting(ctx.settings.YreadComments, true) + .setSetting(ctx.settings.XreadComments, true) .setSetting(ctx.settings.YshowPrintErrors, true) testUnpickler( using ctx2 diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 1a4141f3d495..9e78bd5474a3 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -29,7 +29,7 @@ class Checker extends Phase: override val runsAfter = Set(Pickler.name) override def isEnabled(using Context): Boolean = - super.isEnabled && (ctx.settings.YcheckInit.value || ctx.settings.YcheckInitGlobal.value) + super.isEnabled && (ctx.settings.WcheckInit.value || ctx.settings.YcheckInitGlobal.value) def traverse(traverser: InitTreeTraverser)(using Context): Boolean = monitor(phaseName): val unit = ctx.compilationUnit @@ -50,7 +50,7 @@ class Checker extends Phase: cancellable { val classes = traverser.getClasses() - if ctx.settings.YcheckInit.value then + if ctx.settings.WcheckInit.value then Semantic.checkClasses(classes)(using checkCtx) if ctx.settings.YcheckInitGlobal.value then diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala 
b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index f7afc7a7e0a7..0c63f5b4ecb1 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -59,7 +59,7 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { } protected def discardAfterTyper(unit: CompilationUnit)(using Context): Boolean = - (unit.isJava && !ctx.settings.YjavaTasty.value) || unit.suspended + (unit.isJava && !ctx.settings.XjavaTasty.value) || unit.suspended override val subPhases: List[SubPhase] = List( SubPhase("indexing"), SubPhase("typechecking"), SubPhase("checkingJava")) diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index 87825b025734..b6338082c696 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -150,7 +150,7 @@ object FileWriters { object ReadOnlyContext: def readSettings(using ctx: Context): ReadOnlySettings = new: - val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value + val jarCompressionLevel = ctx.settings.XjarCompressionLevel.value val debug = ctx.settings.Ydebug.value def readRun(using ctx: Context): ReadOnlyRun = new: diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index f8bba2f59fe1..121d00bcf8b3 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -82,8 +82,8 @@ class ReplDriver(settings: Array[String], /** Create a fresh and initialized context with IDE mode enabled */ private def initialCtx(settings: List[String]) = { val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions | Mode.Interactive) - rootCtx.setSetting(rootCtx.settings.YcookComments, true) - rootCtx.setSetting(rootCtx.settings.YreadComments, true) + rootCtx.setSetting(rootCtx.settings.XcookComments, true) + rootCtx.setSetting(rootCtx.settings.XreadComments, true) 
setupRootCtx(this.settings ++ settings, rootCtx) } diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index eb300a0512b5..517adff17991 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -43,7 +43,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler import tpd.* private val xCheckMacro: Boolean = ctx.settings.XcheckMacros.value - private val yDebugMacro: Boolean = ctx.settings.YdebugMacros.value + private val yDebugMacro: Boolean = ctx.settings.XdebugMacros.value extension [T](self: scala.quoted.Expr[T]) def show: String = diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 2084b11d0b97..de3bd02bba6e 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -31,8 +31,8 @@ class CompilationTests { @Test def pos: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePos") var tests = List( - compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Wunused:all", "-Wshadow:private-shadow", "-Wshadow:type-parameter-shadow"), FileFilter.include(TestSources.posLintingAllowlist)), - compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init"), FileFilter.exclude(TestSources.posLintingAllowlist)), + compileFilesInDir("tests/pos", defaultOptions.and("-Wsafe-init", "-Wunused:all", "-Wshadow:private-shadow", "-Wshadow:type-parameter-shadow"), FileFilter.include(TestSources.posLintingAllowlist)), + compileFilesInDir("tests/pos", defaultOptions.and("-Wsafe-init"), FileFilter.exclude(TestSources.posLintingAllowlist)), compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/pos-special/sourcepath/outer", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), 
compileFile("tests/pos-special/sourcepath/outer/nested/Test4.scala", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), @@ -40,9 +40,9 @@ class CompilationTests { compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), compileFile("tests/pos-special/utf8encoded.scala", defaultOptions.and("-encoding", "UTF8")), compileFile("tests/pos-special/utf16encoded.scala", defaultOptions.and("-encoding", "UTF16")), - compileDir("tests/pos-special/i18589", defaultOptions.and("-Ysafe-init").without("-Ycheck:all")), + compileDir("tests/pos-special/i18589", defaultOptions.and("-Wsafe-init").without("-Ycheck:all")), // Run tests for legacy lazy vals - compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), + compileFilesInDir("tests/pos", defaultOptions.and("-Wsafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), ) ::: ( // TODO create a folder for capture checking tests with the stdlib, or use tests/pos-custom-args/captures under this mode? 
@@ -51,7 +51,7 @@ class CompilationTests { ) if scala.util.Properties.isJavaAtLeast("16") then - tests ::= compileFilesInDir("tests/pos-java16+", defaultOptions.and("-Ysafe-init")) + tests ::= compileFilesInDir("tests/pos-java16+", defaultOptions.and("-Wsafe-init")) aggregateTests(tests*).checkCompile() } @@ -157,11 +157,11 @@ class CompilationTests { @Test def runAll: Unit = { implicit val testGroup: TestGroup = TestGroup("runAll") aggregateTests( - compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init")), + compileFilesInDir("tests/run", defaultOptions.and("-Wsafe-init")), compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking")), // Run tests for legacy lazy vals. - compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), + compileFilesInDir("tests/run", defaultOptions.and("-Wsafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), ).checkRuns() } @@ -225,9 +225,9 @@ class CompilationTests { // initialization tests @Test def checkInit: Unit = { implicit val testGroup: TestGroup = TestGroup("checkInit") - val options = defaultOptions.and("-Ysafe-init", "-Xfatal-warnings") + val options = defaultOptions.and("-Wsafe-init", "-Xfatal-warnings") compileFilesInDir("tests/init/neg", options).checkExpectedErrors() - compileFilesInDir("tests/init/warn", defaultOptions.and("-Ysafe-init")).checkWarnings() + compileFilesInDir("tests/init/warn", defaultOptions.and("-Wsafe-init")).checkWarnings() compileFilesInDir("tests/init/pos", options).checkCompile() compileFilesInDir("tests/init/crash", options.without("-Xfatal-warnings")).checkCompile() // The regression test for i12128 has some atypical classpath requirements. 
diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala index 8125a80f29f8..301dc10ab54e 100644 --- a/compiler/test/dotty/tools/dotc/SettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala @@ -7,14 +7,19 @@ import reporting.StoreReporter import vulpix.TestConfiguration import core.Contexts.{Context, ContextBase} -import dotty.tools.dotc.config.Settings._ -import dotty.tools.dotc.config.ScalaSettingCategories._ +import dotty.tools.dotc.config.Settings.* +import dotty.tools.dotc.config.Settings.Setting.ChoiceWithHelp +import dotty.tools.dotc.config.ScalaSettingCategories.* import dotty.tools.vulpix.TestConfiguration.mkClasspath +import dotty.tools.io.PlainDirectory +import dotty.tools.io.Directory +import dotty.tools.dotc.config.ScalaVersion import java.nio.file._ import org.junit.Test import org.junit.Assert._ +import scala.util.Using class SettingsTests { @@ -199,6 +204,125 @@ class SettingsTests { assertEquals(List("Flag -qux set repeatedly"), summary.warnings) } + @Test def `Output setting is overriding existing jar`: Unit = + val result = Using.resource(Files.createTempFile("myfile", ".jar").nn){ file => + object Settings extends SettingGroup: + val defaultDir = new PlainDirectory(Directory(".")) + val testOutput = OutputSetting(RootSetting, "testOutput", "testOutput", "", defaultDir) + + import Settings._ + + Files.write(file, "test".getBytes()) + val fileStateBefore = String(Files.readAllBytes(file)) + + val args = List(s"-testOutput:${file.toString}") + val summary = processArguments(args, processAll = true) + + assertNotEquals(fileStateBefore, String(Files.readAllBytes(file)), "Jar should have been overriden") + + }(Files.deleteIfExists(_)) + + @Test def `Output setting is respecting previous setting`: Unit = + val result = Using.resources( + Files.createTempFile("myfile", ".jar").nn, Files.createTempFile("myfile2", ".jar").nn + ){ (file1, file2) => + object Settings extends 
SettingGroup: + val defaultDir = new PlainDirectory(Directory(".")) + val testOutput = OutputSetting(RootSetting, "testOutput", "testOutput", "", defaultDir, preferPrevious = true) + + import Settings._ + + Files.write(file1, "test1".getBytes()) + Files.write(file2, "test2".getBytes()) + + val file1StateBefore = String(Files.readAllBytes(file1)) + val file2StateBefore = String(Files.readAllBytes(file2)) + + val creationTime = Files.getLastModifiedTime(file1) + val args = List(s"-testOutput:${file1.toString}", s"-testOutput:${file2.toString}") + val summary = processArguments(args, processAll = true) + + // The output is a new filesystem without information of original path + // We can't check the `testOutput.value` as in other tests. + assertNotEquals(file1StateBefore, String(Files.readAllBytes(file1))) + assertEquals(file2StateBefore, String(Files.readAllBytes(file2))) + + }(Files.deleteIfExists(_), Files.deleteIfExists(_)) + + @Test def `Output side effect is not present when setting is deprecated`: Unit = + val result = Using.resource(Files.createTempFile("myfile", ".jar").nn){ file => + object Settings extends SettingGroup: + val defaultDir = new PlainDirectory(Directory(".")) + val testOutput = OutputSetting(RootSetting, "testOutput", "testOutput", "", defaultDir, preferPrevious = true, deprecation = Deprecation.renamed("XtestOutput")) + + import Settings._ + + Files.write(file, "test".getBytes()) + val fileStateBefore = String(Files.readAllBytes(file)) + + val args = List(s"-testOutput:${file.toString}") + val summary = processArguments(args, processAll = true) + + assertEquals(fileStateBefore, String(Files.readAllBytes(file))) + + }(Files.deleteIfExists(_)) + + @Test def `Arguments of flags are correctly parsed with both ":" and " " separating`: Unit = + object Settings extends SettingGroup: + val booleanSetting = BooleanSetting(RootSetting, "booleanSetting", "booleanSetting", false) + val stringSetting = StringSetting(RootSetting, "stringSetting", 
"stringSetting", "", "test") + val choiceSetting = ChoiceSetting(RootSetting, "choiceSetting", "choiceSetting", "", List("a", "b"), "a") + val multiChoiceSetting= MultiChoiceSetting(RootSetting, "multiChoiceSetting", "multiChoiceSetting", "", List("a", "b"), List()) + val multiChoiceHelpSetting= MultiChoiceHelpSetting(RootSetting, "multiChoiceHelpSetting", "multiChoiceHelpSetting", "", List(ChoiceWithHelp("a", "a"), ChoiceWithHelp("b", "b")), List()) + val intSetting = IntSetting(RootSetting, "intSetting", "intSetting", 0) + val intChoiceSetting = IntChoiceSetting(RootSetting, "intChoiceSetting", "intChoiceSetting", List(1,2,3), 1) + val multiStringSetting = MultiStringSetting(RootSetting, "multiStringSetting", "multiStringSetting", "", List("a", "b"), List()) + val outputSetting = OutputSetting(RootSetting, "outputSetting", "outputSetting", "", new PlainDirectory(Directory("."))) + val pathSetting = PathSetting(RootSetting, "pathSetting", "pathSetting", ".") + val phasesSetting = PhasesSetting(RootSetting, "phasesSetting", "phasesSetting", "all") + val versionSetting= VersionSetting(RootSetting, "versionSetting", "versionSetting") + + import Settings._ + Using.resource(Files.createTempDirectory("testDir")) { dir => + + val args = List( + List("-booleanSetting", "true"), + List("-stringSetting", "newTest"), + List("-choiceSetting", "b"), + List("-multiChoiceSetting", "a,b"), + List("-multiChoiceHelpSetting", "a,b"), + List("-intSetting", "42"), + List("-intChoiceSetting", "2"), + List("-multiStringSetting", "a,b"), + List("-outputSetting", dir.toString), + List("-pathSetting", dir.toString), + List("-phasesSetting", "parser,typer"), + List("-versionSetting", "1.0.0"), + ) + + def testValues(summary: ArgsSummary) = + withProcessedArgs(summary) { + assertEquals(true, booleanSetting.value) + assertEquals("newTest", stringSetting.value) + assertEquals("b", choiceSetting.value) + assertEquals(List("a", "b"), multiChoiceSetting.value) + assertEquals(List("a", "b"), 
multiChoiceHelpSetting.value) + assertEquals(42, intSetting.value) + assertEquals(2, intChoiceSetting.value) + assertEquals(List("a", "b"), multiStringSetting.value) + assertEquals(dir.toString, outputSetting.value.path) + assertEquals(dir.toString, pathSetting.value) + assertEquals(List("parser", "typer"), phasesSetting.value) + assertEquals(ScalaVersion.parse("1.0.0").get, versionSetting.value) + } + + val summaryColon = processArguments(args.map(_.mkString(":")), processAll = true) + val summaryWhitespace = processArguments(args.flatten, processAll = true) + testValues(summary = summaryColon) + testValues(summary = summaryWhitespace) + + }(Files.deleteIfExists(_)) + private def withProcessedArgs(summary: ArgsSummary)(f: SettingsState ?=> Unit) = f(using summary.sstate) extension [T](setting: Setting[T]) diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index b2de0d6423df..3dc4f4e4ec5e 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -13,6 +13,8 @@ import java.net.URI import java.nio.file.Files import scala.util.Using +import scala.annotation.nowarn + class ScalaSettingsTests: @Test def `A setting with aliases is accepted`: Unit = @@ -88,6 +90,105 @@ class ScalaSettingsTests: val nowr = new Diagnostic.Warning("This is a problem.".toMessage, util.NoSourcePosition) assertEquals(Action.Silent, sut.action(nowr)) + @nowarn("cat=deprecation") + @Test def `Deprecated options are correctly mapped to their replacements`: Unit = + def createTestCase(oldSetting: Setting[_], newSetting: Setting[_], value: String = "") = + s"${oldSetting.name}$value" -> newSetting + + val settings = ScalaSettings + List( + createTestCase(settings.YtermConflict , settings.XtermConflict, ":package"), + createTestCase(settings.YnoGenericSig , settings.XnoGenericSig), + createTestCase(settings.Ydumpclasses , 
settings.Xdumpclasses,":./"), + createTestCase(settings.YjarCompressionLevel , settings.XjarCompressionLevel,":0"), + createTestCase(settings.YkindProjector , settings.XkindProjector, ":underscores"), + createTestCase(settings.YdropComments , settings.XdropComments), + createTestCase(settings.YcookComments , settings.XcookComments), + createTestCase(settings.YreadComments , settings.XreadComments), + createTestCase(settings.YnoDecodeStacktraces , settings.XnoDecodeStacktraces), + createTestCase(settings.YnoEnrichErrorMessages, settings.XnoEnrichErrorMessages), + createTestCase(settings.YdebugMacros , settings.XdebugMacros), + // createTestCase(settings.YjavaTasty , settings.XjavaTasty), + // createTestCase(settings.YearlyTastyOutput , settings.XearlyTastyOutput, ":./"), + // createTestCase(settings.YallowOutlineFromTasty, settings.XallowOutlineFromTasty), + createTestCase(settings.YcheckInit , settings.WcheckInit), + // createTestCase(settings.Xlint , settings.Wshadow, ":all"), // this setting is not going to be mapped to replacement. 
Read more in the commit message + ).map: (deprecatedArgument, newSetting) => + val args = List(deprecatedArgument) + val argSummary = ArgsSummary(settings.defaultState, args, errors = Nil, warnings = Nil) + val conf = settings.processArguments(argSummary, processAll = true, skipped = Nil) + assert(!newSetting.isDefaultIn(conf.sstate), s"Setting $deprecatedArgument was not forwarded to ${newSetting.name}") + + @nowarn("cat=deprecation") + @Test def `Deprecated options should not be set if old option was incorrect`: Unit = + def createTestCase(oldSetting: Setting[_], newSetting: Setting[_], value: String = ":illegal") = + s"${oldSetting.name}:$value" -> newSetting + + val settings = ScalaSettings + List( + createTestCase(settings.YtermConflict , settings.XtermConflict), + createTestCase(settings.YnoGenericSig , settings.XnoGenericSig), + createTestCase(settings.Ydumpclasses , settings.Xdumpclasses, ""), + createTestCase(settings.YjarCompressionLevel , settings.XjarCompressionLevel), + createTestCase(settings.YkindProjector , settings.XkindProjector), + createTestCase(settings.YdropComments , settings.XdropComments), + createTestCase(settings.YcookComments , settings.XcookComments), + createTestCase(settings.YreadComments , settings.XreadComments), + createTestCase(settings.YnoDecodeStacktraces , settings.XnoDecodeStacktraces), + createTestCase(settings.YnoEnrichErrorMessages, settings.XnoEnrichErrorMessages), + createTestCase(settings.YdebugMacros , settings.XdebugMacros), + // createTestCase(settings.YjavaTasty , settings.XjavaTasty), + // createTestCase(settings.YearlyTastyOutput , settings.XearlyTastyOutput), + // createTestCase(settings.YallowOutlineFromTasty, settings.XallowOutlineFromTasty), + createTestCase(settings.YcheckInit , settings.WcheckInit), + createTestCase(settings.Xlint , settings.Wshadow), + ).map: (deprecatedArgument, newSetting) => + val args = List(deprecatedArgument) + val argSummary = ArgsSummary(settings.defaultState, args, errors = Nil, 
warnings = Nil) + val conf = settings.processArguments(argSummary, processAll = true, skipped = Nil) + assert(newSetting.isDefaultIn(conf.sstate), s"Setting $deprecatedArgument was forwarded to ${newSetting.name}, when it should be ignored because first option was erroreus") + + // -Xlint was handled in a special way when it was added, making in hard to deprecate it. + // For now on we will retain old behavior, in next version we will emit deprecation warning. + // It is also scheduled for removal in future versions. + @Test def `Make Xlint to ignore invalid args`: Unit = + val settings = ScalaSettings + val args = List("-Xlint:-unused,_") + val argSummary = ArgsSummary(settings.defaultState, args, errors = Nil, warnings = Nil) + val conf = settings.processArguments(argSummary, processAll = true, skipped = Nil) + assert(conf.warnings.contains("Option -Xlint is deprecated: Use -Wshadow to enable shadowing lints. Scheduled for removal.")) + assert(conf.errors.isEmpty) + + @nowarn("cat=deprecation") + @Test def `Deprecated options aliases are correctly mapped to their replacements`: Unit = + def createTestCase(oldSetting: Setting[_], newSetting: Setting[_], value: String = "") = + oldSetting.aliases.map: alias => + s"$alias$value" -> newSetting + + val settings = ScalaSettings + List( + createTestCase(settings.YtermConflict , settings.XtermConflict, ":package"), + createTestCase(settings.YnoGenericSig , settings.XnoGenericSig), + createTestCase(settings.Ydumpclasses , settings.Xdumpclasses,":./"), + createTestCase(settings.YjarCompressionLevel , settings.XjarCompressionLevel,":0"), + createTestCase(settings.YkindProjector , settings.XkindProjector, ":underscores"), + createTestCase(settings.YdropComments , settings.XdropComments), + createTestCase(settings.YcookComments , settings.XcookComments), + createTestCase(settings.YreadComments , settings.XreadComments), + createTestCase(settings.YnoDecodeStacktraces , settings.XnoDecodeStacktraces), + 
createTestCase(settings.YnoEnrichErrorMessages, settings.XnoEnrichErrorMessages), + createTestCase(settings.YdebugMacros , settings.XdebugMacros), + // createTestCase(settings.YjavaTasty , settings.XjavaTasty), + // createTestCase(settings.YearlyTastyOutput , settings.XearlyTastyOutput, ":./"), + // createTestCase(settings.YallowOutlineFromTasty, settings.XallowOutlineFromTasty), + createTestCase(settings.YcheckInit , settings.WcheckInit), + // createTestCase(settings.Xlint , settings.Wshadow, ":all"), // this setting is not going to be mapped to replacement. Read more in the commit message + ).flatten.map: (deprecatedArgument, newSetting) => + val args = List(deprecatedArgument) + val argSummary = ArgsSummary(settings.defaultState, args, errors = Nil, warnings = Nil) + val conf = settings.processArguments(argSummary, processAll = true, skipped = Nil) + assert(!newSetting.isDefaultIn(conf.sstate), s"Setting alias $deprecatedArgument was not forwarded to ${newSetting.name}") + @Test def `i18367 rightmost WConf flags take precedence over flags to the left`: Unit = import reporting.{Action, Diagnostic} val sets = ScalaSettings diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala index 11406070ce7a..072944da1349 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala @@ -110,7 +110,7 @@ class CommentPicklingTest { private class UnpicklingDriver extends Driver { override def initCtx = val ctx = super.initCtx.fresh - ctx.setSetting(ctx.settings.YreadComments, true) + ctx.setSetting(ctx.settings.XreadComments, true) ctx def unpickle[T](args: Array[String], files: List[File])(fn: (List[tpd.Tree], Context) => T): T = { diff --git a/docs/_docs/reference/experimental/explicit-nulls.md b/docs/_docs/reference/experimental/explicit-nulls.md index bcbea34dd18d..50339c3fa1e4 100644 --- 
a/docs/_docs/reference/experimental/explicit-nulls.md +++ b/docs/_docs/reference/experimental/explicit-nulls.md @@ -85,7 +85,7 @@ val c = new C() // c.f == "field is null" ``` -The unsoundness above can be caught by the compiler with the option `-Ysafe-init`. +The unsoundness above can be caught by the compiler with the option `-Wsafe-init`. More details can be found in [safe initialization](../other-new-features/safe-initialization.md). ## Equality diff --git a/docs/_docs/reference/other-new-features/safe-initialization.md b/docs/_docs/reference/other-new-features/safe-initialization.md index 757038eac786..503dbc7bde47 100644 --- a/docs/_docs/reference/other-new-features/safe-initialization.md +++ b/docs/_docs/reference/other-new-features/safe-initialization.md @@ -4,7 +4,7 @@ title: "Safe Initialization" nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html --- -Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Ysafe-init`. +Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Wsafe-init`. The design and implementation of the initialization checker is described in the paper _Safe object initialization, abstractly_ [3]. diff --git a/docs/_spec/TODOreference/other-new-features/safe-initialization.md b/docs/_spec/TODOreference/other-new-features/safe-initialization.md index 757038eac786..503dbc7bde47 100644 --- a/docs/_spec/TODOreference/other-new-features/safe-initialization.md +++ b/docs/_spec/TODOreference/other-new-features/safe-initialization.md @@ -4,7 +4,7 @@ title: "Safe Initialization" nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html --- -Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Ysafe-init`. 
+Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Wsafe-init`. The design and implementation of the initialization checker is described in the paper _Safe object initialization, abstractly_ [3]. diff --git a/project/Build.scala b/project/Build.scala index aaa16ce4bce4..350471cc3e12 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -602,8 +602,8 @@ object Build { // Settings shared between scala3-compiler and scala3-compiler-bootstrapped lazy val commonDottyCompilerSettings = Seq( - // Note: bench/profiles/projects.yml should be updated accordingly. - Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Ysafe-init"), + // Note: bench/profiles/projects.yml should be updated accordingly. + Compile / scalacOptions ++= Seq("-Yexplicit-nulls"), // Use source 3.3 to avoid fatal migration warnings on scalajs-ir scalacOptions ++= Seq("-source", "3.3"), @@ -880,6 +880,8 @@ object Build { } lazy val nonBootstrappedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq( + // FIXME revert this to commonDottyCompilerSettings, when we bump reference version to 3.5.0 + scalacOptions += "-Ysafe-init", // packageAll packages all and then returns a map with the abs location packageAll := Def.taskDyn { // Use a dynamic task to avoid loops when loading the settings Def.task { @@ -907,6 +909,8 @@ object Build { ) lazy val bootstrappedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq( + // FIXME revert this to commonDottyCompilerSettings, when we bump reference version to 3.5.0 + scalacOptions += "-Wsafe-init", javaOptions ++= { val jars = packageAll.value Seq( @@ -1334,7 +1338,7 @@ object Build { BuildInfoPlugin.buildInfoScopedSettings(Test) ++ BuildInfoPlugin.buildInfoDefaultSettings - lazy val presentationCompilerSettings = { + def presentationCompilerSettings(implicit mode: Mode) = { val mtagsVersion = "1.3.0+56-a06a024d-SNAPSHOT" Seq( @@ -1348,7 +1352,11 @@ object Build { ivyConfigurations += 
SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings - Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Ysafe-init"), + // FIXME change this to just Seq("-Yexplicit-nulls, "-Wsafe-init") when reference is set to 3.5.0 + Compile / scalacOptions ++= (mode match { + case Bootstrapped => Seq("-Yexplicit-nulls", "-Wsafe-init") + case NonBootstrapped => Seq("-Yexplicit-nulls", "-Ysafe-init") + }), Compile / sourceGenerators += Def.task { val s = streams.value val cacheDir = s.cacheDirectory diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala b/sbt-test/pipelining/Xearly-tasty-output-inline/a/src/main/scala/a/A.scala similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala rename to sbt-test/pipelining/Xearly-tasty-output-inline/a/src/main/scala/a/A.scala diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xearly-tasty-output-inline/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xearly-tasty-output-inline/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt b/sbt-test/pipelining/Xearly-tasty-output-inline/build.sbt similarity index 90% rename from sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt rename to sbt-test/pipelining/Xearly-tasty-output-inline/build.sbt index e350fa7e7def..8ca1f4df759e 100644 --- a/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt +++ b/sbt-test/pipelining/Xearly-tasty-output-inline/build.sbt @@ -4,7 +4,7 @@ // defines a inline method. 
lazy val a = project.in(file("a")) .settings( - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), scalacOptions += "-Ycheck:all", ) diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xearly-tasty-output-inline/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xearly-tasty-output-inline/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/test b/sbt-test/pipelining/Xearly-tasty-output-inline/test similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output-inline/test rename to sbt-test/pipelining/Xearly-tasty-output-inline/test diff --git a/sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala b/sbt-test/pipelining/Xearly-tasty-output/a/src/main/scala/a/A.scala similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala rename to sbt-test/pipelining/Xearly-tasty-output/a/src/main/scala/a/A.scala diff --git a/sbt-test/pipelining/Yearly-tasty-output/b-early-out/.keep b/sbt-test/pipelining/Xearly-tasty-output/b-early-out/.keep similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output/b-early-out/.keep rename to sbt-test/pipelining/Xearly-tasty-output/b-early-out/.keep diff --git a/sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xearly-tasty-output/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xearly-tasty-output/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yearly-tasty-output/build.sbt 
b/sbt-test/pipelining/Xearly-tasty-output/build.sbt similarity index 87% rename from sbt-test/pipelining/Yearly-tasty-output/build.sbt rename to sbt-test/pipelining/Xearly-tasty-output/build.sbt index 5cfe30936ea9..739f6cc8c31e 100644 --- a/sbt-test/pipelining/Yearly-tasty-output/build.sbt +++ b/sbt-test/pipelining/Xearly-tasty-output/build.sbt @@ -4,14 +4,14 @@ // early out is a jar lazy val a = project.in(file("a")) .settings( - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), scalacOptions += "-Ycheck:all", ) // early out is a directory lazy val b = project.in(file("b")) .settings( - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "b-early-out").toString), + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "b-early-out").toString), scalacOptions += "-Ycheck:all", ) diff --git a/sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala b/sbt-test/pipelining/Xearly-tasty-output/c/src/main/scala/c/C.scala similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala rename to sbt-test/pipelining/Xearly-tasty-output/c/src/main/scala/c/C.scala diff --git a/sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xearly-tasty-output/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xearly-tasty-output/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yearly-tasty-output/test b/sbt-test/pipelining/Xearly-tasty-output/test similarity index 100% rename from sbt-test/pipelining/Yearly-tasty-output/test rename to sbt-test/pipelining/Xearly-tasty-output/test diff --git 
a/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-annotation/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-annotation/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-annotation/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-annotation/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-annotation/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-annotation/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-annotation/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-annotation/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt b/sbt-test/pipelining/Xjava-tasty-annotation/build.sbt similarity index 87% rename from sbt-test/pipelining/Yjava-tasty-annotation/build.sbt rename to sbt-test/pipelining/Xjava-tasty-annotation/build.sbt index 20a13d7d4ba0..440ef8eced59 100644 --- a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-annotation/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), + 
scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-annotation-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-annotation/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-annotation/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/test b/sbt-test/pipelining/Xjava-tasty-annotation/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/test rename to sbt-test/pipelining/Xjava-tasty-annotation/test diff --git a/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-enum/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-enum/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-enum/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-enum/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-enum/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-enum/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-enum/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala 
b/sbt-test/pipelining/Xjava-tasty-enum/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-enum/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt b/sbt-test/pipelining/Xjava-tasty-enum/build.sbt similarity index 92% rename from sbt-test/pipelining/Yjava-tasty-enum/build.sbt rename to sbt-test/pipelining/Xjava-tasty-enum/build.sbt index 2083003d9ebe..5adbe6ec992e 100644 --- a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-enum/build.sbt @@ -1,8 +1,8 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-enum/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-enum/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-enum/test b/sbt-test/pipelining/Xjava-tasty-enum/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/test rename to sbt-test/pipelining/Xjava-tasty-enum/test diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java 
b/sbt-test/pipelining/Xjava-tasty-from-tasty/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-from-tasty/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-from-tasty/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-from-tasty/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a_from_tasty/.keep b/sbt-test/pipelining/Xjava-tasty-from-tasty/a_from_tasty/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/a_from_tasty/.keep rename to sbt-test/pipelining/Xjava-tasty-from-tasty/a_from_tasty/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-from-tasty/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-from-tasty/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-from-tasty/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-from-tasty/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt b/sbt-test/pipelining/Xjava-tasty-from-tasty/build.sbt similarity index 87% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt rename to sbt-test/pipelining/Xjava-tasty-from-tasty/build.sbt index 040c3bf6eac8..3876ce28693d 100644 --- a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-from-tasty/build.sbt @@ -2,22 +2,22 @@ lazy val a = 
project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-pre-classes"), // send classfiles to a different directory ) // recompile `a` with `-from-tasty` flag to test idempotent read/write java signatures. -// Requires -Yjava-tasty to be set in order to read them. +// Requires -Xjava-tasty to be set in order to read them. lazy val a_from_tasty = project.in(file("a_from_tasty")) .settings( Compile / sources := Seq((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar"), Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar")), scalacOptions += "-from-tasty", // read the jar file tasties as the source files - scalacOptions += "-Yjava-tasty", - scalacOptions += "-Yallow-outline-from-tasty", // allow outline signatures to be read with -from-tasty - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", + scalacOptions += "-Xallow-outline-from-tasty", // allow outline signatures to be read with -from-tasty + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a_from_tasty-classes"), // send classfiles to a different directory ) diff --git 
a/sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-from-tasty/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-from-tasty/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/test b/sbt-test/pipelining/Xjava-tasty-from-tasty/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/test rename to sbt-test/pipelining/Xjava-tasty-from-tasty/test diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a-check/.keep b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/a-check/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/a-check/.keep rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/a-check/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala rename to 
sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/build.sbt similarity index 91% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/build.sbt index 9013490f1f54..c51a266c2ee9 100644 --- a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/build.sbt @@ -1,8 +1,8 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= 
Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory ) @@ -13,8 +13,8 @@ lazy val aCheck = project.in(file("a-check")) scalacOptions += "-Ytest-pickler", // check that the pickler is correct Compile / sources := (a / Compile / sources).value, // use the same sources as a compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes-2"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c-alt/.keep b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/c-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/c-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/c-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala similarity index 100% rename from 
sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/test b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/test rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/test diff --git a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-generic/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-generic/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-generic/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-generic/b/src/main/scala/b/B.scala similarity index 100% rename from 
sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-generic/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt b/sbt-test/pipelining/Xjava-tasty-generic/build.sbt similarity index 91% rename from sbt-test/pipelining/Yjava-tasty-generic/build.sbt rename to sbt-test/pipelining/Xjava-tasty-generic/build.sbt index 9e2796600333..c043d597c6cc 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-generic/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-generic-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-generic/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-generic/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-generic/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-generic/test b/sbt-test/pipelining/Xjava-tasty-generic/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/test rename to sbt-test/pipelining/Xjava-tasty-generic/test diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/InnerClass.java b/sbt-test/pipelining/Xjava-tasty-paths/a/InnerClass.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/InnerClass.java rename to 
sbt-test/pipelining/Xjava-tasty-paths/a/InnerClass.java diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/InnerClassGen.java b/sbt-test/pipelining/Xjava-tasty-paths/a/InnerClassGen.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/InnerClassGen.java rename to sbt-test/pipelining/Xjava-tasty-paths/a/InnerClassGen.java diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/InnerClassSub.java b/sbt-test/pipelining/Xjava-tasty-paths/a/InnerClassSub.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/InnerClassSub.java rename to sbt-test/pipelining/Xjava-tasty-paths/a/InnerClassSub.java diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/RawTypes.java b/sbt-test/pipelining/Xjava-tasty-paths/a/RawTypes.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/RawTypes.java rename to sbt-test/pipelining/Xjava-tasty-paths/a/RawTypes.java diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/package.scala b/sbt-test/pipelining/Xjava-tasty-paths/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-paths/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-paths/b/Test.scala b/sbt-test/pipelining/Xjava-tasty-paths/b/Test.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/b/Test.scala rename to sbt-test/pipelining/Xjava-tasty-paths/b/Test.scala diff --git a/sbt-test/pipelining/Yjava-tasty-paths/build.sbt b/sbt-test/pipelining/Xjava-tasty-paths/build.sbt similarity index 92% rename from sbt-test/pipelining/Yjava-tasty-paths/build.sbt rename to sbt-test/pipelining/Xjava-tasty-paths/build.sbt index 49487fccb57e..24a6f582647f 100644 --- a/sbt-test/pipelining/Yjava-tasty-paths/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-paths/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - 
scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-paths-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-paths-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-paths-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-paths/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-paths/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-paths/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-paths/test b/sbt-test/pipelining/Xjava-tasty-paths/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/test rename to sbt-test/pipelining/Xjava-tasty-paths/test diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-result-types/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-result-types/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-result-types/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-result-types/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-result-types/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/b-alt/.keep rename to 
sbt-test/pipelining/Xjava-tasty-result-types/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-result-types/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-result-types/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt b/sbt-test/pipelining/Xjava-tasty-result-types/build.sbt similarity index 92% rename from sbt-test/pipelining/Yjava-tasty-result-types/build.sbt rename to sbt-test/pipelining/Xjava-tasty-result-types/build.sbt index 80bcf71b3365..f540de2d6599 100644 --- a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-result-types/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-result-types-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-result-types/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-result-types/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/test b/sbt-test/pipelining/Xjava-tasty-result-types/test similarity index 100% rename 
from sbt-test/pipelining/Yjava-tasty-result-types/test rename to sbt-test/pipelining/Xjava-tasty-result-types/test diff --git a/sbt-test/pipelining/pipelining-test/test b/sbt-test/pipelining/pipelining-test/test index e2b8e39082b2..1c996d6962e8 100644 --- a/sbt-test/pipelining/pipelining-test/test +++ b/sbt-test/pipelining/pipelining-test/test @@ -1,12 +1,12 @@ # run the tests on a project with pipelining -# exercises the fact that -Ypickle-java and -Ypickle-write +# exercises the fact that -Xjava-tasty and -Xpickle-write # flags are set twice. # steps: -# - Compile scope is compiled with flags `-Ypickle-java -Ypickle-write early/a-early-7423784.jar` +# - Compile scope is compiled with flags `-Xjava-tasty -Xpickle-write early/a-early-7423784.jar` # - sbt copies `early/a-early-7423784.jar` to `early/a-early.jar` -# - Test scope is compiled with flags `-Ypickle-java -Ypickle-write early-test/a-early-963232.jar -Ypickle-java -Ypickle-write early/a-early.jar -classpath early/a-early.jar` +# - Test scope is compiled with flags `-Xjava-tasty -Xpickle-write early-test/a-early-963232.jar -Xjava-tasty -Xpickle-write early/a-early.jar -classpath early/a-early.jar` # e.g. for some reason the classpath has the same `a-early.jar` that -# is passed with `Ypickle-write`. -# Therefore we MUST avoid even reading the second `-Ypickle-write` setting, +# is passed with `Xpickle-write`. +# Therefore we MUST avoid even reading the second `-Xpickle-write` setting, # otherwise we will zero-out `a-early.jar`, causing type errors because its contents are blank. 
> a/test diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala index 2d8ca15d9c4f..1648dbe2917b 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala @@ -29,8 +29,8 @@ class SnippetCompiler( val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive) rootCtx.setSetting(rootCtx.settings.experimental, true) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) - rootCtx.setSetting(rootCtx.settings.YcookComments, true) - rootCtx.setSetting(rootCtx.settings.YreadComments, true) + rootCtx.setSetting(rootCtx.settings.XcookComments, true) + rootCtx.setSetting(rootCtx.settings.XreadComments, true) rootCtx.setSetting(rootCtx.settings.color, "never") rootCtx.setSetting(rootCtx.settings.XimportSuggestionTimeout, 0) diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala index 03b3aadedc4d..906578c9d405 100644 --- a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -103,7 +103,7 @@ object TastyInspector: reset() val ctx2 = ctx.fresh .addMode(Mode.ReadPositions) - .setSetting(ctx.settings.YreadComments, true) + .setSetting(ctx.settings.XreadComments, true) new TASTYRun(this, ctx2) new InspectorDriver diff --git a/staging/src/scala/quoted/staging/QuoteDriver.scala b/staging/src/scala/quoted/staging/QuoteDriver.scala index 7eb99bce4ff8..0131a56cd8aa 100644 --- a/staging/src/scala/quoted/staging/QuoteDriver.scala +++ b/staging/src/scala/quoted/staging/QuoteDriver.scala @@ -41,7 +41,7 @@ private class QuoteDriver(appClassloader: ClassLoader) extends Driver: setCompilerSettings(ctx1.fresh.setSetting(ctx1.settings.outputDir, outDir), settings) } - val compiledExpr = + val compiledExpr = try new 
QuoteCompiler().newRun(ctx).compileExpr(exprBuilder) catch case ex: dotty.tools.FatalError => @@ -50,10 +50,10 @@ private class QuoteDriver(appClassloader: ClassLoader) extends Driver: |This might be caused by using an incorrect classloader |when creating the `staging.Compiler` instance with `staging.Compiler.make`. |For details, please refer to the documentation. - |For non-enriched exceptions, compile with -Yno-enrich-error-messages.""".stripMargin - if ctx.settings.YnoEnrichErrorMessages.value(using ctx) then throw ex + |For non-enriched exceptions, compile with -Xno-enrich-error-messages.""".stripMargin + if ctx.settings.XnoEnrichErrorMessages.value(using ctx) then throw ex else throw new Exception(enrichedMessage, ex) - + compiledExpr match case Right(value) => value.asInstanceOf[T] diff --git a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala index ea3f0a95dded..7c5476d35940 100644 --- a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +++ b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala @@ -100,7 +100,7 @@ object TastyInspector: reset() val ctx2 = ctx.fresh .addMode(Mode.ReadPositions) - .setSetting(ctx.settings.YreadComments, true) + .setSetting(ctx.settings.XreadComments, true) new TASTYRun(this, ctx2) new InspectorDriver diff --git a/tests/explicit-nulls/pos/i14682.scala b/tests/explicit-nulls/pos/i14682.scala index 318de6094a88..76ae621a406e 100644 --- a/tests/explicit-nulls/pos/i14682.scala +++ b/tests/explicit-nulls/pos/i14682.scala @@ -1,4 +1,4 @@ -//> using options -Ysafe-init +//> using options -Wsafe-init class C1: sealed abstract class Name { @@ -26,4 +26,4 @@ class C2: val localName = LocalName() println(localName) - var count = 0 \ No newline at end of file + var count = 0 diff --git a/tests/neg/i16438.scala b/tests/neg/i16438.scala index a2b88080c2cd..793e6518ea71 100644 --- a/tests/neg/i16438.scala +++ b/tests/neg/i16438.scala @@ -1,4 
+1,4 @@ -//> using options -Ysafe-init +//> using options -Wsafe-init trait ATrait(val string: String, val int: Int) trait AnotherTrait( override val string: String, override val int: Int) extends ATrait case class ACaseClass(override val string: String) extends AnotherTrait(string, 3) // error diff --git a/tests/neg/i2887b.check b/tests/neg/i2887b.check index 7b85d1a0223b..5bd5f570fbf7 100644 --- a/tests/neg/i2887b.check +++ b/tests/neg/i2887b.check @@ -4,7 +4,7 @@ | Recursion limit exceeded. | Maybe there is an illegal cyclic reference? | If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. - | For the unprocessed stack trace, compile with -Yno-decode-stacktraces. + | For the unprocessed stack trace, compile with -Xno-decode-stacktraces. | A recurring operation is (inner to outer): | | try to instantiate Z[Z] diff --git a/tests/neg/kind-projector-underscores.scala b/tests/neg/kind-projector-underscores.scala index 76aada871fae..e2cdee917e81 100644 --- a/tests/neg/kind-projector-underscores.scala +++ b/tests/neg/kind-projector-underscores.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector:underscores +//> using options -Xkind-projector:underscores package kind_projector_neg diff --git a/tests/neg/kind-projector.scala b/tests/neg/kind-projector.scala index a7fc24c70b93..a03dd83a1945 100644 --- a/tests/neg/kind-projector.scala +++ b/tests/neg/kind-projector.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector +//> using options -Xkind-projector package kind_projector_neg diff --git a/tests/pos/i16777.scala b/tests/pos/i16777.scala index 302ace3ea9aa..4b7399ac053c 100644 --- a/tests/pos/i16777.scala +++ b/tests/pos/i16777.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector:underscores +//> using options -Xkind-projector:underscores sealed abstract class Free[+S[_, _], +E, +A] { @inline final def flatMap[S1[e, a] >: S[e, a], B, E1 >: E](fun: A => Free[S1, E1, B]): Free[S1, E1, B] = Free.FlatMapped[S1, E, E1, 
A, B](this, fun) diff --git a/tests/pos/i19806/Module.scala b/tests/pos/i19806/Module.scala index d0142fc24682..328dcd213d2e 100644 --- a/tests/pos/i19806/Module.scala +++ b/tests/pos/i19806/Module.scala @@ -1,4 +1,4 @@ -//> using options -Yjava-tasty -Ytest-pickler-check +//> using options -Xjava-tasty -Ytest-pickler-check package p diff --git a/tests/pos/kind-projector-underscores.scala b/tests/pos/kind-projector-underscores.scala index f72a300a64eb..6f4349a8ec7c 100644 --- a/tests/pos/kind-projector-underscores.scala +++ b/tests/pos/kind-projector-underscores.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector:underscores +//> using options -Xkind-projector:underscores package kind_projector diff --git a/tests/pos/kind-projector.scala b/tests/pos/kind-projector.scala index 4d6ec8c932a9..7e4a2c7f5c1b 100644 --- a/tests/pos/kind-projector.scala +++ b/tests/pos/kind-projector.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector +//> using options -Xkind-projector package kind_projector diff --git a/tests/run-macros/i12351/Test_2.scala b/tests/run-macros/i12351/Test_2.scala index e480b3c7e86e..a48d30772d5c 100644 --- a/tests/run-macros/i12351/Test_2.scala +++ b/tests/run-macros/i12351/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -Yread-docs +//> using options -Xread-docs @main def Test(): Unit = { println(getDocString[Data]) diff --git a/tests/run-macros/i12352/Main.scala b/tests/run-macros/i12352/Main.scala index b62bd80eaf2c..19cdf2a82d3b 100644 --- a/tests/run-macros/i12352/Main.scala +++ b/tests/run-macros/i12352/Main.scala @@ -1,4 +1,4 @@ -//> using options -Yread-docs +//> using options -Xread-docs @main def Test(): Unit = { val res = getDocString[scala.quoted.Quotes] diff --git a/tests/untried/neg/choices.check b/tests/untried/neg/choices.check index b114394e9609..2e45461ca178 100644 --- a/tests/untried/neg/choices.check +++ b/tests/untried/neg/choices.check @@ -1,2 +1,2 @@ -error: bad options: -Yresolve-term-conflict +error: bad options: 
-Xresolve-term-conflict one error found diff --git a/tests/untried/neg/choices.flags b/tests/untried/neg/choices.flags index 9718467d4ca2..7a04890a6dee 100644 --- a/tests/untried/neg/choices.flags +++ b/tests/untried/neg/choices.flags @@ -1 +1 @@ --Yresolve-term-conflict +-Xresolve-term-conflict From e85a12a2fcaf005fd281416b35a63d12a25b02eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20Fornal?= <24961583+Florian3k@users.noreply.github.com> Date: Tue, 7 May 2024 15:15:31 +0200 Subject: [PATCH 332/465] Fix scaladoc crash on Windows - illegal path character (#20311) Fixes #19853 --- .../tools/scaladoc/renderers/SiteRenderer.scala | 10 ++++++++-- .../tools/scaladoc/site/StaticSiteContext.scala | 14 ++++++++------ .../src/dotty/tools/scaladoc/util/escape.scala | 17 +++++++++++++++++ 3 files changed, 33 insertions(+), 8 deletions(-) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala index 7f64ce92ffc8..71b0a1b572ac 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala @@ -13,6 +13,7 @@ import java.nio.file.Path import java.nio.file.Files import java.io.File import scala.util.chaining._ +import dotty.tools.scaladoc.util.Escape.escapeFilename case class ResolvedTemplate(template: LoadedTemplate, ctx: StaticSiteContext): val resolved = template.resolveToHtml(ctx) @@ -55,11 +56,16 @@ trait SiteRenderer(using DocContext) extends Locations: val staticSiteRootPath = content.ctx.root.toPath.toAbsolutePath def asValidURL: Option[String] = Try(URI(str).toURL).toOption.map(_ => str) def asAsset: Option[String] = Option.when( - Files.exists(staticSiteRootPath.resolve("_assets").resolve(str.stripPrefix("/"))) + Try( + Files.exists(staticSiteRootPath.resolve("_assets").resolve(str.stripPrefix("/"))) + ).getOrElse(false) )( resolveLink(pageDri, str.stripPrefix("/")) ) - def asStaticSite: 
Option[String] = tryAsDriPlain(str).orElse(tryAsDri(str)) + def asStaticSite: Option[String] = + tryAsDriPlain(str) + .orElse(tryAsDri(str)) + .orElse(tryAsDriPlain(escapeFilename(str))) /* Link resolving checks performs multiple strategies with following priority: 1. We check if the link is a valid URL e.g. http://dotty.epfl.ch diff --git a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala index 7a90a462cba0..a610e41f12f0 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala @@ -7,9 +7,8 @@ import java.nio.file.FileVisitOption import java.nio.file.Path import java.nio.file.Paths -import scala.util.Try import scala.jdk.CollectionConverters._ -import scala.annotation.static +import scala.util.control.NonFatal class StaticSiteContext( val root: File, @@ -75,10 +74,13 @@ class StaticSiteContext( val templateSourceLocation = staticSiteRoot.reverseSiteMappings.get(templateDestLocation) // Check if link is relative or absolute - if link.startsWith("/") - then Seq(root.toPath.resolve(link.drop(1))) - else Seq(templateDestLocation.getParent.resolve(link).normalize) ++ - templateSourceLocation.map(_.getParent.resolve(link).normalize) + try + if link.startsWith("/") + then Seq(root.toPath.resolve(link.drop(1))) + else Seq(templateDestLocation.getParent.resolve(link).normalize) ++ + templateSourceLocation.map(_.getParent.resolve(link).normalize) + catch + case NonFatal(_) => Seq.empty // Try to strip site extension and create all possible file paths val fileNames = if siteExtensions.exists(link.endsWith(_)) diff --git a/scaladoc/src/dotty/tools/scaladoc/util/escape.scala b/scaladoc/src/dotty/tools/scaladoc/util/escape.scala index 686d384337c1..5d4bf02e8b38 100644 --- a/scaladoc/src/dotty/tools/scaladoc/util/escape.scala +++ b/scaladoc/src/dotty/tools/scaladoc/util/escape.scala @@ -5,7 +5,24 @@ object Escape: 
.replace("#","%23") def escapeFilename(filename: String) = + // from compiler/src/dotty/tools/dotc/util/NameTransformer.scala val escaped = filename + .replace("~", "$tilde") + .replace("=", "$eq") + .replace("<", "$less") + .replace(">", "$greater") + .replace("!", "$bang") + .replace("#", "$hash") + .replace("%", "$percent") + .replace("^", "$up") + .replace("&", "$amp") + .replace("|", "$bar") + .replace("*", "$times") .replace("/", "$div") + .replace("+", "$plus") + .replace("-", "$minus") + .replace(":", "$colon") .replace("\\", "$bslash") + .replace("?", "$qmark") + .replace("@", "$at") if escaped != filename then escaped + "$" else escaped From 4a476141d9b3fe91424a7e8aba434590287d0f9c Mon Sep 17 00:00:00 2001 From: som-snytt Date: Tue, 7 May 2024 06:41:23 -0700 Subject: [PATCH 333/465] Amend test file name (#20326) Follow up https://github.com/scala/scala3/pull/18467 The issue link in the commit message is also a typo, which is what I noticed. [skip ci] --- tests/pos/{i18361.scala => i18351.scala} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/pos/{i18361.scala => i18351.scala} (100%) diff --git a/tests/pos/i18361.scala b/tests/pos/i18351.scala similarity index 100% rename from tests/pos/i18361.scala rename to tests/pos/i18351.scala From e85fa42ec27ce98ba95dc24ac36efff08e47033c Mon Sep 17 00:00:00 2001 From: som-snytt Date: Tue, 7 May 2024 06:46:03 -0700 Subject: [PATCH 334/465] ConsoleReporter sends INFO to stdout (#20328) Fixes #16701 Tested only manually. For example, with `tailrec` printer enabled, ``` scalac -Vprint:typer example.scala > x 2> y ``` puts tree output and tailrec trace in x, warnings & errors & summary count in y. x is the output I asked for. This makes it easier to see trace and trees correctly interleaved, since `> out 2>&1` does not guarantee it. Tests may depend on how output is captured. Scala 2 partest captures stderr to log, for example. 
--- .../dotc/reporting/ConsoleReporter.scala | 22 ++++++++++++------- .../dotty/tools/dotc/reporting/Reporter.scala | 7 +++--- .../src/dotty/tools/repl/ReplDriver.scala | 1 + .../tools/dotc/reporting/TestReporter.scala | 2 ++ 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala index 270c35d0add7..3dc73983056a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala @@ -5,15 +5,18 @@ package reporting import core.Contexts.* import java.io.{ BufferedReader, PrintWriter } import Diagnostic.* +import dotty.tools.dotc.interfaces.Diagnostic.INFO /** * This class implements a Reporter that displays messages on a text console */ class ConsoleReporter( reader: BufferedReader = Console.in, - writer: PrintWriter = new PrintWriter(Console.err, true) + writer: PrintWriter = new PrintWriter(Console.err, true), + echoer: PrintWriter = new PrintWriter(Console.out, true) ) extends ConsoleReporter.AbstractConsoleReporter { - override def printMessage(msg: String): Unit = { writer.print(msg + "\n"); writer.flush() } + override def printMessage(msg: String): Unit = { writer.println(msg); writer.flush() } + override def echoMessage(msg: String): Unit = { echoer.println(msg); echoer.flush() } override def flush()(using Context): Unit = writer.flush() override def doReport(dia: Diagnostic)(using Context): Unit = { @@ -22,18 +25,21 @@ class ConsoleReporter( dia match case _: Error => Reporter.displayPrompt(reader, writer) case _: Warning if ctx.settings.XfatalWarnings.value => Reporter.displayPrompt(reader, writer) - case _ => + case _ => } } object ConsoleReporter { abstract class AbstractConsoleReporter extends AbstractReporter { - /** Prints the message. */ + /** Print the diagnostic message. 
*/ def printMessage(msg: String): Unit - /** Prints the message with the given position indication. */ - def doReport(dia: Diagnostic)(using Context): Unit = { - printMessage(messageAndPos(dia)) - } + /** Print the informative message. */ + def echoMessage(msg: String): Unit + + /** Print the message with the given position indication. */ + def doReport(dia: Diagnostic)(using Context): Unit = + if dia.level == INFO then echoMessage(messageAndPos(dia)) + else printMessage(messageAndPos(dia)) } } diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index ddea384f4832..61f842800b78 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -14,7 +14,7 @@ import dotty.tools.dotc.util.NoSourcePosition import java.io.{BufferedReader, PrintWriter} import scala.annotation.internal.sharable import scala.collection.mutable -import core.Decorators.em +import core.Decorators.{em, toMessage} import core.handleRecursive object Reporter { @@ -236,10 +236,9 @@ abstract class Reporter extends interfaces.ReporterResult { report(Warning(msg, NoSourcePosition)) /** Print the summary of warnings and errors */ - def printSummary()(using Context): Unit = { + def printSummary()(using Context): Unit = val s = summary - if (s != "") report(new Info(s, NoSourcePosition)) - } + if (s != "") doReport(Warning(s.toMessage, NoSourcePosition)) /** Returns a string meaning "n elements". 
*/ protected def countString(n: Int, elements: String): String = n match { diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 121d00bcf8b3..f22523ac6f64 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -545,6 +545,7 @@ class ReplDriver(settings: Array[String], private object ReplConsoleReporter extends ConsoleReporter.AbstractConsoleReporter { override def posFileStr(pos: SourcePosition) = "" // omit file paths override def printMessage(msg: String): Unit = out.println(msg) + override def echoMessage(msg: String): Unit = printMessage(msg) override def flush()(using Context): Unit = out.flush() } diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala index 3b30742a8d4f..953dd16e170b 100644 --- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala +++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala @@ -75,6 +75,8 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M _diagnosticBuf.append(dia) printMessageAndPos(dia, extra) } + + override def printSummary()(using Context): Unit = () } object TestReporter { From 56b276f974b97387209feb5cb1ef70fccc1ccb19 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 10 Apr 2024 14:13:27 +0200 Subject: [PATCH 335/465] Implement match type amendment: extractors follow aliases and singletons This implements the change proposed in https://github.com/scala/improvement-proposals/pull/84. The added pos test case presents motivating examples, the added neg test cases demonstrate that errors are correctly reported when cycles are present. The potential for cycle is no worse than with the existing extraction logic as demonstrated by the existing test in `tests/neg/mt-deskolemize.scala`. 
[Cherry-picked 1a235c6719f56e1597241dc38eeda49087b323e8] --- .../dotty/tools/dotc/core/TypeComparer.scala | 65 +++++++++++++++++-- tests/neg/mt-deskolemize.scala | 42 ++++++++++++ tests/pos/mt-deskolemize.scala | 55 ++++++++++++++++ 3 files changed, 157 insertions(+), 5 deletions(-) create mode 100644 tests/pos/mt-deskolemize.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index cee1ec7fffa8..dad159ace55f 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3518,20 +3518,75 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { false case MatchTypeCasePattern.TypeMemberExtractor(typeMemberName, capture) => + /** Try to remove references to `skolem` from a type in accordance with the spec. + * + * If any reference to `skolem` remains in the result type, + * `refersToSkolem` is set to true. + */ + class DropSkolemMap(skolem: SkolemType) extends TypeMap: + var refersToSkolem = false + def apply(tp: Type): Type = + tp match + case `skolem` => + refersToSkolem = true + tp + case tp: NamedType => + var savedRefersToSkolem = refersToSkolem + refersToSkolem = false + try + val pre1 = apply(tp.prefix) + if refersToSkolem then + tp match + case tp: TermRef => tp.info.widenExpr.dealias match + case info: SingletonType => + refersToSkolem = false + apply(info) + case _ => + tp.derivedSelect(pre1) + case tp: TypeRef => tp.info match + case info: AliasingBounds => + refersToSkolem = false + apply(info.alias) + case _ => + tp.derivedSelect(pre1) + else + tp.derivedSelect(pre1) + finally + refersToSkolem |= savedRefersToSkolem + case tp: LazyRef => + // By default, TypeMap maps LazyRefs lazily. We need to + // force it for `refersToSkolem` to be correctly set. + apply(tp.ref) + case _ => + mapOver(tp) + end DropSkolemMap + /** Try to remove references to `skolem` from `u` in accordance with the spec. 
+ * + * If any reference to `skolem` remains in the result type, return + * NoType instead. + */ + def dropSkolem(u: Type, skolem: SkolemType): Type = + val dmap = DropSkolemMap(skolem) + val res = dmap(u) + if dmap.refersToSkolem then NoType else res + val stableScrut: SingletonType = scrut match case scrut: SingletonType => scrut case _ => SkolemType(scrut) + stableScrut.member(typeMemberName) match case denot: SingleDenotation if denot.exists => val info = denot.info match case alias: AliasingBounds => alias.alias // Extract the alias case ClassInfo(prefix, cls, _, _, _) => prefix.select(cls) // Re-select the class from the prefix case info => info // Notably, RealTypeBounds, which will eventually give a MatchResult.NoInstances - val infoRefersToSkolem = stableScrut.isInstanceOf[SkolemType] && stableScrut.occursIn(info) - val info1 = info match - case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances - case _ if infoRefersToSkolem => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances - case _ => info // We have a match + val info1 = stableScrut match + case skolem: SkolemType => + dropSkolem(info, skolem).orElse: + info match + case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances + case _ => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances + case _ => info rec(capture, info1, variance = 0, scrutIsWidenedAbstract) case _ => false diff --git a/tests/neg/mt-deskolemize.scala b/tests/neg/mt-deskolemize.scala index 0a58d5db7bc4..505e47637ac4 100644 --- a/tests/neg/mt-deskolemize.scala +++ b/tests/neg/mt-deskolemize.scala @@ -14,3 +14,45 @@ class SimpleLoop2 extends Expr: object Test1: val x: ExtractValue[SimpleLoop1] = 1 // error + +trait Description: + type Elem <: Tuple + +class PrimBroken extends Expr: + type Value = Alias + type Alias = Value // error + +class Prim extends Expr: + type Value = BigInt + +class VecExpr[E <: Expr] extends Expr: + type Value = 
Vector[ExtractValue[E]] + +trait ProdExpr extends Expr: + val description: Description + type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] + + +class MyExpr1 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[Prim], MyExpr2) + +class MyExpr2 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) + +trait Constable[E <: Expr]: + def lit(v: ExtractValue[E]): E +object Constable: + given [E <: Expr]: Constable[E] = ??? + +object Test2: + def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = + summon[Constable[E]].lit(v) + val x0: ExtractValue[Prim] = "" // error + val x1: ExtractValue[PrimBroken] = 1 // error + + val foo: MyExpr2 = new MyExpr2 + val v: foo.Value = (Vector(Vector()), 1) // error: Recursion limit exceeded + val c: MyExpr2 = fromLiteral: + (Vector(Vector()), 1) // error: Recursion limit exceeded diff --git a/tests/pos/mt-deskolemize.scala b/tests/pos/mt-deskolemize.scala new file mode 100644 index 000000000000..34f38289b24d --- /dev/null +++ b/tests/pos/mt-deskolemize.scala @@ -0,0 +1,55 @@ +trait Expr: + type Value + +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[F <: Expr] = F match + case Expr.Of[v] => v +import Expr.ExtractValue + +class Prim extends Expr: + type Value = Alias + type Alias = BigInt + +class VecExpr[E <: Expr] extends Expr: + type Value = Vector[ExtractValue[E]] + +trait Description: + type Elem <: Tuple + +trait ProdExpr extends Expr: + val description: Description + type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] + +class MyExpr1 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[Prim], Prim) + +class MyExpr2 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) + +trait ProdExprAlt[T <: Tuple] extends Expr: + type Value = Tuple.Map[T, [X] =>> ExtractValue[X & Expr]] + +class 
MyExpr3 extends ProdExprAlt[(Prim, VecExpr[Prim], Prim)] + +trait Constable[E <: Expr]: + def lit(v: ExtractValue[E]): E +object Constable: + given [E <: Expr]: Constable[E] = ??? + +object Test: + def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = + summon[Constable[E]].lit(v) + val a: Prim = fromLiteral(1) + val b: VecExpr[Prim] = fromLiteral(Vector(1)) + val c: MyExpr1 = fromLiteral((Vector(1), 1)) + val d: MyExpr2 = fromLiteral(Vector(Vector((Vector(1), 1))), 2) + val e: MyExpr3 = fromLiteral((1, Vector(1), 1)) + val f: ProdExprAlt[(MyExpr1, VecExpr[MyExpr3])] = fromLiteral: + ( + (Vector(1), 1), + Vector((1, Vector(1), 1), (2, Vector(1), 2)) + ) + val g: Expr { type Alias = Int; type Value = Alias } = fromLiteral(1) From bfa18520c76820e16b2274ed9b8850c663c15ab0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Thu, 9 May 2024 16:24:14 +0200 Subject: [PATCH 336/465] Move logic under feature.experimental.betterMatchTypesExtractors This way we can merge this PR without waiting for the SIP committee to approve it. 
[Cherry-picked 61b5a7b6a52f32c68a4f3aa8842f6c4850349b87][modified] --- .../src/dotty/tools/dotc/config/Feature.scala | 3 + .../dotty/tools/dotc/core/TypeComparer.scala | 11 +++- .../runtime/stdLibPatches/language.scala | 7 +++ tests/neg/mt-deskolemize-2.scala | 60 +++++++++++++++++++ tests/neg/mt-deskolemize.scala | 42 ------------- tests/pos/mt-deskolemize.scala | 2 + 6 files changed, 80 insertions(+), 45 deletions(-) create mode 100644 tests/neg/mt-deskolemize-2.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 1fe9cae936c9..5c27f20fcba1 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -34,6 +34,7 @@ object Feature: val captureChecking = experimental("captureChecking") val into = experimental("into") val namedTuples = experimental("namedTuples") + val betterMatchTypeExtractors = experimental("betterMatchTypeExtractors") def experimentalAutoEnableFeatures(using Context): List[TermName] = defn.languageExperimentalFeatures @@ -88,6 +89,8 @@ object Feature: def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + def betterMatchTypeExtractorsEnabled(using Context) = enabled(betterMatchTypeExtractors) + /** Is pureFunctions enabled for this compilation unit? 
*/ def pureFunsEnabled(using Context) = enabledBySetting(pureFunctions) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index dad159ace55f..3ce98e5447a2 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -10,7 +10,7 @@ import TypeOps.refineUsingParent import collection.mutable import util.{Stats, NoSourcePosition, EqHashMap} import config.Config -import config.Feature.{migrateTo3, sourceVersion} +import config.Feature.{betterMatchTypeExtractorsEnabled, migrateTo3, sourceVersion} import config.Printers.{subtyping, gadts, matchTypes, noPrinter} import config.SourceVersion import TypeErasure.{erasedLub, erasedGlb} @@ -3519,6 +3519,11 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { case MatchTypeCasePattern.TypeMemberExtractor(typeMemberName, capture) => /** Try to remove references to `skolem` from a type in accordance with the spec. + * + * If `betterMatchTypeExtractorsEnabled` is enabled then references + * to `skolem` occuring are avoided by following aliases and + * singletons, otherwise no attempt made to avoid references to + * `skolem`. * * If any reference to `skolem` remains in the result type, * `refersToSkolem` is set to true. @@ -3530,7 +3535,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { case `skolem` => refersToSkolem = true tp - case tp: NamedType => + case tp: NamedType if betterMatchTypeExtractorsEnabled => var savedRefersToSkolem = refersToSkolem refersToSkolem = false try @@ -3553,7 +3558,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { tp.derivedSelect(pre1) finally refersToSkolem |= savedRefersToSkolem - case tp: LazyRef => + case tp: LazyRef if betterMatchTypeExtractorsEnabled => // By default, TypeMap maps LazyRefs lazily. We need to // force it for `refersToSkolem` to be correctly set. 
apply(tp.ref) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index b2bd4b791423..78755b8df757 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -105,6 +105,13 @@ object language: @compileTimeOnly("`relaxedExtensionImports` can only be used at compile time in import statements") @deprecated("The experimental.relaxedExtensionImports language import is no longer needed since the feature is now standard", since = "3.4") object relaxedExtensionImports + + /** Enhance match type extractors to follow aliases and singletons. + * + * @see [[https://github.com/scala/improvement-proposals/pull/84]] + */ + @compileTimeOnly("`betterMatchTypeExtractors` can only be used at compile time in import statements") + object betterMatchTypeExtractors end experimental /** The deprecated object contains features that are no longer officially suypported in Scala. 
diff --git a/tests/neg/mt-deskolemize-2.scala b/tests/neg/mt-deskolemize-2.scala new file mode 100644 index 000000000000..90d506a42e6f --- /dev/null +++ b/tests/neg/mt-deskolemize-2.scala @@ -0,0 +1,60 @@ +//> using options -language:experimental.betterMatchTypeExtractors + +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[F <: Expr] = F match + case Expr.Of[v] => v +import Expr.ExtractValue + +class SimpleLoop1 extends Expr: + type Value = ExtractValue[SimpleLoop2] + +class SimpleLoop2 extends Expr: + type Value = ExtractValue[SimpleLoop1] + +object Test1: + val x: ExtractValue[SimpleLoop1] = 1 // error + +trait Description: + type Elem <: Tuple + +class PrimBroken extends Expr: + type Value = Alias + type Alias = Value // error + +class Prim extends Expr: + type Value = BigInt + +class VecExpr[E <: Expr] extends Expr: + type Value = Vector[ExtractValue[E]] + +trait ProdExpr extends Expr: + val description: Description + type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] + + +class MyExpr1 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[Prim], MyExpr2) + +class MyExpr2 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) + +trait Constable[E <: Expr]: + def lit(v: ExtractValue[E]): E +object Constable: + given [E <: Expr]: Constable[E] = ??? 
+ +object Test2: + def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = + summon[Constable[E]].lit(v) + val x0: ExtractValue[Prim] = "" // error + val x1: ExtractValue[PrimBroken] = 1 // error + + val foo: MyExpr2 = new MyExpr2 + val v: foo.Value = (Vector(Vector()), 1) // error: Recursion limit exceeded + val c: MyExpr2 = fromLiteral: + (Vector(Vector()), 1) // error: Recursion limit exceeded diff --git a/tests/neg/mt-deskolemize.scala b/tests/neg/mt-deskolemize.scala index 505e47637ac4..0a58d5db7bc4 100644 --- a/tests/neg/mt-deskolemize.scala +++ b/tests/neg/mt-deskolemize.scala @@ -14,45 +14,3 @@ class SimpleLoop2 extends Expr: object Test1: val x: ExtractValue[SimpleLoop1] = 1 // error - -trait Description: - type Elem <: Tuple - -class PrimBroken extends Expr: - type Value = Alias - type Alias = Value // error - -class Prim extends Expr: - type Value = BigInt - -class VecExpr[E <: Expr] extends Expr: - type Value = Vector[ExtractValue[E]] - -trait ProdExpr extends Expr: - val description: Description - type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] - - -class MyExpr1 extends ProdExpr: - final val description = new Description: - type Elem = (VecExpr[Prim], MyExpr2) - -class MyExpr2 extends ProdExpr: - final val description = new Description: - type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) - -trait Constable[E <: Expr]: - def lit(v: ExtractValue[E]): E -object Constable: - given [E <: Expr]: Constable[E] = ??? 
- -object Test2: - def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = - summon[Constable[E]].lit(v) - val x0: ExtractValue[Prim] = "" // error - val x1: ExtractValue[PrimBroken] = 1 // error - - val foo: MyExpr2 = new MyExpr2 - val v: foo.Value = (Vector(Vector()), 1) // error: Recursion limit exceeded - val c: MyExpr2 = fromLiteral: - (Vector(Vector()), 1) // error: Recursion limit exceeded diff --git a/tests/pos/mt-deskolemize.scala b/tests/pos/mt-deskolemize.scala index 34f38289b24d..abd61d9d55e6 100644 --- a/tests/pos/mt-deskolemize.scala +++ b/tests/pos/mt-deskolemize.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.betterMatchTypeExtractors + trait Expr: type Value From 3e1c4defc3cb1db0367f5e0fbece1db01279fc7b Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 7 May 2024 12:34:30 +0200 Subject: [PATCH 337/465] DropSkolemMap: simplify logic No need to save the value of `refersToSkolem`: if it's true before we enter `NamedType` it will be true after and `dropSkolem` will return `NoType`. The previous logic could still be useful if we want to give more easily actionable error messages in the future by only keeping in the type the skolems we couldn't remove. 
[Cherry-picked a1930c4ca38673885a4ebc2ce95689e9e65d08be] --- .../dotty/tools/dotc/core/TypeComparer.scala | 41 +++++++++---------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 3ce98e5447a2..27dd4b7134a9 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3531,33 +3531,30 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { class DropSkolemMap(skolem: SkolemType) extends TypeMap: var refersToSkolem = false def apply(tp: Type): Type = + if refersToSkolem then + return tp tp match case `skolem` => refersToSkolem = true tp case tp: NamedType if betterMatchTypeExtractorsEnabled => - var savedRefersToSkolem = refersToSkolem - refersToSkolem = false - try - val pre1 = apply(tp.prefix) - if refersToSkolem then - tp match - case tp: TermRef => tp.info.widenExpr.dealias match - case info: SingletonType => - refersToSkolem = false - apply(info) - case _ => - tp.derivedSelect(pre1) - case tp: TypeRef => tp.info match - case info: AliasingBounds => - refersToSkolem = false - apply(info.alias) - case _ => - tp.derivedSelect(pre1) - else - tp.derivedSelect(pre1) - finally - refersToSkolem |= savedRefersToSkolem + val pre1 = apply(tp.prefix) + if refersToSkolem then + tp match + case tp: TermRef => tp.info.widenExpr.dealias match + case info: SingletonType => + refersToSkolem = false + apply(info) + case _ => + tp.derivedSelect(pre1) + case tp: TypeRef => tp.info match + case info: AliasingBounds => + refersToSkolem = false + apply(info.alias) + case _ => + tp.derivedSelect(pre1) + else + tp.derivedSelect(pre1) case tp: LazyRef if betterMatchTypeExtractorsEnabled => // By default, TypeMap maps LazyRefs lazily. We need to // force it for `refersToSkolem` to be correctly set. 
From 11f01d2fc199db95d59139d78f552a8e8fed7341 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 8 May 2024 14:57:35 +0200 Subject: [PATCH 338/465] Deprecate `StandardPlugin.init` in favor of `initialize` method taking implicit Context (#20330) We do deprecate `StandardPlugin.init` in favour of `StandardPlugin.initialize` method tak takes additional `Context` parameter - it would e.g. allow to use reporting mechanism when parsing compiler plugin options. Introduces changes to akka/akka fork used in Community Build [Cherry-picked 1276034e48114b9422ae5c5f1b25708e62517d45] --- community-build/community-projects/akka | 2 +- .../src/dotty/tools/dotc/plugins/Plugin.scala | 16 +++++++++++++++- .../src/dotty/tools/dotc/plugins/Plugins.scala | 2 +- .../changed-features/compiler-plugins.md | 4 ++-- .../changed-features/compiler-plugins.md | 4 ++-- .../analyzer-plugin/plugin/Analyzer.scala | 2 +- .../compiler-plugin/plugin/DivideZero.scala | 3 ++- tests/plugins/custom/analyzer/Analyzer_1.scala | 2 +- tests/plugins/neg/divideZero/plugin_1.scala | 2 +- 9 files changed, 26 insertions(+), 11 deletions(-) diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka index 7f5115ebc9cd..79b294048f89 160000 --- a/community-build/community-projects/akka +++ b/community-build/community-projects/akka @@ -1 +1 @@ -Subproject commit 7f5115ebc9cde408433040f11834f5218b4a3357 +Subproject commit 79b294048f893d9d6b9332618f7aebedce9a5340 diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index ce77a5b9d97a..fdb41fc56689 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -13,6 +13,7 @@ import java.io.InputStream import java.util.Properties import scala.util.{ Try, Success, Failure } +import scala.annotation.nowarn trait PluginPhase extends MiniPhase { def runsBefore: Set[String] = Set.empty @@ -50,7 +51,20 @@ trait 
StandardPlugin extends Plugin { * @param options commandline options to the plugin. * @return a list of phases to be added to the phase plan */ - def init(options: List[String]): List[PluginPhase] + @deprecatedOverriding("Method 'init' does not allow to access 'Context', use 'initialize' instead.", since = "Scala 3.5.0") + @deprecated("Use 'initialize' instead.", since = "Scala 3.5.0") + def init(options: List[String]): List[PluginPhase] = Nil + + /** Non-research plugins should override this method to return the phases + * + * The phases returned must be freshly constructed (not reused + * and returned again on subsequent calls). + * + * @param options commandline options to the plugin. + * @return a list of phases to be added to the phase plan + */ + @nowarn("cat=deprecation") + def initialize(options: List[String])(using Context): List[PluginPhase] = init(options) } /** A research plugin may customize the compilation pipeline freely diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 31176bb2fb2c..a6672d475129 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -125,7 +125,7 @@ trait Plugins { } // schedule plugins according to ordering constraints - val pluginPhases = plugins.collect { case p: StandardPlugin => p }.flatMap { plug => plug.init(options(plug)) } + val pluginPhases = plugins.collect { case p: StandardPlugin => p }.flatMap { plug => plug.initialize(options(plug)) } val updatedPlan = Plugins.schedule(plan, pluginPhases) // add research plugins diff --git a/docs/_docs/reference/changed-features/compiler-plugins.md b/docs/_docs/reference/changed-features/compiler-plugins.md index 6be8a62c7ac4..c0bfccec8172 100644 --- a/docs/_docs/reference/changed-features/compiler-plugins.md +++ b/docs/_docs/reference/changed-features/compiler-plugins.md @@ -67,7 +67,7 @@ class DivideZero extends StandardPlugin: val name: 
String = "divideZero" override val description: String = "divide zero check" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new DivideZeroPhase) :: Nil class DivideZeroPhase extends PluginPhase: @@ -90,7 +90,7 @@ end DivideZeroPhase ``` The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` -and implement the method `init` that takes the plugin's options as argument +and implement the method `initialize` that takes the plugin's options as argument and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. Our plugin adds one compiler phase to the pipeline. A compiler phase must extend diff --git a/docs/_spec/TODOreference/changed-features/compiler-plugins.md b/docs/_spec/TODOreference/changed-features/compiler-plugins.md index 20bdb7f49836..719e204fc803 100644 --- a/docs/_spec/TODOreference/changed-features/compiler-plugins.md +++ b/docs/_spec/TODOreference/changed-features/compiler-plugins.md @@ -67,7 +67,7 @@ class DivideZero extends StandardPlugin: val name: String = "divideZero" override val description: String = "divide zero check" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new DivideZeroPhase) :: Nil class DivideZeroPhase extends PluginPhase: @@ -90,7 +90,7 @@ end DivideZeroPhase ``` The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` -and implement the method `init` that takes the plugin's options as argument +and implement the method `initialize` that takes the plugin's options as argument and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. Our plugin adds one compiler phase to the pipeline. 
A compiler phase must extend diff --git a/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala b/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala index c1fab5c13f42..01aa57d7a971 100644 --- a/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala +++ b/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala @@ -21,7 +21,7 @@ class InitPlugin extends StandardPlugin { val name: String = "initPlugin" override val description: String = "checks that under -Yretain-trees we may get tree for all symbols" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new SetDefTree) :: (new InitChecker) :: Nil } diff --git a/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala b/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala index c6fac6b796c0..3d1698250e5d 100644 --- a/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala +++ b/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala @@ -22,7 +22,8 @@ class DivideZero extends PluginPhase with StandardPlugin { override val runsAfter = Set(Pickler.name) override val runsBefore = Set(Staging.name) - def init(options: List[String]): List[PluginPhase] = this :: Nil + // We keep using deprecated variant here just to ensure it still works correctly + override def init(options: List[String]): List[PluginPhase] = this :: Nil private def isNumericDivide(sym: Symbol)(implicit ctx: Context): Boolean = { def test(tpe: String): Boolean = diff --git a/tests/plugins/custom/analyzer/Analyzer_1.scala b/tests/plugins/custom/analyzer/Analyzer_1.scala index 0e1cc53290d0..d611972e0e48 100644 --- a/tests/plugins/custom/analyzer/Analyzer_1.scala +++ b/tests/plugins/custom/analyzer/Analyzer_1.scala @@ -52,7 +52,7 @@ class InitChecker extends PluginPhase with StandardPlugin { override val runsAfter = Set(SetDefTree.name) override val runsBefore = Set(FirstTransform.name) - def init(options: List[String]): List[PluginPhase] = this 
:: (new SetDefTree) :: Nil + override def initialize(options: List[String])(using Context): List[PluginPhase] = this :: (new SetDefTree) :: Nil private def checkDef(tree: Tree)(implicit ctx: Context): Tree = { if (tree.symbol.defTree.isEmpty) diff --git a/tests/plugins/neg/divideZero/plugin_1.scala b/tests/plugins/neg/divideZero/plugin_1.scala index ef8e077fd14d..68b2a8eae478 100644 --- a/tests/plugins/neg/divideZero/plugin_1.scala +++ b/tests/plugins/neg/divideZero/plugin_1.scala @@ -20,7 +20,7 @@ class DivideZero extends PluginPhase with StandardPlugin { override val runsAfter = Set(Pickler.name) override val runsBefore = Set(PickleQuotes.name) - override def init(options: List[String]): List[PluginPhase] = this :: Nil + override def initialize(options: List[String])(using Context): List[PluginPhase] = this :: Nil private def isNumericDivide(sym: Symbol)(implicit ctx: Context): Boolean = { def test(tpe: String): Boolean = From 5fdfb977114c6593f99117b79316012ec2747c19 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 7 Jan 2024 13:22:06 +0100 Subject: [PATCH 339/465] New modularity language import [Cherry-picked 34f17b753ad8dc5fcc038d592a8fc1c748ec62b4] --- .../src/scala/runtime/stdLibPatches/language.scala | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 78755b8df757..e9c480919902 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -96,7 +96,18 @@ object language: * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] */ @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") - object namedTuples + object namedTuples + + /** Experimental support for new features for better modularity, including + * - better tracking of dependencies through classes + * - better usability of context 
bounds + * - better syntax and conventions for type classes + * - ability to merge exported types in intersections + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/modularity]] + */ + @compileTimeOnly("`modularity` can only be used at compile time in import statements") + object modularity /** Was needed to add support for relaxed imports of extension methods. * The language import is no longer needed as this is now a standard feature since SIP was accepted. From 813af6907362b8ba06bc9bd4ef9914a4d5804b51 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 18 Nov 2023 15:10:34 +0100 Subject: [PATCH 340/465] Allow vals in using clauses of givens [Cherry-picked 31c9e8a850e3f40dd797dc9e3669dcadb020586d] --- .../dotty/tools/dotc/parsing/Parsers.scala | 25 +++++++++++++------ 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 60b2a2b1d3cf..8d5c50d6d608 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -62,7 +62,7 @@ object Parsers { case ExtensionFollow // extension clause, following extension parameter def isClass = // owner is a class - this == Class || this == CaseClass + this == Class || this == CaseClass || this == Given def takesOnlyUsingClauses = // only using clauses allowed for this owner this == Given || this == ExtensionFollow def acceptsVariance = @@ -3372,7 +3372,7 @@ object Parsers { val isAbstractOwner = paramOwner == ParamOwner.Type || paramOwner == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param - if paramOwner == ParamOwner.Class || paramOwner == ParamOwner.CaseClass then + if paramOwner.isClass then mods |= PrivateLocal if isIdent(nme.raw.PLUS) && checkVarianceOK() then mods |= Covariant @@ -4100,6 +4100,14 @@ object Parsers { val nameStart = in.offset val name = if isIdent && followingIsGivenSig() then ident() else 
EmptyTermName + // TODO Change syntax description + def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = + paramss.nestedMap: param => + if !param.mods.isAllOf(PrivateLocal) then + syntaxError(em"method parameter ${param.name} may not be a `val`", param.span) + param.withMods(param.mods &~ (AccessFlags | ParamAccessor | Mutable) | Param) + .asInstanceOf[List[ParamClause]] + val gdef = val tparams = typeParamClauseOpt(ParamOwner.Given) newLineOpt() @@ -4121,16 +4129,17 @@ mods1 |= Lazy ValDef(name, parents.head, subExpr()) else - DefDef(name, joinParams(tparams, vparamss), parents.head, subExpr()) + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) else if (isStatSep || isStatSeqEnd) && parentsIsType then if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") - DefDef(name, joinParams(tparams, vparamss), parents.head, EmptyTree) + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else - val tparams1 = tparams.map(tparam => tparam.withMods(tparam.mods | PrivateLocal)) - val vparamss1 = vparamss.map(_.map(vparam => - vparam.withMods(vparam.mods &~ Param | ParamAccessor | Protected))) - val constr = makeConstructor(tparams1, vparamss1) + val vparamss1 = vparamss.nestedMap: vparam => + if vparam.mods.is(Private) + then vparam.withMods(vparam.mods &~ PrivateLocal | Protected) + else vparam + val constr = makeConstructor(tparams, vparamss1) val templ = if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) else withTemplate(constr, parents) From b5d48fda4954567d5a0851723213ffdb8d4cd844 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Thu, 9 May 2024 17:06:45 +0200 Subject: [PATCH 341/465] A relaxation concerning exported type aliases The rules for export forwarders are changed as follows. Previously, all export forwarders were declared `final`. Now, only term members are declared `final`. 
Type aliases left aside. This makes it possible to export the same type member into several traits and then mix these traits in the same class. `typeclass-aggregates.scala` shows why this is essential to be able to combine multiple givens with type members. The change does not lose safety since different type aliases would in any case lead to uninstantiatable classes. [Cherry-picked 84655ca3409c3ec2c1645b0c8f56ff7d17cc304d][modified] --- .../src/dotty/tools/dotc/config/Feature.scala | 1 + .../src/dotty/tools/dotc/core/Flags.scala | 2 - .../src/dotty/tools/dotc/typer/Namer.scala | 6 ++- .../reference/other-new-features/export.md | 16 +++++-- tests/neg/i0248-inherit-refined.check | 12 +++++ tests/pos/typeclass-aggregates.scala | 47 +++++++++++++++++++ 6 files changed, 77 insertions(+), 7 deletions(-) create mode 100644 tests/neg/i0248-inherit-refined.check create mode 100644 tests/pos/typeclass-aggregates.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 5c27f20fcba1..0d551094da4d 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -34,6 +34,7 @@ object Feature: val captureChecking = experimental("captureChecking") val into = experimental("into") val namedTuples = experimental("namedTuples") + val modularity = experimental("modularity") val betterMatchTypeExtractors = experimental("betterMatchTypeExtractors") def experimentalAutoEnableFeatures(using Context): List[TermName] = diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 8110bc769d4f..98c57a96a5c0 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -543,8 +543,6 @@ object Flags { /** Flags retained in type export forwarders */ val RetainedExportTypeFlags = Infix - val MandatoryExportTypeFlags = Exported | Final - /** Flags that apply only 
to classes */ val ClassOnlyFlags = Sealed | Open | Abstract.toTypeFlags diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 72ca6a35bf4b..d2121ede2a67 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -26,7 +26,7 @@ import Nullables.* import transform.ValueClasses.* import TypeErasure.erasure import reporting.* -import config.Feature.sourceVersion +import config.Feature.{sourceVersion, modularity} import config.SourceVersion.* import scala.compiletime.uninitialized @@ -1203,7 +1203,9 @@ class Namer { typer: Typer => target = target.etaExpand newSymbol( cls, forwarderName, - MandatoryExportTypeFlags | (sym.flags & RetainedExportTypeFlags), + Exported + | (sym.flags & RetainedExportTypeFlags) + | (if Feature.enabled(modularity) then EmptyFlags else Final), TypeAlias(target), coord = span) // Note: This will always create unparameterzied aliases. So even if the original type is diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md index 98e9a7d3d711..e21d369b6b5e 100644 --- a/docs/_docs/reference/other-new-features/export.md +++ b/docs/_docs/reference/other-new-features/export.md @@ -37,7 +37,12 @@ final def print(bits: BitMap): Unit = printUnit.print(bits) final type PrinterType = printUnit.PrinterType ``` -They can be accessed inside `Copier` as well as from outside: +With the experimental `modularity` language import, only exported methods and values are final, whereas the generated `PrinterType` would be a simple type alias +```scala + type PrinterType = printUnit.PrinterType +``` + +These aliases can be accessed inside `Copier` as well as from outside: ```scala val copier = new Copier @@ -90,12 +95,17 @@ export O.* ``` Export aliases copy the type and value parameters of the members they refer to. -Export aliases are always `final`. 
Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: +Export aliases of term members are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: - - Export aliases cannot be overridden, since they are final. + - Export aliases of methods or fields cannot be overridden, since they are final. - Export aliases cannot override concrete members in base classes, since they are not marked `override`. - However, export aliases can implement deferred members of base classes. + - Export type aliases are normally also final, except when the experimental + language import `modularity` is present. The general + rules for type aliases ensure in any case that if there are several type aliases in a class, + they must agree on their right hand sides, or the class could not be instantiated. + So dropping the `final` for export type aliases is safe. 
Export aliases for public value definitions that are accessed without referring to private values in the qualifier path diff --git a/tests/neg/i0248-inherit-refined.check b/tests/neg/i0248-inherit-refined.check new file mode 100644 index 000000000000..4e14c3c6f14b --- /dev/null +++ b/tests/neg/i0248-inherit-refined.check @@ -0,0 +1,12 @@ +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:8:18 ------------------------------------------------------- +8 | class C extends Y // error + | ^ + | test.A & test.B is not a class type + | + | longer explanation available when compiling with `-explain` +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:10:18 ------------------------------------------------------ +10 | class D extends Z // error + | ^ + | test.A | test.B is not a class type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala new file mode 100644 index 000000000000..77b0f1a9f04a --- /dev/null +++ b/tests/pos/typeclass-aggregates.scala @@ -0,0 +1,47 @@ +//> using options -source future -language:experimental.modularity +trait Ord: + type This + extension (x: This) + def compareTo(y: This): Int + def < (y: This): Boolean = compareTo(y) < 0 + def > (y: This): Boolean = compareTo(y) > 0 + + trait OrdProxy extends Ord: + export Ord.this.* + +trait SemiGroup: + type This + extension (x: This) def combine(y: This): This + + trait SemiGroupProxy extends SemiGroup: + export SemiGroup.this.* + +trait Monoid extends SemiGroup: + def unit: This + + trait MonoidProxy extends Monoid: + export Monoid.this.* + +def ordWithMonoid(ord: Ord, monoid: Monoid{ type This = ord.This }): Ord & Monoid = + new ord.OrdProxy with monoid.MonoidProxy {} + +trait OrdWithMonoid extends Ord, Monoid + +def ordWithMonoid2(ord: Ord, monoid: Monoid{ type This = ord.This }) = //: OrdWithMonoid { type This = ord.This} = + new OrdWithMonoid with ord.OrdProxy with 
monoid.MonoidProxy {} + +given intOrd: Ord { type This = Int } = ??? +given intMonoid: Monoid { type This = Int } = ??? + +//given (using ord: Ord, monoid: Monoid{ type This = ord.This }): (Ord & Monoid { type This = ord.This}) = +// ordWithMonoid2(ord, monoid) + +val x = summon[Ord & Monoid { type This = Int}] +val y: Int = ??? : x.This + +// given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = +// new ord.OrdProxy with monoid.MonoidProxy {} + +given [A](using ord: Ord { type This = A }, monoid: Monoid { type This = A}): (Ord & Monoid) { type This = A} = + new ord.OrdProxy with monoid.MonoidProxy {} + From 48e2aa7329d85f754f2d4aaec3d7ea638f3fd83d Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Dec 2023 10:54:15 +0100 Subject: [PATCH 342/465] Allow class parents to be refined types. Refinements of a class parent are added as synthetic members to the inheriting class. [Cherry-picked 48944142182932b0bb1f97d7261d6033aa96888a] --- .../src/dotty/tools/dotc/core/NamerOps.scala | 21 +++++ .../tools/dotc/core/tasty/TreeUnpickler.scala | 2 +- .../tools/dotc/transform/init/Util.scala | 1 + .../src/dotty/tools/dotc/typer/Namer.scala | 37 +++++++-- .../src/dotty/tools/dotc/typer/Typer.scala | 30 +++++-- tests/neg/i0248-inherit-refined.scala | 6 +- tests/neg/parent-refinement-access.check | 7 ++ tests/neg/parent-refinement-access.scala | 6 ++ tests/neg/parent-refinement.check | 29 ++++++- tests/neg/parent-refinement.scala | 20 ++++- tests/pos/parent-refinement.scala | 48 +++++++++++ tests/pos/typeclasses.scala | 79 ++++--------------- 12 files changed, 200 insertions(+), 86 deletions(-) create mode 100644 tests/neg/parent-refinement-access.check create mode 100644 tests/neg/parent-refinement-access.scala create mode 100644 tests/pos/parent-refinement.scala diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 75a135826785..8d096913e285 100644 --- 
a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -5,6 +5,7 @@ package core import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme import TypeApplications.EtaExpansion +import collection.mutable /** Operations that are shared between Namer and TreeUnpickler */ object NamerOps: @@ -18,6 +19,26 @@ object NamerOps: case TypeSymbols(tparams) :: _ => ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)) case _ => ctor.owner.typeRef + /** Split dependent class refinements off parent type. Add them to `refinements`, + * unless it is null. + */ + extension (tp: Type) + def separateRefinements(cls: ClassSymbol, refinements: mutable.LinkedHashMap[Name, Type] | Null)(using Context): Type = + tp match + case RefinedType(tp1, rname, rinfo) => + try tp1.separateRefinements(cls, refinements) + finally + if refinements != null then + refinements(rname) = refinements.get(rname) match + case Some(tp) => tp & rinfo + case None => rinfo + case tp @ AnnotatedType(tp1, ann) => + tp.derivedAnnotatedType(tp1.separateRefinements(cls, refinements), ann) + case tp: RecType => + tp.parent.substRecThis(tp, cls.thisType).separateRefinements(cls, refinements) + case tp => + tp + /** If isConstructor, make sure it has at least one non-implicit parameter list * This is done by adding a () in front of a leading old style implicit parameter, * or by adding a () as last -- or only -- parameter list if the constructor has diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 04d19f2f8821..f6fa9faf0114 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1074,7 +1074,7 @@ class TreeUnpickler(reader: TastyReader, } val parentReader = fork val parents = 
readParents(withArgs = false)(using parentCtx) - val parentTypes = parents.map(_.tpe.dealias) + val parentTypes = parents.map(_.tpe.dealiasKeepAnnots.separateRefinements(cls, null)) if cls.is(JavaDefined) && parentTypes.exists(_.derivesFrom(defn.JavaAnnotationClass)) then cls.setFlag(JavaAnnotation) val self = diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala index 756fd1a0a8e7..e11d0e1e21a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -20,6 +20,7 @@ object Util: def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match case tref: TypeRef => tref + case RefinedType(parent, _, _) => typeRefOf(parent) case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index d2121ede2a67..530423fd2613 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -55,11 +55,12 @@ class Namer { typer: Typer => import untpd.* - val TypedAhead : Property.Key[tpd.Tree] = new Property.Key - val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key - val ExportForwarders: Property.Key[List[tpd.MemberDef]] = new Property.Key - val SymOfTree : Property.Key[Symbol] = new Property.Key - val AttachedDeriver : Property.Key[Deriver] = new Property.Key + val TypedAhead : Property.Key[tpd.Tree] = new Property.Key + val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key + val ExportForwarders : Property.Key[List[tpd.MemberDef]] = new Property.Key + val ParentRefinements: Property.Key[List[Symbol]] = new Property.Key + val SymOfTree : Property.Key[Symbol] = new Property.Key + val AttachedDeriver : Property.Key[Deriver] = new Property.Key // was `val Deriver`, but that gave shadowing problems with constructor proxies /** A partial 
map from unexpanded member and pattern defs and to their expansions. @@ -1515,6 +1516,7 @@ class Namer { typer: Typer => /** The type signature of a ClassDef with given symbol */ override def completeInCreationContext(denot: SymDenotation): Unit = { val parents = impl.parents + val parentRefinements = new mutable.LinkedHashMap[Name, Type] /* The type of a parent constructor. Types constructor arguments * only if parent type contains uninstantiated type parameters. @@ -1569,8 +1571,13 @@ class Namer { typer: Typer => val ptype = parentType(parent)(using completerCtx.superCallContext).dealiasKeepAnnots if (cls.isRefinementClass) ptype else { - val pt = checkClassType(ptype, parent.srcPos, - traitReq = parent ne parents.head, stablePrefixReq = !isJava) + val pt = checkClassType( + if Feature.enabled(modularity) + then ptype.separateRefinements(cls, parentRefinements) + else ptype, + parent.srcPos, + traitReq = parent ne parents.head, + stablePrefixReq = !isJava) if (pt.derivesFrom(cls)) { val addendum = parent match { case Select(qual: Super, _) if Feature.migrateTo3 => @@ -1597,6 +1604,21 @@ class Namer { typer: Typer => } } + /** Enter all parent refinements as public class members, unless a definition + * with the same name already exists in the class. 
+ */ + def enterParentRefinementSyms(refinements: List[(Name, Type)]) = + val refinedSyms = mutable.ListBuffer[Symbol]() + for (name, tp) <- refinements do + if decls.lookupEntry(name) == null then + val flags = tp match + case tp: MethodOrPoly => Method | Synthetic | Deferred + case _ => Synthetic | Deferred + refinedSyms += newSymbol(cls, name, flags, tp, coord = original.rhs.span.startPos).entered + if refinedSyms.nonEmpty then + typr.println(i"parent refinement symbols: ${refinedSyms.toList}") + original.pushAttachment(ParentRefinements, refinedSyms.toList) + /** If `parents` contains references to traits that have supertraits with implicit parameters * add those supertraits in linearization order unless they are already covered by other * parent types. For instance, in @@ -1667,6 +1689,7 @@ class Namer { typer: Typer => cls.invalidateMemberCaches() // we might have checked for a member when parents were not known yet. cls.setNoInitsFlags(parentsKind(parents), untpd.bodyKind(rest)) cls.setStableConstructor() + enterParentRefinementSyms(parentRefinements.toList) processExports(using localCtx) defn.patchStdLibClass(cls) addConstructorProxies(cls) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 46982cf1406d..c5b6faf455f7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,8 +40,7 @@ import annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature -import config.Feature.{sourceVersion, migrateTo3} +import config.Feature, Feature.{sourceVersion, migrateTo3, modularity} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -1004,10 +1003,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tp.exists && !tp.typeSymbol.is(Final) && (!tp.isTopType || tp.isAnyRef) // Object is the only toplevel class that 
can be instantiated - if (templ1.parents.isEmpty && - isFullyDefined(pt, ForceDegree.flipBottom) && - isSkolemFree(pt) && - isEligible(pt.underlyingClassRef(refinementOK = false))) + if templ1.parents.isEmpty + && isFullyDefined(pt, ForceDegree.flipBottom) + && isSkolemFree(pt) + && isEligible(pt.underlyingClassRef(refinementOK = Feature.enabled(modularity))) + then templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) for case parent: RefTree <- templ1.parents do typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) @@ -2871,6 +2871,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } + /** Add all parent refinement symbols as declarations to this class */ + def addParentRefinements(body: List[Tree])(using Context): List[Tree] = + cdef.getAttachment(ParentRefinements) match + case Some(refinedSyms) => + val refinements = refinedSyms.map: sym => + ( if sym.isType then TypeDef(sym.asType) + else if sym.is(Method) then DefDef(sym.asTerm) + else ValDef(sym.asTerm) + ).withSpan(impl.span.startPos) + body ++ refinements + case None => + body + ensureCorrectSuperClass() completeAnnotations(cdef, cls) val constr1 = typed(constr).asInstanceOf[DefDef] @@ -2891,7 +2904,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cdef.withType(UnspecifiedErrorType) else { val dummy = localDummy(cls, impl) - val body1 = addAccessorDefs(cls, typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1) + val body1 = + addParentRefinements( + addAccessorDefs(cls, + typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1)) checkNoDoubleDeclaration(cls) val impl1 = cpy.Template(impl)(constr1, parents1, Nil, self1, body1) diff --git a/tests/neg/i0248-inherit-refined.scala b/tests/neg/i0248-inherit-refined.scala index 97b6f5cdab73..f7cd6375afc9 100644 --- a/tests/neg/i0248-inherit-refined.scala +++ b/tests/neg/i0248-inherit-refined.scala @@ -1,10 +1,12 @@ +//> using options -source future 
-language:experimental.modularity + object test { class A { type T } type X = A { type T = Int } - class B extends X // error + class B extends X // was error, now OK type Y = A & B class C extends Y // error type Z = A | B class D extends Z // error - abstract class E extends ({ val x: Int }) // error + abstract class E extends ({ val x: Int }) // was error, now OK } diff --git a/tests/neg/parent-refinement-access.check b/tests/neg/parent-refinement-access.check new file mode 100644 index 000000000000..5cde9d51558f --- /dev/null +++ b/tests/neg/parent-refinement-access.check @@ -0,0 +1,7 @@ +-- [E164] Declaration Error: tests/neg/parent-refinement-access.scala:6:6 ---------------------------------------------- +6 |trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error + | ^ + | error overriding value x in trait Year2 of type Int; + | value x in trait Gen of type Any has weaker access privileges; it should be public + | (Note that value x in trait Year2 of type Int is abstract, + | and is therefore overridden by concrete value x in trait Gen of type Any) diff --git a/tests/neg/parent-refinement-access.scala b/tests/neg/parent-refinement-access.scala new file mode 100644 index 000000000000..57d45f4fb201 --- /dev/null +++ b/tests/neg/parent-refinement-access.scala @@ -0,0 +1,6 @@ +//> using options -source future -language:experimental.modularity + +trait Gen: + private[Gen] val x: Any = () + +trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error diff --git a/tests/neg/parent-refinement.check b/tests/neg/parent-refinement.check index 550430bd35a7..cf9a57bc7821 100644 --- a/tests/neg/parent-refinement.check +++ b/tests/neg/parent-refinement.check @@ -1,4 +1,25 @@ --- Error: tests/neg/parent-refinement.scala:5:2 ------------------------------------------------------------------------ -5 | with Ordered[Year] { // error - | ^^^^ - | end of toplevel definition expected but 'with' found +-- Error: 
tests/neg/parent-refinement.scala:11:6 ----------------------------------------------------------------------- +11 |class Bar extends IdOf[Int], (X { type Value = String }) // error + | ^^^ + |class Bar cannot be instantiated since it has a member Value with possibly conflicting bounds Int | String <: ... <: Int & String +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:15:17 ------------------------------------------------- +15 | val x: Value = 0 // error + | ^ + | Found: (0 : Int) + | Required: Baz.this.Value + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:21:6 -------------------------------------------------- +21 | foo(2) // error + | ^ + | Found: (2 : Int) + | Required: Boolean + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:17:22 ------------------------------------------------- +17 |val x: IdOf[Int] = Baz() // error + | ^^^^^ + | Found: Baz + | Required: IdOf[Int] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/parent-refinement.scala b/tests/neg/parent-refinement.scala index ca2b88a75fd8..868747faba57 100644 --- a/tests/neg/parent-refinement.scala +++ b/tests/neg/parent-refinement.scala @@ -1,7 +1,21 @@ +//> using options -source future -language:experimental.modularity trait Id { type Value } +trait X { type Value } +type IdOf[T] = Id { type Value = T } + case class Year(value: Int) extends AnyVal - with Id { type Value = Int } - with Ordered[Year] { // error + with (Id { type Value = Int }) + with Ordered[Year] + +class Bar extends IdOf[Int], (X { type Value = String }) // error + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = 0 // error + +val x: IdOf[Int] = Baz() // error -} \ No newline at end of file +object Clash extends ({ def foo(x: Int): Int }): + def foo(x: Boolean): Int = 1 + foo(2) // error diff 
--git a/tests/pos/parent-refinement.scala b/tests/pos/parent-refinement.scala new file mode 100644 index 000000000000..eaa74228c5d6 --- /dev/null +++ b/tests/pos/parent-refinement.scala @@ -0,0 +1,48 @@ +//> using options -source future -language:experimental.modularity + +class A +class B extends A +class C extends B + +trait Id { type Value } +type IdOf[T] = Id { type Value = T } +trait X { type Value } + +case class Year(value: Int) extends IdOf[Int]: + val x: Value = 2 + +type Between[Lo, Hi] = X { type Value >: Lo <: Hi } + +class Foo() extends IdOf[B], Between[C, A]: + val x: Value = B() + +trait Bar extends IdOf[Int], (X { type Value = String }) + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = "" + +trait Gen: + type T + val x: T + +type IntInst = Gen: + type T = Int + val x: 0 + +trait IntInstTrait extends IntInst + +abstract class IntInstClass extends IntInstTrait, IntInst + +object obj1 extends IntInstTrait: + val x = 0 + +object obj2 extends IntInstClass: + val x = 0 + +def main = + val x: obj1.T = 2 - obj2.x + val y: obj2.T = 2 - obj1.x + + + diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala index 07fe5a31ce5d..2bf7f76f0804 100644 --- a/tests/pos/typeclasses.scala +++ b/tests/pos/typeclasses.scala @@ -1,7 +1,6 @@ -class Common: +//> using options -source future -language:experimental.modularity - // this should go in Predef - infix type at [A <: { type This}, B] = A { type This = B } +class Common: trait Ord: type This @@ -26,41 +25,23 @@ class Common: extension [A](x: This[A]) def flatMap[B](f: A => This[B]): This[B] def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + infix type is[A <: AnyKind, B <: {type This <: AnyKind}] = B { type This = A } + end Common object Instances extends Common: -/* - instance Int: Ord as intOrd with - extension (x: Int) - def compareTo(y: Int) = - if x < y then -1 - else if x > y then +1 - else 0 -*/ - given intOrd: Ord with + given intOrd: (Int is Ord) with type This = Int 
extension (x: Int) def compareTo(y: Int) = if x < y then -1 else if x > y then +1 else 0 -/* - instance List[T: Ord]: Ord as listOrd with - extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs1, y :: ys1) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs1.compareTo(ys1) -*/ - // Proposed short syntax: - // given listOrd[T: Ord as ord]: Ord at T with - given listOrd[T](using ord: Ord { type This = T}): Ord with - type This = List[T] + given listOrd[T](using ord: T is Ord): (List[T] is Ord) with extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -70,32 +51,18 @@ object Instances extends Common: if (fst != 0) fst else xs1.compareTo(ys1) end listOrd -/* - instance List: Monad as listMonad with + given listMonad: (List is Monad) with extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = List(x) -*/ - given listMonad: Monad with - type This[A] = List[A] - extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = - xs.flatMap(f) - def pure[A](x: A): List[A] = - List(x) -/* - type Reader[Ctx] = X =>> Ctx => X - instance Reader[Ctx: _]: Monad as readerMonad with - extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = - ctx => f(r(ctx))(ctx) - def pure[A](x: A): Ctx => A = - ctx => x -*/ + type Reader[Ctx] = [X] =>> Ctx => X - given readerMonad[Ctx]: Monad with - type This[X] = Ctx => X + //given [Ctx] => Reader[Ctx] is Monad as readerMonad: + + given readerMonad[Ctx]: (Reader[Ctx] is Monad) with extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -110,29 +77,17 @@ object Instances extends Common: def second = xs.tail.head def third = xs.tail.tail.head - //Proposed short syntax: - //extension [M: Monad as m, A](xss: M[M[A]]) - // def 
flatten: M[A] = - // xs.flatMap(identity) - extension [M, A](using m: Monad)(xss: m.This[m.This[A]]) def flatten: m.This[A] = xss.flatMap(identity) - // Proposed short syntax: - //def maximum[T: Ord](xs: List[T]: T = - def maximum[T](xs: List[T])(using Ord at T): T = + def maximum[T](xs: List[T])(using T is Ord): T = xs.reduceLeft((x, y) => if (x < y) y else x) - // Proposed short syntax: - // def descending[T: Ord as asc]: Ord at T = new Ord: - def descending[T](using asc: Ord at T): Ord at T = new Ord: - type This = T + def descending[T](using asc: T is Ord): T is Ord = new: extension (x: T) def compareTo(y: T) = asc.compareTo(y)(x) - // Proposed short syntax: - // def minimum[T: Ord](xs: List[T]) = - def minimum[T](xs: List[T])(using Ord at T) = + def minimum[T](xs: List[T])(using T is Ord) = maximum(xs)(using descending) def test(): Unit = @@ -177,10 +132,10 @@ instance Sheep: Animal with override def talk(): Unit = println(s"$name pauses briefly... $noise") */ +import Instances.is // Implement the `Animal` trait for `Sheep`. -given Animal with - type This = Sheep +given (Sheep is Animal) with def apply(name: String) = Sheep(name) extension (self: This) def name: String = self.name From 96c76e91ff4a9a43e020baf84fd40c7ffdafa387 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 19:11:11 +0200 Subject: [PATCH 343/465] Introduce tracked class parameters For a tracked class parameter we add a refinement in the constructor type that the class member is the same as the parameter. E.g. ```scala class C { type T } class D(tracked val x: C) { type T = x.T } ``` This will generate the constructor type: ```scala (x1: C): D { val x: x1.type } ``` Without `tracked` the refinement would not be added. This can solve several problems with dependent class types where previously we lost track of type dependencies. 
[Cherry-picked 5189e6854ad1dacc3454542c2f124f5bcb7e2a9c] --- .../src/dotty/tools/dotc/ast/Desugar.scala | 13 +- compiler/src/dotty/tools/dotc/ast/untpd.scala | 2 + .../src/dotty/tools/dotc/core/Flags.scala | 9 +- .../src/dotty/tools/dotc/core/NamerOps.scala | 17 +- .../dotc/core/PatternTypeConstrainer.scala | 9 +- .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../tools/dotc/core/SymDenotations.scala | 12 +- .../src/dotty/tools/dotc/core/TypeUtils.scala | 15 +- .../tools/dotc/core/tasty/TreePickler.scala | 1 + .../tools/dotc/core/tasty/TreeUnpickler.scala | 6 +- .../dotty/tools/dotc/parsing/Parsers.scala | 15 +- .../tools/dotc/printing/PlainPrinter.scala | 2 +- .../dotty/tools/dotc/printing/Printer.scala | 5 +- .../tools/dotc/transform/PostTyper.scala | 16 +- .../src/dotty/tools/dotc/typer/Checking.scala | 13 +- .../src/dotty/tools/dotc/typer/Namer.scala | 55 +++-- .../dotty/tools/dotc/typer/RefChecks.scala | 17 +- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- .../test/dotc/pos-test-pickling.blacklist | 5 + docs/_docs/internals/syntax.md | 2 +- .../reference/experimental/modularity.md | 189 ++++++++++++++++++ docs/sidebar.yml | 1 + project/MiMaFilters.scala | 3 + tasty/src/dotty/tools/tasty/TastyFormat.scala | 5 +- tests/neg/i3964.scala | 12 ++ tests/neg/tracked.check | 50 +++++ tests/neg/tracked.scala | 20 ++ tests/neg/tracked2.scala | 1 + tests/new/tracked-mixin-traits.scala | 16 ++ tests/pos/depclass-1.scala | 19 ++ tests/pos/i3920.scala | 32 +++ tests/pos/i3964.scala | 32 +++ tests/pos/i3964a/Defs_1.scala | 18 ++ tests/pos/i3964a/Uses_2.scala | 16 ++ tests/pos/parsercombinators-expanded.scala | 64 ++++++ tests/pos/parsercombinators-givens-2.scala | 52 +++++ tests/pos/parsercombinators-givens.scala | 54 +++++ tests/run/i3920.scala | 26 +++ 38 files changed, 758 insertions(+), 69 deletions(-) create mode 100644 docs/_docs/reference/experimental/modularity.md create mode 100644 tests/neg/i3964.scala create mode 100644 tests/neg/tracked.check create mode 
100644 tests/neg/tracked.scala create mode 100644 tests/neg/tracked2.scala create mode 100644 tests/new/tracked-mixin-traits.scala create mode 100644 tests/pos/depclass-1.scala create mode 100644 tests/pos/i3920.scala create mode 100644 tests/pos/i3964.scala create mode 100644 tests/pos/i3964a/Defs_1.scala create mode 100644 tests/pos/i3964a/Uses_2.scala create mode 100644 tests/pos/parsercombinators-expanded.scala create mode 100644 tests/pos/parsercombinators-givens-2.scala create mode 100644 tests/pos/parsercombinators-givens.scala create mode 100644 tests/run/i3920.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 1801a7fada7c..c3a0c05088cb 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -429,13 +429,13 @@ object desugar { private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { var mods = tparam.rawMods if (!keepAnnotations) mods = mods.withAnnotations(Nil) - tparam.withMods(mods & (EmptyFlags | Sealed) | Param) + tparam.withMods(mods & EmptyFlags | Param) } private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { var mods = vparam.rawMods if (!keepAnnotations) mods = mods.withAnnotations(Nil) val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) + vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = @@ -860,9 +860,8 @@ object desugar { // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. 
DefDef( - className.toTermName, joinParams(constrTparams, defParamss), - classTypeRef, creatorExpr) - .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) + className.toTermName, joinParams(constrTparams, defParamss), classTypeRef, creatorExpr + ) .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) .withSpan(cdef.span) :: Nil } @@ -890,7 +889,9 @@ object desugar { } if mods.isAllOf(Given | Inline | Transparent) then report.error("inline given instances cannot be trasparent", cdef) - val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + var classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + if vparamAccessors.exists(_.mods.is(Tracked)) then + classMods |= Dependent cpy.TypeDef(cdef: TypeDef)( name = className, rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 0dfe52c421d9..91ef462bcf05 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -230,6 +230,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) + case class Tracked()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Tracked) + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 98c57a96a5c0..2bc7610bb0ce 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -377,6 +377,9 @@ object Flags { /** Symbol cannot be found as a member during typer */ val 
(Invisible @ _, _, _) = newFlags(45, "") + /** Tracked modifier for class parameter / a class with some tracked parameters */ + val (Tracked @ _, _, Dependent @ _) = newFlags(46, "tracked") + // ------------ Flags following this one are not pickled ---------------------------------- /** Symbol is not a member of its owner */ @@ -452,7 +455,7 @@ object Flags { CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open val TermSourceModifierFlags: FlagSet = - CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy + CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy | Tracked /** Flags representing modifiers that can appear in trees */ val ModifierFlags: FlagSet = @@ -466,7 +469,7 @@ object Flags { val FromStartFlags: FlagSet = commonFlags( Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessor, Scala2SpecialFlags, MutableOrOpen, Opaque, Touched, JavaStatic, - OuterOrCovariant, LabelOrContravariant, CaseAccessor, + OuterOrCovariant, LabelOrContravariant, CaseAccessor, Tracked, Extension, NonMember, Implicit, Given, Permanent, Synthetic, Exported, SuperParamAliasOrScala2x, Inline, Macro, ConstructorProxy, Invisible) @@ -477,7 +480,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent, Tracked) /** A value that's unstable unless complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 8d096913e285..af03573da4a8 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -16,8 +16,21 @@ object NamerOps: */ def effectiveResultType(ctor: Symbol, paramss: List[List[Symbol]])(using Context): 
Type = paramss match - case TypeSymbols(tparams) :: _ => ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)) - case _ => ctor.owner.typeRef + case TypeSymbols(tparams) :: rest => + addParamRefinements(ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)), rest) + case _ => + addParamRefinements(ctor.owner.typeRef, paramss) + + /** Given a method with tracked term-parameters `p1, ..., pn`, and result type `R`, add the + * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the term parameter ref + * of the parameter and pi is its name. This matters only under experimental.modularity, + * since without it there are no tracked parameters. Parameter refinements are added for + * constructors and given companion methods. + */ + def addParamRefinements(resType: Type, paramss: List[List[Symbol]])(using Context): Type = + paramss.flatten.foldLeft(resType): (rt, param) => + if param.is(Tracked) then RefinedType(rt, param.name, param.termRef) + else rt /** Split dependent class refinements off parent type. Add them to `refinements`, * unless it is null.
diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 6d6a47cf6a1e..9baf0c40a80b 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -88,11 +88,6 @@ trait PatternTypeConstrainer { self: TypeComparer => } } - def stripRefinement(tp: Type): Type = tp match { - case tp: RefinedOrRecType => stripRefinement(tp.parent) - case tp => tp - } - def tryConstrainSimplePatternType(pat: Type, scrut: Type) = { val patCls = pat.classSymbol val scrCls = scrut.classSymbol @@ -182,14 +177,14 @@ trait PatternTypeConstrainer { self: TypeComparer => case AndType(scrut1, scrut2) => constrainPatternType(pat, scrut1) && constrainPatternType(pat, scrut2) case scrut: RefinedOrRecType => - constrainPatternType(pat, stripRefinement(scrut)) + constrainPatternType(pat, scrut.stripRefinement) case scrut => dealiasDropNonmoduleRefs(pat) match { case OrType(pat1, pat2) => either(constrainPatternType(pat1, scrut), constrainPatternType(pat2, scrut)) case AndType(pat1, pat2) => constrainPatternType(pat1, scrut) && constrainPatternType(pat2, scrut) case pat: RefinedOrRecType => - constrainPatternType(stripRefinement(pat), scrut) + constrainPatternType(pat.stripRefinement, scrut) case pat => tryConstrainSimplePatternType(pat, scrut) || classesMayBeCompatible && constrainUpcasted(scrut) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 62d7afa22ed2..7545cf5c4ba1 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -629,6 +629,7 @@ object StdNames { val toString_ : N = "toString" val toTypeConstructor: N = "toTypeConstructor" val tpe : N = "tpe" + val tracked: N = "tracked" val transparent : N = "transparent" val tree : N = "tree" val true_ : N = "true" diff --git 
a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 09d45dbdf06b..49c466f0bfd5 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1187,21 +1187,25 @@ object SymDenotations { final def isExtensibleClass(using Context): Boolean = isClass && !isOneOf(FinalOrModuleClass) && !isAnonymousClass - /** A symbol is effectively final if it cannot be overridden in a subclass */ + /** A symbol is effectively final if it cannot be overridden */ final def isEffectivelyFinal(using Context): Boolean = isOneOf(EffectivelyFinalFlags) || is(Inline, butNot = Deferred) || is(JavaDefinedVal, butNot = Method) || isConstructor - || !owner.isExtensibleClass + || !owner.isExtensibleClass && !is(Deferred) + // Deferred symbols can arise through parent refinements. + // For them, the overriding relationship reverses anyway, so + // being in a final class does not mean the symbol cannot be + // implemented concretely in a superclass. /** A class is effectively sealed if has the `final` or `sealed` modifier, or it * is defined in Scala 3 and is neither abstract nor open. 
*/ final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) - || isClass && (!isOneOf(EffectivelyOpenFlags) - || isLocalToCompilationUnit) + || isClass + && (!isOneOf(EffectivelyOpenFlags) || isLocalToCompilationUnit) final def isLocalToCompilationUnit(using Context): Boolean = is(Private) diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index d4be03e9aae4..dd881bb1adf6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -7,12 +7,13 @@ import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* import Names.{Name, TermName} import Constants.Constant -class TypeUtils { +import Names.Name +class TypeUtils: /** A decorator that provides methods on types * that are needed in the transformer pipeline. */ - extension (self: Type) { + extension (self: Type) def isErasedValueType(using Context): Boolean = self.isInstanceOf[ErasedValueType] @@ -178,5 +179,11 @@ class TypeUtils { def isThisTypeOf(cls: Symbol)(using Context) = self match case self: Types.ThisType => self.cls == cls case _ => false - } -} + + /** Strip all outer refinements off this type */ + def stripRefinement: Type = self match + case self: RefinedOrRecType => self.parent.stripRefinement + case seld => self + +end TypeUtils + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 186e039c4d74..8d1eca8fb5f0 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -867,6 +867,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { if (flags.is(Exported)) writeModTag(EXPORTED) if (flags.is(Given)) writeModTag(GIVEN) if (flags.is(Implicit)) writeModTag(IMPLICIT) + if (flags.is(Tracked)) writeModTag(TRACKED) if (isTerm) { if (flags.is(Lazy, butNot = Module)) 
writeModTag(LAZY) if (flags.is(AbsOverride)) { writeModTag(ABSTRACT); writeModTag(OVERRIDE) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index f6fa9faf0114..15f58956fbe3 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -31,7 +31,8 @@ import util.{SourceFile, Property} import ast.{Trees, tpd, untpd} import Trees.* import Decorators.* -import dotty.tools.dotc.quoted.QuotePatterns +import config.Feature +import quoted.QuotePatterns import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer.* @@ -755,6 +756,7 @@ class TreeUnpickler(reader: TastyReader, case INVISIBLE => addFlag(Invisible) case TRANSPARENT => addFlag(Transparent) case INFIX => addFlag(Infix) + case TRACKED => addFlag(Tracked) case PRIVATEqualified => readByte() privateWithin = readWithin @@ -922,6 +924,8 @@ class TreeUnpickler(reader: TastyReader, val resType = if name == nme.CONSTRUCTOR then effectiveResultType(sym, paramss) + else if sym.isAllOf(Given | Method) && Feature.enabled(Feature.modularity) then + addParamRefinements(tpt.tpe, paramss) else tpt.tpe sym.info = methodType(paramss, resType) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 8d5c50d6d608..94814457523e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3189,6 +3189,7 @@ object Parsers { case nme.open => Mod.Open() case nme.transparent => Mod.Transparent() case nme.infix => Mod.Infix() + case nme.tracked => Mod.Tracked() } } @@ -3255,7 +3256,8 @@ object Parsers { * | AccessModifier * | override * | opaque - * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | inline | transparent | infix | erased + * LocalModifier ::= abstract | final | sealed | open | implicit | lazy 
| erased | + * inline | transparent | infix */ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { @tailrec @@ -3408,8 +3410,8 @@ object Parsers { /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} - * ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’)] Param - * + * ClsParam ::= {Annotation} + * [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param * TypelessClause ::= DefTermParamClause * | UsingParamClause * @@ -3445,6 +3447,8 @@ object Parsers { if isErasedKw then mods = addModifier(mods) if paramOwner.isClass then + if isIdent(nme.tracked) && in.featureEnabled(Feature.modularity) && !in.lookahead.isColon then + mods = addModifier(mods) mods = addFlag(modifiers(start = mods), ParamAccessor) mods = if in.token == VAL then @@ -3516,7 +3520,8 @@ object Parsers { val isParams = !impliedMods.is(Given) || startParamTokens.contains(in.token) - || isIdent && (in.name == nme.inline || in.lookahead.isColon) + || isIdent + && (in.name == nme.inline || in.name == nme.tracked || in.lookahead.isColon) (mods, isParams) (if isParams then commaSeparated(() => param()) else contextTypes(paramOwner, numLeadParams, impliedMods)) match { @@ -4104,7 +4109,7 @@ object Parsers { def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = paramss.nestedMap: param => if !param.mods.isAllOf(PrivateLocal) then - syntaxError(em"method parameter ${param.name} may not be `a val`", param.span) + syntaxError(em"method parameter ${param.name} may not be a `val`", param.span) param.withMods(param.mods &~ (AccessFlags | ParamAccessor | Mutable) | Param) .asInstanceOf[List[ParamClause]] diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 87f7c88e0407..5808707326a0 100644 --- 
a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -113,7 +113,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Text = + def toTextRefinement(rt: RefinedType): Text = val keyword = rt.refinedInfo match { case _: ExprType | _: MethodOrPoly => "def " case _: TypeBounds => "type " diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 8687925ed5fb..297dc31ea94a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -4,7 +4,7 @@ package printing import core.* import Texts.*, ast.Trees.* -import Types.{Type, SingletonType, LambdaParam, NamedType}, +import Types.{Type, SingletonType, LambdaParam, NamedType, RefinedType}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context import typer.Implicits.* @@ -104,6 +104,9 @@ abstract class Printer { /** Textual representation of a prefix of some reference, ending in `.` or `#` */ def toTextPrefixOf(tp: NamedType): Text + /** textual representation of a refinement, with no enclosing {...} */ + def toTextRefinement(rt: RefinedType): Text + /** Textual representation of a reference in a capture set */ def toTextCaptureRef(tp: Type): Text diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index d107de31829f..954b08c24ac1 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -369,11 +369,15 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => case Select(nu: New, nme.CONSTRUCTOR) if 
isCheckable(nu) => // need to check instantiability here, because the type of the New itself // might be a type constructor. - ctx.typer.checkClassType(tree.tpe, tree.srcPos, traitReq = false, stablePrefixReq = true) + def checkClassType(tpe: Type, stablePrefixReq: Boolean) = + ctx.typer.checkClassType(tpe, tree.srcPos, + traitReq = false, stablePrefixReq = stablePrefixReq, + refinementOK = Feature.enabled(Feature.modularity)) + checkClassType(tree.tpe, true) if !nu.tpe.isLambdaSub then // Check the constructor type as well; it could be an illegal singleton type // which would not be reflected as `tree.tpe` - ctx.typer.checkClassType(nu.tpe, tree.srcPos, traitReq = false, stablePrefixReq = false) + checkClassType(nu.tpe, false) Checking.checkInstantiable(tree.tpe, nu.tpe, nu.srcPos) withNoCheckNews(nu :: Nil)(app1) case _ => @@ -448,8 +452,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => // Constructor parameters are in scope when typing a parent. // While they can safely appear in a parent tree, to preserve // soundness we need to ensure they don't appear in a parent - // type (#16270). - val illegalRefs = parent.tpe.namedPartsWith(p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym)) + // type (#16270). We can strip any refinement of a parent type since + // these refinements are split off from the parent type constructor + // application `parent` in Namer and don't show up as parent types + // of the class. 
+ val illegalRefs = parent.tpe.dealias.stripRefinement.namedPartsWith: + p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym) if illegalRefs.nonEmpty then report.error( em"The type of a class parent cannot refer to constructor parameters, but ${parent.tpe} refers to ${illegalRefs.map(_.name.show).mkString(",")}", parent.srcPos) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 7745c620312c..5839ec1766af 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -33,8 +33,7 @@ import Applications.UnapplyArgs import Inferencing.isFullyDefined import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern} import transform.ValueClasses.underlyingOfValueClass -import config.Feature -import config.Feature.sourceVersion +import config.Feature, Feature.{sourceVersion, modularity} import config.SourceVersion.* import config.MigrationVersion import printing.Formatting.hlAsKeyword @@ -198,7 +197,7 @@ object Checking { * and that the instance conforms to the self type of the created class. */ def checkInstantiable(tp: Type, srcTp: Type, pos: SrcPos)(using Context): Unit = - tp.underlyingClassRef(refinementOK = false) match + tp.underlyingClassRef(refinementOK = Feature.enabled(modularity)) match case tref: TypeRef => val cls = tref.symbol if (cls.isOneOf(AbstractOrTrait)) { @@ -601,6 +600,7 @@ object Checking { // The issue with `erased inline` is that the erased semantics get lost // as the code is inlined and the reference is removed before the erased usage check. checkCombination(Erased, Inline) + checkNoConflict(Tracked, Mutable, em"mutable variables may not be `tracked`") checkNoConflict(Lazy, ParamAccessor, em"parameter may not be `lazy`") } @@ -1067,8 +1067,8 @@ trait Checking { * check that class prefix is stable. * @return `tp` itself if it is a class or trait ref, ObjectType if not. 
*/ - def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = - tp.underlyingClassRef(refinementOK = false) match { + def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean = false)(using Context): Type = + tp.underlyingClassRef(refinementOK) match case tref: TypeRef => if (traitReq && !tref.symbol.is(Trait)) report.error(TraitIsExpected(tref.symbol), pos) if (stablePrefixReq && ctx.phase <= refchecksPhase) checkStable(tref.prefix, pos, "class prefix") @@ -1076,7 +1076,6 @@ trait Checking { case _ => report.error(NotClassType(tp), pos) defn.ObjectType - } /** If `sym` is an old-style implicit conversion, check that implicit conversions are enabled. * @pre sym.is(GivenOrImplicit) @@ -1626,7 +1625,7 @@ trait NoChecking extends ReChecking { override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = () override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () - override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp + override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () override def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = () override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 530423fd2613..e48c2fdf5066 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -122,7 
+122,8 @@ class Namer { typer: Typer => /** Record `sym` as the symbol defined by `tree` */ def recordSym(sym: Symbol, tree: Tree)(using Context): Symbol = { - for (refs <- tree.removeAttachment(References); ref <- refs) ref.watching(sym) + for refs <- tree.removeAttachment(References); ref <- refs do + ref.watching(sym) tree.pushAttachment(SymOfTree, sym) sym } @@ -295,12 +296,15 @@ class Namer { typer: Typer => createOrRefine[Symbol](tree, name, flags, ctx.owner, _ => info, (fs, _, pwithin) => newSymbol(ctx.owner, name, fs, info, pwithin, tree.nameSpan)) case tree: Import => - recordSym(newImportSymbol(ctx.owner, Completer(tree)(ctx), tree.span), tree) + recordSym(newImportSym(tree), tree) case _ => NoSymbol } } + private def newImportSym(imp: Import)(using Context): Symbol = + newImportSymbol(ctx.owner, Completer(imp)(ctx), imp.span) + /** If `sym` exists, enter it in effective scope. Check that * package members are not entered twice in the same run. */ @@ -525,11 +529,9 @@ class Namer { typer: Typer => } /** Transfer all references to `from` to `to` */ - def transferReferences(from: ValDef, to: ValDef): Unit = { - val fromRefs = from.removeAttachment(References).getOrElse(Nil) - val toRefs = to.removeAttachment(References).getOrElse(Nil) - to.putAttachment(References, fromRefs ++ toRefs) - } + def transferReferences(from: ValDef, to: ValDef): Unit = + for ref <- from.removeAttachment(References).getOrElse(Nil) do + ref.watching(to) /** Merge the module class `modCls` in the expanded tree of `mdef` with the * body and derived clause of the synthetic module class `fromCls`. @@ -707,7 +709,18 @@ class Namer { typer: Typer => enterSymbol(companion) end addAbsentCompanions - stats.foreach(expand) + /** Expand each statement, keeping track of language imports in the context. This is + * necessary since desugaring might depend on language imports. 
+ */ + def expandTopLevel(stats: List[Tree])(using Context): Unit = stats match + case (imp @ Import(qual, _)) :: stats1 if untpd.languageImport(qual).isDefined => + expandTopLevel(stats1)(using ctx.importContext(imp, newImportSym(imp))) + case stat :: stats1 => + expand(stat) + expandTopLevel(stats1) + case Nil => + + expandTopLevel(stats) mergeCompanionDefs() val ctxWithStats = stats.foldLeft(ctx)((ctx, stat) => indexExpanded(stat)(using ctx)) inContext(ctxWithStats) { @@ -1530,8 +1543,9 @@ class Namer { typer: Typer => core match case Select(New(tpt), nme.CONSTRUCTOR) => val targs1 = targs map (typedAheadType(_)) - val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes - if (ptype.typeParams.isEmpty) ptype + val ptype = typedAheadType(tpt).tpe.appliedTo(targs1.tpes) + if ptype.typeParams.isEmpty && !ptype.dealias.typeSymbol.is(Dependent) then + ptype else if (denot.is(ModuleClass) && denot.sourceModule.isOneOf(GivenOrImplicit)) missingType(denot.symbol, "parent ")(using creationContext) @@ -1612,7 +1626,8 @@ class Namer { typer: Typer => for (name, tp) <- refinements do if decls.lookupEntry(name) == null then val flags = tp match - case tp: MethodOrPoly => Method | Synthetic | Deferred + case tp: MethodOrPoly => Method | Synthetic | Deferred | Tracked + case _ if name.isTermName => Synthetic | Deferred | Tracked case _ => Synthetic | Deferred refinedSyms += newSymbol(cls, name, flags, tp, coord = original.rhs.span.startPos).entered if refinedSyms.nonEmpty then @@ -1660,11 +1675,9 @@ class Namer { typer: Typer => val parentTypes = defn.adjustForTuple(cls, cls.typeParams, defn.adjustForBoxedUnit(cls, - addUsingTraits( - locally: - val isJava = ctx.isJava - ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) - ) + addUsingTraits: + val isJava = ctx.isJava + ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) ) ) typr.println(i"completing $denot, parents = $parents%, %, parentTypes = $parentTypes%, %") @@ -1824,7 +1837,7 @@ class 
Namer { typer: Typer => } /** The type signature of a DefDef with given symbol */ - def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = { + def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR @@ -1863,13 +1876,19 @@ class Namer { typer: Typer => def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) + + def wrapRefinedMethType(restpe: Type): Type = + wrapMethType(addParamRefinements(restpe, paramSymss)) + if isConstructor then // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) wrapMethType(effectiveResultType(sym, paramSymss)) + else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then + valOrDefDefSig(ddef, sym, paramSymss, wrapRefinedMethType) else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) - } + end defDefSig def inferredResultType( mdef: ValOrDefDef, diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 2bf4b959ebca..7cd1d67e9aa5 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -610,8 +610,13 @@ object RefChecks { overrideError("is not inline, cannot implement an inline method") else if (other.isScala2Macro && !member.isScala2Macro) // (1.11) overrideError("cannot be used here - only Scala-2 macros can override Scala-2 macros") - else if (!compatTypes(memberTp(self), otherTp(self)) && - !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf))) + else if !compatTypes(memberTp(self), otherTp(self)) + && !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf)) + && !member.is(Tracked) + // Tracked members need to be excluded since they are 
abstract type members with + // singleton types. Concrete overrides usually have a wider type. + // TODO: Should we exclude all refinements inherited from parents? + then overrideError("has incompatible type", compareTypes = true) else if (member.targetName != other.targetName) if (other.targetName != other.name) @@ -620,7 +625,9 @@ object RefChecks { overrideError("cannot have a @targetName annotation since external names would be different") else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then overrideError("has different occurrences of `into` modifiers", compareTypes = true) - else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.12) + else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) + && !member.is(Tracked) + then // (1.12) report.errorOrMigrationWarning( em"cannot override val parameter ${other.showLocated}", member.srcPos, @@ -670,6 +677,10 @@ object RefChecks { mbr.isType || mbr.isSuperAccessor // not yet synthesized || mbr.is(JavaDefined) && hasJavaErasedOverriding(mbr) + || mbr.is(Tracked) + // Tracked members correspond to existing val parameters, so they don't + // count as deferred. The val parameter could not implement the tracked + // refinement since it usually has a wider type. 
def isImplemented(mbr: Symbol) = val mbrDenot = mbr.asSeenFrom(clazz.thisType) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index c5b6faf455f7..8f2b7ce95785 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4416,7 +4416,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = false) + val tycon = ctorResultType.underlyingClassRef(refinementOK = true) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index a856a5b84d92..ad9befa72f5f 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -124,3 +124,8 @@ i19955a.scala i19955b.scala i20053b.scala +# alias types at different levels of dereferencing +parsercombinators-givens.scala +parsercombinators-givens-2.scala + + diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 8cc070d5dbc5..c711d5f63db8 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -372,7 +372,7 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var - [{Modifier} (‘val’ | ‘var’)] Param + [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent DefParamClause ::= DefTypeParamClause diff --git a/docs/_docs/reference/experimental/modularity.md b/docs/_docs/reference/experimental/modularity.md new file 
mode 100644 index 000000000000..2062c4d5eda2 --- /dev/null +++ b/docs/_docs/reference/experimental/modularity.md @@ -0,0 +1,189 @@ +--- +layout: doc-page +title: "Modularity Improvements" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/modularity.html +--- + +# Modularity Improvements + +Martin Odersky, 7.1.2024 + +Scala is a language in the SML tradition, in the sense that it has +abstract and alias types as members of modules (which in Scala take the form of objects and classes). This leads to a simple dependently +typed system, where dependencies in types are on paths instead of full terms. + +So far, some key ingredients were lacking which meant that module composition with functors is harder in Scala than in SML. In particular, one often needs to resort to the infamous `Aux` pattern that lifts type members into type parameters so that they can be tracked across class instantiations. This makes modular, dependently typed programs +much harder to write and read, and makes such programming only accessible to experts. + +In this note I propose some small changes to Scala's dependent typing that make +modular programming much more straightforward. + +The suggested improvements have been implemented and are available +in source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +## Tracked Parameters + +Scala is dependently typed for functions, but unfortunately not for classes. +For instance, consider the following definitions: + +```scala + class C: + type T + ... + + def f(x: C): x.T = ... + + val y: C { type T = Int } +``` +Then `f(y)` would have type `Int`, since the compiler will substitute the +concrete parameter reference `y` for the formal parameter `x` in the result +type of `f`, and `y.T = Int`. + +However, if we use a class `F` instead of a method `f`, things go wrong. 
+ +```scala + class F(val x: C): + val result: x.T = ... +``` +Now `F(y).result` would not have type `Int` but instead the rather less useful type `?1.T` where `?1` is a so-called skolem constant of type `C` (a skolem represents an unknown value). + +This shortcoming means that classes cannot really be used for advanced +modularity constructs that rely on dependent typing. + +**Proposal:** Introduce a `tracked` modifier that can be added to +a `val` parameter of a class or trait. For every tracked class parameter of a class `C`, add a refinement in the constructor type of `C` that the class member is the same as the parameter. + +**Example:** In the setting above, assume `F` is instead declared like this: +```scala + class F(tracked val x: C): + val result: x.T = ... +``` +Then the constructor `F` would get roughly the following type: +```scala + F(x1: C): F { val x: x1.type } +``` +_Aside:_ More precisely, both parameter and refinement would apply to the same name `x` but the refinement still refers to the parameter. We unfortunately can't express that in source, however, so we chose the new name `x1` for the parameter in the explanation. + +With the new constructor type, the expression `F(y).result` would now have the type `Int`, as hoped for. The reasoning to get there is as follows: + + - The result of the constructor `F(y)` has type `F { val x: y.type }` by + the standard typing for dependent functions. + - The type of `result` inside `F` is `x.T`. + - Hence, the type of `result` as a member of `F { val x: y.type }` is `y.T`, which is equal to `Int`. + +The addition of tracked parameters makes classes suitable as a fundamental modularity construct supporting dependent typing. 
Here is an example, taken from issue #3920: + +```scala +trait Ordering: + type T + def compare(t1:T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) +``` +This works as it should now. Without the addition of `tracked` to the +parameter of `SetFunctor` typechecking would immediately lose track of +the element type `T` after an `add`, and would therefore fail. + +**Syntax Change** + +``` +ClsParam ::= {Annotation} [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param +``` + +The (soft) `tracked` modifier is only allowed for `val` parameters of classes. + +**Discussion** + +Since `tracked` is so useful, why not assume it by default? First, `tracked` makes sense only for `val` parameters. If a class parameter is not also a field declared using `val` then there's nothing to refine in the constructor result type. One could think of at least making all `val` parameters tracked by default, but that would be a backwards incompatible change. For instance, the following code would break: + +```scala +case class Foo(x: Int) +var foo = Foo(1) +if someCondition then foo = Foo(2) +``` +If we assume `tracked` for parameter `x` (which is implicitly a `val`), +then `foo` would get inferred type `Foo { val x: 1 }`, so it could not +be reassigned to a value of type `Foo { val x: 2 }` on the next line. + +Another approach might be to assume `tracked` for a `val` parameter `x` +only if the class refers to a type member of `x`. 
But it turns out that this +scheme is unimplementable since it would quickly lead to cyclic references +when typechecking recursive class graphs. So an explicit `tracked` looks like the best available option. + +## Allow Class Parents to be Refined Types + +Since `tracked` parameters create refinements in constructor types, +it is now possible that a class has a parent that is a refined type. +Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing to +admit such types. + +**Proposal** Allow refined types as parent types of classes. All refinements that are inherited in this way become synthetic members of the class. + +**Example** + +```scala +class C: + type T + def m(): T + +type R = C: + type T = Int + def m(): 22 + +class D extends R: + def next(): D +``` +This code now compiles. The definition of `D` is expanded as follows: + +```scala +class D extends C: + def next(): D + /*synthetic*/ type T = Int + /*synthetic*/ def m(): 22 +``` +Note how class refinements are moved from the parent constructor of `D` into the body of class `D` itself. + +This change does not entail a syntax change. Syntactically, parent types cannot be refined types themselves. So the following would be illegal: +```scala +class D extends C { type T = Int; def m(): 22 }: // error + def next(): D +``` +If a refined type should be used directly as a parent type of a class, it needs to come in parentheses: +```scala +class D extends (C { type T = Int; def m(): 22 }) // ok + def next(): D +``` + +## A Small Relaxation To Export Rules + +The rules for export forwarders are changed as follows. + +Previously, all export forwarders were declared `final`. Now, only term members are declared `final`. Type aliases are left aside. + +This makes it possible to export the same type member into several traits and then mix these traits in the same class. 
The test file `tests/pos/typeclass-aggregates.scala` shows why this is essential if we want to combine multiple givens with type members in a new given that aggregates all these givens in an intersection type. + +The change does not lose safety since different type aliases would in any case lead to uninstantiatable classes. \ No newline at end of file diff --git a/docs/sidebar.yml b/docs/sidebar.yml index b38e057f06b1..160698f1f44b 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -155,6 +155,7 @@ subsection: - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md - page: reference/experimental/named-tuples.md + - page: reference/experimental/modularity.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 40a3918b5943..3b28733226a0 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -18,6 +18,8 @@ object MiMaFilters { ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromIArray"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), ), // Additions since last LTS @@ -95,6 +97,7 @@ object MiMaFilters { // Additions that require a new minor version of tasty core Build.mimaPreviousDottyVersion -> Seq( + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.TRACKED"), ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype") ), // Additions since last LTS diff --git 
a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 164243d3b469..c29ea99bcd8d 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -228,6 +228,7 @@ Standard-Section: "ASTs" TopLevelStat* EXPORTED -- An export forwarder OPEN -- an open class INVISIBLE -- invisible during typechecking + TRACKED -- a tracked class parameter / a dependent class Annotation Variance = STABLE -- invariant @@ -509,6 +510,7 @@ object TastyFormat { final val INVISIBLE = 44 final val EMPTYCLAUSE = 45 final val SPLITCLAUSE = 46 + final val TRACKED = 47 // Tree Cat. 2: tag Nat final val firstNatTreeTag = SHAREDterm @@ -700,7 +702,8 @@ object TastyFormat { | INVISIBLE | ANNOTATION | PRIVATEqualified - | PROTECTEDqualified => true + | PROTECTEDqualified + | TRACKED => true case _ => false } diff --git a/tests/neg/i3964.scala b/tests/neg/i3964.scala new file mode 100644 index 000000000000..eaf3953bc230 --- /dev/null +++ b/tests/neg/i3964.scala @@ -0,0 +1,12 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test1: + + abstract class Bar { val x: Animal } + val bar: Bar { val x: Cat } = new Bar { val x = new Cat } // error, but should work + + trait Foo { val x: Animal } + val foo: Foo { val x: Cat } = new Foo { val x = new Cat } // error, but should work diff --git a/tests/neg/tracked.check b/tests/neg/tracked.check new file mode 100644 index 000000000000..ae734e7aa0b4 --- /dev/null +++ b/tests/neg/tracked.check @@ -0,0 +1,50 @@ +-- Error: tests/neg/tracked.scala:2:16 --------------------------------------------------------------------------------- +2 |class C(tracked x: Int) // error + | ^ + | `val` or `var` expected +-- [E040] Syntax Error: tests/neg/tracked.scala:7:18 ------------------------------------------------------------------- +7 | def foo(tracked a: Int) = // error + | ^ + | ':' 
expected, but identifier found +-- Error: tests/neg/tracked.scala:8:12 --------------------------------------------------------------------------------- +8 | tracked val b: Int = 2 // error + | ^^^ + | end of statement expected but 'val' found +-- Error: tests/neg/tracked.scala:11:10 -------------------------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^ + | end of statement expected but 'object' found +-- Error: tests/neg/tracked.scala:14:10 -------------------------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^ + | end of statement expected but 'class' found +-- Error: tests/neg/tracked.scala:17:10 -------------------------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^ + | end of statement expected but 'type' found +-- Error: tests/neg/tracked.scala:20:29 -------------------------------------------------------------------------------- +20 | given g2(using tracked val x: Int): C = C(x) // error + | ^^^^^^^^^^^^^^^^^^ + | method parameter x may not be a `val` +-- Error: tests/neg/tracked.scala:4:21 --------------------------------------------------------------------------------- +4 |class C2(tracked var x: Int) // error + | ^ + | mutable variables may not be `tracked` +-- [E006] Not Found Error: tests/neg/tracked.scala:11:2 ---------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:14:2 ---------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:17:2 
---------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/tracked.scala b/tests/neg/tracked.scala new file mode 100644 index 000000000000..8d315a7b89ac --- /dev/null +++ b/tests/neg/tracked.scala @@ -0,0 +1,20 @@ +//> using options -source future -language:experimental.modularity +class C(tracked x: Int) // error + +class C2(tracked var x: Int) // error + +object A: + def foo(tracked a: Int) = // error + tracked val b: Int = 2 // error + +object B: + tracked object Foo // error // error + +object C: + tracked class D // error // error + +object D: + tracked type T = Int // error // error + +object E: + given g2(using tracked val x: Int): C = C(x) // error diff --git a/tests/neg/tracked2.scala b/tests/neg/tracked2.scala new file mode 100644 index 000000000000..2e6fa8cf6045 --- /dev/null +++ b/tests/neg/tracked2.scala @@ -0,0 +1 @@ +class C(tracked val x: Int) // error diff --git a/tests/new/tracked-mixin-traits.scala b/tests/new/tracked-mixin-traits.scala new file mode 100644 index 000000000000..21d890d44f42 --- /dev/null +++ b/tests/new/tracked-mixin-traits.scala @@ -0,0 +1,16 @@ +trait A: + type T +object a extends A: + type T = Int + +trait B(tracked val b: A): + type T = b.T + +trait C(tracked val c: A): + type T = c.T + +class D extends B(a), C(a): + val x: T = 2 + + + diff --git a/tests/pos/depclass-1.scala b/tests/pos/depclass-1.scala new file mode 100644 index 000000000000..38daef85ae98 --- /dev/null +++ b/tests/pos/depclass-1.scala @@ -0,0 +1,19 @@ +//> using options -source future -language:experimental.modularity +class A(tracked val source: String) + +class B(x: Int, tracked val source1: String) extends A(source1) + +class C(tracked val source2: String) extends B(1, source2) + +//class D(source1: String) extends C(source1) +val x = C("hello") +val _: A{ val source: "hello" } = x 
+ +class Vec[Elem](tracked val size: Int) +class Vec8 extends Vec[Float](8) + +val v = Vec[Float](10) +val v2 = Vec8() +val xx: 10 = v.size +val x2: 8 = v2.size + diff --git a/tests/pos/i3920.scala b/tests/pos/i3920.scala new file mode 100644 index 000000000000..6cd74187098f --- /dev/null +++ b/tests/pos/i3920.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Ordering { + type T + def compare(t1:T, t2: T): Int +} + +class SetFunctor(tracked val ord: Ordering) { + type Set = List[ord.T] + def empty: Set = Nil + + implicit class helper(s: Set) { + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def member(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + } +} + +object Test { + val orderInt = new Ordering { + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + } + + val IntSet = new SetFunctor(orderInt) + import IntSet.* + + def main(args: Array[String]) = { + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.member(7)) + assert(set.member(8)) + } +} \ No newline at end of file diff --git a/tests/pos/i3964.scala b/tests/pos/i3964.scala new file mode 100644 index 000000000000..42412b910899 --- /dev/null +++ b/tests/pos/i3964.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +object Test3: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) + class Lst8 extends Lst(8) + + val v8a: Vec { val size: 8 } = new Vec8 + val v8b: Vec { val size: 8 } = new Vec(8) {} + + val l8a: Lst { val size: 8 } = new Lst8 + val l8b: Lst { val size: 8 } = new Lst(8) {} + 
+ class VecN(tracked val n: Int) extends Vec(n) + class Vec9 extends VecN(9) + val v9a = VecN(9) + val _: Vec { val size: 9 } = v9a + val v9b = Vec9() + val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/i3964a/Defs_1.scala b/tests/pos/i3964a/Defs_1.scala new file mode 100644 index 000000000000..7dcc89f7003e --- /dev/null +++ b/tests/pos/i3964a/Defs_1.scala @@ -0,0 +1,18 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +package coll: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) \ No newline at end of file diff --git a/tests/pos/i3964a/Uses_2.scala b/tests/pos/i3964a/Uses_2.scala new file mode 100644 index 000000000000..9d1b6ebaa58b --- /dev/null +++ b/tests/pos/i3964a/Uses_2.scala @@ -0,0 +1,16 @@ +//> using options -source future -language:experimental.modularity +import coll.* +class Lst8 extends Lst(8) + +val v8a: Vec { val size: 8 } = new Vec8 +val v8b: Vec { val size: 8 } = new Vec(8) {} + +val l8a: Lst { val size: 8 } = new Lst8 +val l8b: Lst { val size: 8 } = new Lst(8) {} + +class VecN(tracked val n: Int) extends Vec(n) +class Vec9 extends VecN(9) +val v9a = VecN(9) +val _: Vec { val size: 9 } = v9a +val v9b = Vec9() +val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/parsercombinators-expanded.scala b/tests/pos/parsercombinators-expanded.scala new file mode 100644 index 000000000000..cf8137bfe8eb --- /dev/null +++ b/tests/pos/parsercombinators-expanded.scala @@ -0,0 +1,64 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. 
+trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +object test: + + class apply[C, E] extends Combinator[Apply[C, E]]: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + + def apply[C, E]: apply[C, E] = new apply[C, E] + + class combine[A, B]( + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context} + ) extends Combinator[Combine[A, B]]: + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + + def combine[A, B]( + _f: Combinator[A], + _s: Combinator[B] { type Context = _f.Context} + ) = new combine[A, B](_f, _s) + // cast is needed since the type of new combine[A, B](_f, _s) + // drops the required refinement. 
+ + extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + + @main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val c = combine( + apply[mutable.ListBuffer[Int], Int], + apply[mutable.ListBuffer[Int], Int] + ) + val r = c.parse(m)(stream) // was type mismatch, now OK + val rc: Option[(Int, Int)] = r + } diff --git a/tests/pos/parsercombinators-givens-2.scala b/tests/pos/parsercombinators-givens-2.scala new file mode 100644 index 000000000000..8349d69a30af --- /dev/null +++ b/tests/pos/parsercombinators-givens-2.scala @@ -0,0 +1,52 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B, C](using + f: Combinator[A] { type Context = C }, + s: Combinator[B] { type Context = C } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? 
+ } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // works, but Element type is not resolved correctly +} diff --git a/tests/pos/parsercombinators-givens.scala b/tests/pos/parsercombinators-givens.scala new file mode 100644 index 000000000000..5b5588c93840 --- /dev/null +++ b/tests/pos/parsercombinators-givens.scala @@ -0,0 +1,54 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B](using + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? 
+ } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/run/i3920.scala b/tests/run/i3920.scala new file mode 100644 index 000000000000..c66fd8908976 --- /dev/null +++ b/tests/run/i3920.scala @@ -0,0 +1,26 @@ +//> using options -source future -language:experimental.modularity +trait Ordering: + type T + def compare(t1:T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) \ No newline at end of file From 70fb91cfe257a2c2cbe98c53a6cbba9e779a7bc2 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 19:44:35 +0200 Subject: [PATCH 344/465] Make explicit arguments for context bounds an error from 3.5 [Cherry-picked ea3c688b94d9982cceda7b63969cd7e2a1887a46] --- compiler/src/dotty/tools/dotc/typer/ReTyper.scala | 1 + compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 +++ tests/warn/context-bounds-migration.scala | 9 +++++++++ 3 files changed, 13 insertions(+) create mode 100644 tests/warn/context-bounds-migration.scala diff --git 
a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 9741a366da89..7a5c838848ac 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -182,4 +182,5 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override protected def checkEqualityEvidence(tree: tpd.Tree, pt: Type)(using Context): Unit = () override protected def matchingApply(methType: MethodOrPoly, pt: FunProto)(using Context): Boolean = true override protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = promote(call) + override protected def migrate[T](migration: => T, disabled: => T = ()): T = disabled } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 8f2b7ce95785..17a2cba25019 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -183,6 +183,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Overridden in derived typers def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel) + // Overridden to do nothing in derived typers + protected def migrate[T](migration: => T, disabled: => T = ()): T = migration + /** Find the type of an identifier with given `name` in given context `ctx`. 
* @param name the name of the identifier * @param pt the expected type diff --git a/tests/warn/context-bounds-migration.scala b/tests/warn/context-bounds-migration.scala new file mode 100644 index 000000000000..cdd3eca62b5c --- /dev/null +++ b/tests/warn/context-bounds-migration.scala @@ -0,0 +1,9 @@ + +class C[T] +def foo[X: C] = () + +given [T]: C[T] = C[T]() + +def Test = + foo(C[Int]()) // warning + foo(using C[Int]()) // ok From 90e84b96a9e53d8e8203b09efc56d3cf0679783e Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 20:02:13 +0200 Subject: [PATCH 345/465] Drop restriction against typedefs at level * only Allow the RHS of a type def to be higher-kinded. But keep the restriction for opaque type aliases; their RHS must be fully applied. I am not sure why the restriction applies to them, but there was a test specifically about that, so there night be a reason. # Conflicts: # compiler/src/dotty/tools/dotc/typer/Typer.scala # Conflicts: # compiler/src/dotty/tools/dotc/typer/Typer.scala # tests/pos/typeclasses-this.scala [Cherry-picked f96a769b17f362d14d2265693e72ad7311301172] --- .../src/dotty/tools/dotc/typer/Checking.scala | 16 ++++++++-------- compiler/src/dotty/tools/dotc/typer/Typer.scala | 5 +++-- tests/neg/i12456.scala | 2 +- tests/neg/i13757-match-type-anykind.scala | 2 +- tests/neg/i9328.scala | 2 +- tests/neg/parser-stability-12.scala | 2 +- tests/neg/unapplied-types.scala | 7 ------- tests/pos/unapplied-types.scala | 7 +++++++ 8 files changed, 22 insertions(+), 21 deletions(-) delete mode 100644 tests/neg/unapplied-types.scala create mode 100644 tests/pos/unapplied-types.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 5839ec1766af..073055ba5b58 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -1331,20 +1331,20 @@ trait Checking { } /** Check that user-defined (result) type is fully applied 
*/ - def checkFullyAppliedType(tree: Tree)(using Context): Unit = tree match + def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = tree match case TypeBoundsTree(lo, hi, alias) => - checkFullyAppliedType(lo) - checkFullyAppliedType(hi) - checkFullyAppliedType(alias) + checkFullyAppliedType(lo, prefix) + checkFullyAppliedType(hi, prefix) + checkFullyAppliedType(alias, prefix) case Annotated(arg, annot) => - checkFullyAppliedType(arg) + checkFullyAppliedType(arg, prefix) case LambdaTypeTree(_, body) => - checkFullyAppliedType(body) + checkFullyAppliedType(body, prefix) case _: TypeTree => case _ => if tree.tpe.typeParams.nonEmpty then val what = if tree.symbol.exists then tree.symbol.show else i"type $tree" - report.error(em"$what takes type parameters", tree.srcPos) + report.error(em"$prefix$what takes type parameters", tree.srcPos) /** Check that we are in an inline context (inside an inline method or in inline code) */ def checkInInlineContext(what: String, pos: SrcPos)(using Context): Unit = @@ -1609,7 +1609,7 @@ trait ReChecking extends Checking { override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = () - override def checkFullyAppliedType(tree: Tree)(using Context): Unit = () + override def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = () override def checkEnumCaseRefsLegal(cdef: TypeDef, enumCtx: Context)(using Context): Unit = () override def checkAnnotApplicable(annot: Tree, sym: Symbol)(using Context): Boolean = true override def checkMatchable(tp: Type, pos: SrcPos, pattern: Boolean)(using Context): Unit = () diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 
17a2cba25019..a357f06e4ee8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2780,8 +2780,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typeIndexedLambdaTypeTree(rhs, tparams, body) case rhs => typedType(rhs) - checkFullyAppliedType(rhs1) - if sym.isOpaqueAlias then checkNoContextFunctionType(rhs1) + if sym.isOpaqueAlias then + checkFullyAppliedType(rhs1, "Opaque type alias must be fully applied, but ") + checkNoContextFunctionType(rhs1) assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } diff --git a/tests/neg/i12456.scala b/tests/neg/i12456.scala index b9fb0283dcd7..c1a3ada5a420 100644 --- a/tests/neg/i12456.scala +++ b/tests/neg/i12456.scala @@ -1 +1 @@ -object F { type T[G[X] <: X, F <: G[F]] } // error // error +object F { type T[G[X] <: X, F <: G[F]] } // error diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index a80e8b2b289b..998c54292b15 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -8,7 +8,7 @@ object Test: type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType4[X <: Option] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded diff --git a/tests/neg/i9328.scala b/tests/neg/i9328.scala index dabde498e1dc..c13d33e103b9 100644 --- a/tests/neg/i9328.scala +++ b/tests/neg/i9328.scala @@ -3,7 +3,7 @@ type Id[T] = T match { case _ => T } -class Foo2[T <: Id[T]] // error // error +class Foo2[T <: Id[T]] // error object Foo { // error object Foo { } diff --git a/tests/neg/parser-stability-12.scala 
b/tests/neg/parser-stability-12.scala index 78ff178d010c..17a611d70e34 100644 --- a/tests/neg/parser-stability-12.scala +++ b/tests/neg/parser-stability-12.scala @@ -1,4 +1,4 @@ trait x0[]: // error - trait x1[x1 <:x0] // error: type x0 takes type parameters + trait x1[x1 <:x0] extends x1[ // error // error \ No newline at end of file diff --git a/tests/neg/unapplied-types.scala b/tests/neg/unapplied-types.scala deleted file mode 100644 index 2f2339baa026..000000000000 --- a/tests/neg/unapplied-types.scala +++ /dev/null @@ -1,7 +0,0 @@ -trait T { - type L[X] = List[X] - type T1 <: L // error: takes type parameters - type T2 = L // error: takes type parameters - type T3 = List // error: takes type parameters - type T4 <: List // error: takes type parameters -} diff --git a/tests/pos/unapplied-types.scala b/tests/pos/unapplied-types.scala new file mode 100644 index 000000000000..604e63deb8ad --- /dev/null +++ b/tests/pos/unapplied-types.scala @@ -0,0 +1,7 @@ +trait T { + type L[X] = List[X] + type T1 <: L // was error: takes type parameters + type T2 = L // was error: takes type parameters + type T3 = List // was error: takes type parameters + type T4 <: List // was error: takes type parameters +} From 62eed876ca35d942a3fa84ee7ffbb1999f917f6a Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 20:10:36 +0200 Subject: [PATCH 346/465] Allow types in given definitions to be infix types A type implemented in a given definition can now be an infix type, without enclosing parens being necessary. By contrast, it cannot anymore be a refined type. Refined types have to be enclosed in parens. This second point aligns the dotty parser with the published syntax and the scala meta parser. 
# Conflicts: # tests/pos/typeclasses-this.scala [Cherry-picked ef71dcb45a0f31b72c5fe05fc48764865e1cea8e] --- .../dotty/tools/dotc/parsing/Parsers.scala | 26 +++++++++++++------ docs/_docs/internals/syntax.md | 4 ++- docs/_docs/reference/syntax.md | 9 ++++--- tests/neg/i12348.check | 16 ++++++------ tests/neg/i12348.scala | 3 +-- tests/neg/i7045.scala | 7 +++++ tests/pos/i7045.scala | 9 ------- tests/pos/typeclass-aggregates.scala | 6 ++--- 8 files changed, 45 insertions(+), 35 deletions(-) create mode 100644 tests/neg/i7045.scala delete mode 100644 tests/pos/i7045.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 94814457523e..6c0f19de3dd1 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1806,8 +1806,8 @@ object Parsers { */ def infixType(): Tree = infixTypeRest(refinedType()) - def infixTypeRest(t: Tree): Tree = - infixOps(t, canStartInfixTypeTokens, refinedTypeFn, Location.ElseWhere, ParseKind.Type, + def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = + infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, isOperator = !followingIsVararg() && !isPureArrow && nextCanFollowOperator(canStartInfixTypeTokens)) @@ -1872,6 +1872,10 @@ object Parsers { */ def annotType(): Tree = annotTypeRest(simpleType()) + /** AnnotType1 ::= SimpleType1 {Annotation} + */ + def annotType1(): Tree = annotTypeRest(simpleType1()) + def annotTypeRest(t: Tree): Tree = if (in.token == AT) annotTypeRest(atSpan(startOffset(t)) { @@ -4097,8 +4101,10 @@ object Parsers { syntaxError(em"extension clause can only define methods", stat.span) } - /** GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) - * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + /** GivenDef ::= [GivenSig] (GivenType [‘=’ Expr] | StructuralInstance) + * GivenSig ::= [id] 
[DefTypeParamClause] {UsingParamClauses} ‘:’ + * GivenType ::= AnnotType1 {id [nl] AnnotType1} + * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] */ def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) @@ -4124,8 +4130,12 @@ object Parsers { val noParams = tparams.isEmpty && vparamss.isEmpty if !(name.isEmpty && noParams) then acceptColon() val parents = - if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else refinedTypeRest(constrApp()) :: withConstrApps() + if isSimpleLiteral then + rejectWildcardType(annotType()) :: Nil + else constrApp() match + case parent: Apply => parent :: withConstrApps() + case parent if in.isIdent => infixTypeRest(parent, _ => annotType1()) :: Nil + case parent => parent :: withConstrApps() val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) @@ -4219,10 +4229,10 @@ object Parsers { /* -------- TEMPLATES ------------------------------------------- */ - /** ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} + /** ConstrApp ::= AnnotType1 {ParArgumentExprs} */ val constrApp: () => Tree = () => - val t = rejectWildcardType(annotTypeRest(simpleType1()), + val t = rejectWildcardType(annotType1(), fallbackTree = Ident(tpnme.ERROR)) // Using Ident(tpnme.ERROR) to avoid causing cascade errors on non-user-written code if in.token == LPAREN then parArgumentExprss(wrapNew(t)) else t diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index c711d5f63db8..6ef346ab22cc 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -191,6 +191,7 @@ MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) +AnnotType1 ::= 
SimpleType1 {Annotation} Annotated(t, annot) SimpleType ::= SimpleLiteral SingletonTypeTree(l) | ‘?’ TypeBounds @@ -466,8 +467,9 @@ ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) +GivenDef ::= [GivenSig] (GivenType [‘=’ Expr] | StructuralInstance) GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +GivenType ::= AnnotType1 {id [nl] AnnotType1} StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index ae541b65d8c4..66cf5a18fac9 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -200,8 +200,8 @@ SimpleType ::= SimpleLiteral | Singleton ‘.’ ‘type’ | ‘(’ [Types] ‘)’ | Refinement - | SimpleType1 TypeArgs - | SimpleType1 ‘#’ id + | SimpleType TypeArgs + | SimpleType ‘#’ id Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id @@ -392,7 +392,7 @@ LocalModifier ::= ‘abstract’ AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ id ‘]’ -Annotation ::= ‘@’ SimpleType1 {ParArgumentExprs} +Annotation ::= ‘@’ SimpleType {ParArgumentExprs} Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} @@ -444,6 +444,7 @@ ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +GivenType ::= 
AnnotType {id [nl] AnnotType} StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods @@ -453,7 +454,7 @@ ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef Template ::= InheritClauses [TemplateBody] InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] ConstrApps ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp}) -ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} +ConstrApp ::= SimpleType {Annotation} {ParArgumentExprs} ConstrExpr ::= SelfInvocation | <<< SelfInvocation {semi BlockStat} >>> SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index ccc2b9f7ed00..eded51f70f31 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -1,8 +1,8 @@ --- [E040] Syntax Error: tests/neg/i12348.scala:2:15 -------------------------------------------------------------------- -2 | given inline x: Int = 0 // error - | ^ - | 'with' expected, but identifier found --- [E040] Syntax Error: tests/neg/i12348.scala:3:10 -------------------------------------------------------------------- -3 |} // error - | ^ - | '}' expected, but eof found +-- [E040] Syntax Error: tests/neg/i12348.scala:2:16 -------------------------------------------------------------------- +2 | given inline x: Int = 0 // error // error + | ^ + | an identifier expected, but ':' found +-- [E067] Syntax Error: tests/neg/i12348.scala:2:8 --------------------------------------------------------------------- +2 | given inline x: Int = 0 // error // error + | ^ + |Declaration of given instance given_x_inline_ not allowed here: only classes can have declared but undefined members diff --git a/tests/neg/i12348.scala b/tests/neg/i12348.scala index 69fc77fb532e..43daf9a2801b 100644 --- a/tests/neg/i12348.scala +++ b/tests/neg/i12348.scala @@ -1,3 +1,2 @@ object A { - given inline 
x: Int = 0 // error -} // error \ No newline at end of file + given inline x: Int = 0 // error // error diff --git a/tests/neg/i7045.scala b/tests/neg/i7045.scala new file mode 100644 index 000000000000..b4c6d60cd35a --- /dev/null +++ b/tests/neg/i7045.scala @@ -0,0 +1,7 @@ +trait Bar { type Y } +trait Foo { type X } + +class Test: + given a1(using b: Bar): Foo = new Foo { type X = b.Y } // ok + given a2(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } // ok + given a3(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } // error \ No newline at end of file diff --git a/tests/pos/i7045.scala b/tests/pos/i7045.scala deleted file mode 100644 index e683654dd5c3..000000000000 --- a/tests/pos/i7045.scala +++ /dev/null @@ -1,9 +0,0 @@ -trait Bar { type Y } -trait Foo { type X } - -class Test: - given a1(using b: Bar): Foo = new Foo { type X = b.Y } - - given a2(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } - - given a3(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala index 77b0f1a9f04a..9bb576603b7b 100644 --- a/tests/pos/typeclass-aggregates.scala +++ b/tests/pos/typeclass-aggregates.scala @@ -30,8 +30,8 @@ trait OrdWithMonoid extends Ord, Monoid def ordWithMonoid2(ord: Ord, monoid: Monoid{ type This = ord.This }) = //: OrdWithMonoid { type This = ord.This} = new OrdWithMonoid with ord.OrdProxy with monoid.MonoidProxy {} -given intOrd: Ord { type This = Int } = ??? -given intMonoid: Monoid { type This = Int } = ??? +given intOrd: (Ord { type This = Int }) = ??? +given intMonoid: (Monoid { type This = Int }) = ??? //given (using ord: Ord, monoid: Monoid{ type This = ord.This }): (Ord & Monoid { type This = ord.This}) = // ordWithMonoid2(ord, monoid) @@ -42,6 +42,6 @@ val y: Int = ??? 
: x.This // given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = // new ord.OrdProxy with monoid.MonoidProxy {} -given [A](using ord: Ord { type This = A }, monoid: Monoid { type This = A}): (Ord & Monoid) { type This = A} = +given [A](using ord: Ord { type This = A }, monoid: Monoid { type This = A}): ((Ord & Monoid) { type This = A}) = new ord.OrdProxy with monoid.MonoidProxy {} From 305dd2ea526b0693a4808f9467d12dc46a23a072 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 20:44:59 +0200 Subject: [PATCH 347/465] New syntax for given defs given [A: Ord] => A is Ord: ... given [A: Ord] => A is Ord as listOrd: ... [Cherry-picked 2f58cbc145dec06679b571f8b90b8729fc2a1094] --- .../dotty/tools/dotc/parsing/Parsers.scala | 70 +++++++-- .../test/dotc/pos-test-pickling.blacklist | 2 + docs/_docs/internals/syntax.md | 9 +- tests/pos/typeclasses-arrow0.scala | 136 ++++++++++++++++++ 4 files changed, 201 insertions(+), 16 deletions(-) create mode 100644 tests/pos/typeclasses-arrow0.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 6c0f19de3dd1..a5b33994d4a9 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -976,12 +976,14 @@ object Parsers { * i.e. an identifier followed by type and value parameters, followed by `:`? 
* @pre The current token is an identifier */ - def followingIsGivenSig() = + def followingIsOldStyleGivenSig() = val lookahead = in.LookaheadScanner() if lookahead.isIdent then lookahead.nextToken() + var paramsSeen = false def skipParams(): Unit = if lookahead.token == LPAREN || lookahead.token == LBRACKET then + paramsSeen = true lookahead.skipParens() skipParams() else if lookahead.isNewLine then @@ -989,6 +991,16 @@ object Parsers { skipParams() skipParams() lookahead.isColon + && { + !in.featureEnabled(Feature.modularity) + || { // with modularity language import, a `:` at EOL after an identifier represents a single identifier given + // Example: + // given C: + // def f = ... + lookahead.nextToken() + !lookahead.isAfterLineEnd + } + } def followingIsExtension() = val next = in.lookahead.token @@ -1808,7 +1820,9 @@ object Parsers { def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, - isOperator = !followingIsVararg() && !isPureArrow + isOperator = !followingIsVararg() + && !isPureArrow + && !(isIdent(nme.as) && in.featureEnabled(Feature.modularity)) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -4101,15 +4115,30 @@ object Parsers { syntaxError(em"extension clause can only define methods", stat.span) } - /** GivenDef ::= [GivenSig] (GivenType [‘=’ Expr] | StructuralInstance) - * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ - * GivenType ::= AnnotType1 {id [nl] AnnotType1} + /** GivenDef ::= OldGivenDef | NewGivenDef + * OldGivenDef ::= [OldGivenSig] (GivenType [‘=’ Expr] | StructuralInstance) + * OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + * + * NewGivenDef ::= [GivenConditional '=>'] NewGivenSig + * GivenConditional ::= [DefTypeParamClause | UsingParamClause] 
{UsingParamClause} + * NewGivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + * | ConstrApps ['as' id] TemplateBody + * + * GivenType ::= AnnotType1 {id [nl] AnnotType1} */ def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) val nameStart = in.offset - val name = if isIdent && followingIsGivenSig() then ident() else EmptyTermName + var name = if isIdent && followingIsOldStyleGivenSig() then ident() else EmptyTermName + var newSyntaxAllowed = in.featureEnabled(Feature.modularity) + + def moreConstrApps() = + if newSyntaxAllowed && in.token == COMMA then + in.nextToken() + constrApps() + else // need to be careful with last `with` + withConstrApps() // TODO Change syntax description def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = @@ -4128,14 +4157,24 @@ object Parsers { else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty - if !(name.isEmpty && noParams) then acceptColon() + if !(name.isEmpty && noParams) then + if in.isColon then + newSyntaxAllowed = false + in.nextToken() + else if newSyntaxAllowed then accept(ARROW) + else acceptColon() val parents = if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil else constrApp() match - case parent: Apply => parent :: withConstrApps() - case parent if in.isIdent => infixTypeRest(parent, _ => annotType1()) :: Nil - case parent => parent :: withConstrApps() + case parent: Apply => parent :: moreConstrApps() + case parent if in.isIdent => + infixTypeRest(parent, _ => annotType1()) :: Nil + case parent => parent :: moreConstrApps() + if newSyntaxAllowed && in.isIdent(nme.as) then + in.nextToken() + name = ident() + val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) @@ -4145,7 +4184,7 @@ object Parsers { ValDef(name, parents.head, subExpr()) else DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, 
subExpr()) - else if (isStatSep || isStatSeqEnd) && parentsIsType then + else if (isStatSep || isStatSeqEnd) && parentsIsType && !newSyntaxAllowed then if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) @@ -4156,8 +4195,13 @@ object Parsers { else vparam val constr = makeConstructor(tparams, vparamss1) val templ = - if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) - else withTemplate(constr, parents) + if isStatSep || isStatSeqEnd then + Template(constr, parents, Nil, EmptyValDef, Nil) + else if !newSyntaxAllowed || in.token == WITH then + withTemplate(constr, parents) + else + possibleTemplateStart() + templateBodyOpt(constr, parents, Nil) if noParams && !mods.is(Inline) then ModuleDef(name, templ) else TypeDef(name.toTypeName, templ) end gdef diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index ad9befa72f5f..3b14ce28569d 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -127,5 +127,7 @@ i20053b.scala # alias types at different levels of dereferencing parsercombinators-givens.scala parsercombinators-givens-2.scala +parsercombinators-arrow.scala + diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 6ef346ab22cc..db858ba05fbc 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -467,10 +467,13 @@ ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (GivenType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present + 
+GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody GivenType ::= AnnotType1 {id [nl] AnnotType1} -StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/tests/pos/typeclasses-arrow0.scala b/tests/pos/typeclasses-arrow0.scala new file mode 100644 index 000000000000..22d84fe6478d --- /dev/null +++ b/tests/pos/typeclasses-arrow0.scala @@ -0,0 +1,136 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord[A]: + extension (x: A) + def compareTo(y: A): Int + def < (y: A): Boolean = compareTo(y) < 0 + def > (y: A): Boolean = compareTo(y) > 0 + def <= (y: A): Boolean = compareTo(y) <= 0 + def >= (y: A): Boolean = compareTo(y) >= 0 + def max(y: A): A = if x < y then y else x + + trait Show[A]: + extension (x: A) def show: String + + trait SemiGroup[A]: + extension (x: A) def combine(y: A): A + + trait Monoid[A] extends SemiGroup[A]: + def unit: A + + trait Functor[F[_]]: + extension [A](x: F[A]) def map[B](f: A => B): F[B] + + trait Monad[F[_]] extends Functor[F]: + def pure[A](x: A): F[A] + extension [A](x: F[A]) + def flatMap[B](f: A => F[B]): F[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Ord[Int] as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => Ord[List[T]]: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else 
xs1.compareTo(ys1) + + given Monad[List] as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Monad[Reader[Ctx]] as readerMonad: + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => Ord[T] as descending: + extension (x: T) def compareTo(y: T) = summon[Ord[T]].compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal[Self]: + + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. 
+ extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Animal[Sheep]: + def apply(name: String) = Sheep(name) + extension (self: Sheep) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ From 22b681c3b5749b0e3ea58fd426b2b6c2ec9ab8c7 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 21:59:15 +0200 Subject: [PATCH 348/465] Allow multiple context bounds in `{...}` [Cherry-picked 598c6adff60179e1533a3dd0226d58363ea19d29] --- .../src/dotty/tools/dotc/ast/Desugar.scala | 2 ++ compiler/src/dotty/tools/dotc/ast/untpd.scala | 8 +++++++ .../dotty/tools/dotc/parsing/Parsers.scala | 11 +++++++--- .../tools/dotc/printing/RefinedPrinter.scala | 21 ++++++++++++++----- .../src/dotty/tools/dotc/typer/Typer.scala | 11 ++++++++++ tests/neg/i9330.scala | 2 +- tests/pos/FromString-typeparam.scala | 13 ++++++++++++ tests/semanticdb/expect/Methods.expect.scala | 2 +- .../semanticdb/expect/Synthetic.expect.scala | 2 +- tests/semanticdb/metac.expect | 9 ++++---- 10 files changed, 66 insertions(+), 15 deletions(-) create mode 100644 tests/pos/FromString-typeparam.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index c3a0c05088cb..774e77aa4b44 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1144,6 +1144,8 @@ object desugar 
{ case tree: TypeDef => tree.name.toString case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" + case ContextBoundTypeTree(tycon, paramName, _) => + s"${apply(x, tycon)}_$paramName" case InfixOp(left, op, right) => if followArgs then s"${op.name}_${extractArgs(List(left, right))}" else op.name.toString diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 91ef462bcf05..0486e2e6d3d7 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -118,6 +118,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -677,6 +678,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def ContextBoundTypeTree(tree: Tree)(tycon: Tree, paramName: TypeName, ownName: TermName)(using Context): Tree = tree match + case tree: 
ContextBoundTypeTree if (tycon eq tree.tycon) && paramName == tree.paramName && ownName == tree.ownName => tree + case _ => finalize(tree, untpd.ContextBoundTypeTree(tycon, paramName, ownName)(tree.source)) def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) @@ -742,6 +746,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) case ExtMethods(paramss, methods) => cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case ContextBoundTypeTree(tycon, paramName, ownName) => + cpy.ContextBoundTypeTree(tree)(transform(tycon), paramName, ownName) case ImportSelector(imported, renamed, bound) => cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) case Number(_, _) | TypedSplice(_) => @@ -797,6 +803,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case ExtMethods(paramss, methods) => this(paramss.foldLeft(x)(apply), methods) + case ContextBoundTypeTree(tycon, paramName, ownName) => + this(x, tycon) case ImportSelector(imported, renamed, bound) => this(this(this(x, imported), renamed), bound) case Number(_, _) => diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index a5b33994d4a9..8680ba8c1335 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -2205,11 +2205,16 @@ object Parsers { else atSpan((t.span union cbs.head.span).start) { ContextBounds(t, cbs) } } + /** ContextBound ::= Type [`as` id] */ + def contextBound(pname: TypeName): Tree = + 
ContextBoundTypeTree(toplevelTyp(), pname, EmptyTermName) + def contextBounds(pname: TypeName): List[Tree] = if in.isColon then - atSpan(in.skipToken()) { - AppliedTypeTree(toplevelTyp(), Ident(pname)) - } :: contextBounds(pname) + in.nextToken() + if in.token == LBRACE && in.featureEnabled(Feature.modularity) + then inBraces(commaSeparated(() => contextBound(pname))) + else contextBound(pname) :: contextBounds(pname) else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 0329f0639d87..1ff4c8cae339 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -386,7 +386,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec) { keywordStr("for ") ~ Text(enums map enumText, "; ") ~ sep ~ toText(expr) } def cxBoundToText(bound: untpd.Tree): Text = bound match { // DD - case AppliedTypeTree(tpt, _) => " : " ~ toText(tpt) + case ContextBoundTypeTree(tpt, _, _) => " : " ~ toText(tpt) case untpd.Function(_, tpt) => " <% " ~ toText(tpt) } @@ -658,7 +658,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextAnnot = toTextLocal(arg) ~~ annotText(annot.symbol.enclosingClass, annot) def toTextRetainsAnnot = - try changePrec(GlobalPrec)(toText(arg) ~ "^" ~ toTextCaptureSet(captureSet)) + try changePrec(GlobalPrec)(toTextLocal(arg) ~ "^" ~ toTextCaptureSet(captureSet)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner.isRetains && Feature.ccEnabled && !printDebug @@ -747,9 +747,18 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case GenAlias(pat, expr) => toText(pat) ~ " = " ~ toText(expr) case ContextBounds(bounds, cxBounds) => - cxBounds.foldLeft(toText(bounds)) 
{(t, cxb) => - t ~ cxBoundToText(cxb) - } + if Feature.enabled(Feature.modularity) then + def boundsText(bounds: Tree) = bounds match + case ContextBoundTypeTree(tpt, _, ownName) => + toText(tpt) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) + case bounds => toText(bounds) + cxBounds match + case bound :: Nil => ": " ~ boundsText(bound) + case _ => ": {" ~ Text(cxBounds.map(boundsText), ", ") ~ "}" + else + cxBounds.foldLeft(toText(bounds)) {(t, cxb) => + t ~ cxBoundToText(cxb) + } case PatDef(mods, pats, tpt, rhs) => modText(mods, NoSymbol, keywordStr("val"), isType = false) ~~ toText(pats, ", ") ~ optAscription(tpt) ~ optText(rhs)(" = " ~ _) @@ -794,6 +803,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { prefix ~~ idx.toString ~~ "|" ~~ tpeText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix case CapturesAndResult(refs, parent) => changePrec(GlobalPrec)("^{" ~ Text(refs.map(toText), ", ") ~ "}" ~ toText(parent)) + case ContextBoundTypeTree(tycon, pname, ownName) => + toText(pname) ~ " : " ~ toText(tycon) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) case _ => tree.fallbackToText(this) } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a357f06e4ee8..b90b742aa0ec 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2284,6 +2284,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.tpFun(tsyms, vsyms) completeTypeTree(InferredTypeTree(), tp, tree) + def typedContextBoundTypeTree(tree: untpd.ContextBoundTypeTree)(using Context): Tree = + val tycon = typedType(tree.tycon) + val tyconSplice = untpd.TypedSplice(tycon) + val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) + if tycon.tpe.typeParams.nonEmpty then + typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) + else + errorTree(tree, + em"""Illegal context bound: ${tycon.tpe} does not take type 
parameters.""") + def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") @@ -3269,6 +3279,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.UnApply => typedUnApply(tree, pt) case tree: untpd.Tuple => typedTuple(tree, pt) case tree: untpd.InLambdaTypeTree => typedInLambdaTypeTree(tree, pt) + case tree: untpd.ContextBoundTypeTree => typedContextBoundTypeTree(tree) case tree: untpd.InfixOp => typedInfixOp(tree, pt) case tree: untpd.ParsedTry => typedTry(tree, pt) case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt) diff --git a/tests/neg/i9330.scala b/tests/neg/i9330.scala index ca25582ef7e8..6ba57c033473 100644 --- a/tests/neg/i9330.scala +++ b/tests/neg/i9330.scala @@ -1,4 +1,4 @@ val x = { - () == "" // error + () == "" implicit def foo[A: A] // error // error // error } diff --git a/tests/pos/FromString-typeparam.scala b/tests/pos/FromString-typeparam.scala new file mode 100644 index 000000000000..893bcfd3decc --- /dev/null +++ b/tests/pos/FromString-typeparam.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[A]: + def fromString(s: String): A + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString, Numeric}](a: String, b: String): N = + val num = summon[Numeric[N]] + val N = summon[FromString[N]] + num.plus(N.fromString(a), N.fromString(b)) diff --git a/tests/semanticdb/expect/Methods.expect.scala b/tests/semanticdb/expect/Methods.expect.scala index f34c657b2f6d..4ec723ad584e 100644 --- a/tests/semanticdb/expect/Methods.expect.scala +++ b/tests/semanticdb/expect/Methods.expect.scala @@ -15,7 +15,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] { def m6/*<-example::Methods#m6().*/(x/*<-example::Methods#m6().(x)*/: 
Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+1).*/(x/*<-example::Methods#m6(+1).(x)*/: List/*->example::Methods#List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+2).*/(x/*<-example::Methods#m6(+2).(x)*/: scala.List/*->scala::package.List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ - def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/: Ordering/*->scala::math::Ordering#*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ + def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ def `m8()./*<-example::Methods#`m8().`().*/`() = ???/*->scala::Predef.`???`().*/ class `m9()./*<-example::Methods#`m9().`#*/` def m9/*<-example::Methods#m9().*/(x/*<-example::Methods#m9().(x)*/: `m9().`/*->example::Methods#`m9().`#*/) = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/expect/Synthetic.expect.scala b/tests/semanticdb/expect/Synthetic.expect.scala index a4419aa8bd82..4d797ce2b856 100644 --- a/tests/semanticdb/expect/Synthetic.expect.scala +++ b/tests/semanticdb/expect/Synthetic.expect.scala @@ -30,7 +30,7 @@ class Synthetic/*<-example::Synthetic#*/ { null.asInstanceOf/*->scala::Any#asInstanceOf().*/[Int/*->scala::Int#*/ => Int/*->scala::Int#*/](2) } - class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*//*<-example::Synthetic#J#evidence$1.*/: Manifest/*->scala::Predef.Manifest#*/] { 
val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } + class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*/: /*<-example::Synthetic#J#evidence$1.*/Manifest/*->scala::Predef.Manifest#*//*->example::Synthetic#J#[T]*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } class F/*<-example::Synthetic#F#*/ implicit val ordering/*<-example::Synthetic#ordering.*/: Ordering/*->scala::package.Ordering#*/[F/*->example::Synthetic#F#*/] = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 2120cc633da8..84c3e7c6a110 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2732,8 +2732,8 @@ Occurrences: [16:29..16:32): ??? -> scala/Predef.`???`(). [17:6..17:8): m7 <- example/Methods#m7(). [17:9..17:10): U <- example/Methods#m7().[U] -[17:10..17:10): <- example/Methods#m7().(evidence$1) -[17:12..17:20): Ordering -> scala/math/Ordering# +[17:12..17:20): Ordering -> example/Methods#m7().[U] +[17:12..17:12): <- example/Methods#m7().(evidence$1) [17:22..17:23): c <- example/Methods#m7().(c) [17:25..17:32): Methods -> example/Methods# [17:33..17:34): T -> example/Methods#[T] @@ -3533,7 +3533,7 @@ Uri => Synthetic.scala Text => empty Language => Scala Symbols => 52 entries -Occurrences => 136 entries +Occurrences => 137 entries Synthetics => 39 entries Symbols: @@ -3659,8 +3659,9 @@ Occurrences: [32:8..32:9): J <- example/Synthetic#J# [32:9..32:9): <- example/Synthetic#J#``(). [32:10..32:11): T <- example/Synthetic#J#[T] -[32:11..32:11): <- example/Synthetic#J#evidence$1. +[32:13..32:13): <- example/Synthetic#J#evidence$1. [32:13..32:21): Manifest -> scala/Predef.Manifest# +[32:13..32:21): Manifest -> example/Synthetic#J#[T] [32:29..32:32): arr <- example/Synthetic#J#arr. [32:35..32:40): Array -> scala/Array. 
[32:41..32:46): empty -> scala/Array.empty(). From a57a512663c383506999c833e1756d949b1e7cfb Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 12:27:52 +0200 Subject: [PATCH 349/465] Allow renamings `as N` in context bounds Also, provide the possibility to use the parameter name for single context bounds. This is controlled by a Config setting, which is off by default. [Cherry-picked a61d2bc7b5c4ba97c037a2e46856fb8290594310] --- .../src/dotty/tools/dotc/ast/Desugar.scala | 180 +++++++++++------- .../src/dotty/tools/dotc/config/Config.scala | 7 + .../dotty/tools/dotc/parsing/Parsers.scala | 16 +- docs/_docs/internals/syntax.md | 8 +- tests/pos/FromString-named.scala | 11 ++ 5 files changed, 146 insertions(+), 76 deletions(-) create mode 100644 tests/pos/FromString-named.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 774e77aa4b44..04fd1afca8be 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -10,7 +10,7 @@ import Annotations.Annotation import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, SrcPos, Chars} -import config.Feature.{sourceVersion, migrateTo3, enabled} +import config.{Feature, Config} import config.SourceVersion.* import collection.mutable import reporting.* @@ -46,6 +46,11 @@ object desugar { */ val UntupledParam: Property.Key[Unit] = Property.StickyKey() + /** An attachment key to indicate that a ValDef is an evidence parameter + * for a context bound. + */ + val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() + /** What static check should be applied to a Match? 
*/ enum MatchCheck { case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom @@ -195,17 +200,6 @@ object desugar { else vdef1 end valDef - def makeImplicitParameters( - tpts: List[Tree], implicitFlag: FlagSet, - mkParamName: Int => TermName, - forPrimaryConstructor: Boolean = false - )(using Context): List[ValDef] = - for (tpt, i) <- tpts.zipWithIndex yield { - val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param - val epname = mkParamName(i) - ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) - } - def mapParamss(paramss: List[ParamClause]) (mapTypeParam: TypeDef => TypeDef) (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = @@ -232,34 +226,57 @@ object desugar { private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) - private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = - val DefDef(_, paramss, tpt, rhs) = meth - val evidenceParamBuf = mutable.ListBuffer[ValDef]() + private def desugarContextBounds( + tdef: TypeDef, + evidenceBuf: mutable.ListBuffer[ValDef], + flags: FlagSet, + freshName: untpd.Tree => TermName, + allParamss: List[ParamClause])(using Context): TypeDef = - var seenContextBounds: Int = 0 - def desugarContextBounds(rhs: Tree): Tree = rhs match + val evidenceNames = mutable.ListBuffer[TermName]() + + def desugarRhs(rhs: Tree): Tree = rhs match case ContextBounds(tbounds, cxbounds) => - val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit - evidenceParamBuf ++= makeImplicitParameters( - cxbounds, iflag, - // Just like with `makeSyntheticParameter` on nameless parameters of - // using clauses, we only need names that are unique among the - // parameters of the method since shadowing does not affect - // implicit resolution in Scala 3. 
- mkParamName = i => - val index = seenContextBounds + 1 // Start at 1 like FreshNameCreator. - val ret = ContextBoundParamName(EmptyTermName, index) - seenContextBounds += 1 - ret, - forPrimaryConstructor = isPrimaryConstructor) + for bound <- cxbounds do + val evidenceName = bound match + case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => + ownName + case _ if Config.nameSingleContextBounds && cxbounds.tail.isEmpty + && Feature.enabled(Feature.modularity) => + tdef.name.toTermName + case _ => + freshName(bound) + evidenceNames += evidenceName + val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(flags) + evidenceParam.pushAttachment(ContextBoundParam, ()) + evidenceBuf += evidenceParam tbounds case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) + cpy.LambdaTypeTree(rhs)(tparams, desugarRhs(body)) case _ => rhs + + cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + end desugarContextBounds + + private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = + val DefDef(_, paramss, tpt, rhs) = meth + val evidenceParamBuf = mutable.ListBuffer[ValDef]() + + var seenContextBounds: Int = 0 + def freshName(unused: Tree) = + seenContextBounds += 1 // Start at 1 like FreshNameCreator. + ContextBoundParamName(EmptyTermName, seenContextBounds) + // Just like with `makeSyntheticParameter` on nameless parameters of + // using clauses, we only need names that are unique among the + // parameters of the method since shadowing does not affect + // implicit resolution in Scala 3. 
+ val paramssNoContextBounds = + val iflag = if Feature.sourceVersion.isAtLeast(`future`) then Given else Implicit + val flags = if isPrimaryConstructor then iflag | LocalParamAccessor else iflag | Param mapParamss(paramss) { - tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) + tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) }(identity) rhs match @@ -399,43 +416,70 @@ object desugar { (Nil, tree) /** Add all evidence parameters in `params` as implicit parameters to `meth`. - * If the parameters of `meth` end in an implicit parameter list or using clause, - * evidence parameters are added in front of that list. Otherwise they are added - * as a separate parameter clause. + * The position of the added parameters is determined as follows: + * + * - If there is an existing parameter list that refers to one of the added + * parameters in one of its parameter types, add the new parameters + * in front of the first such parameter list. + * - Otherwise, if the last parameter list consists implicit or using parameters, + * join the new parameters in front of this parameter list, creating one + * parameter list (this is equilavent to Scala 2's scheme). + * - Otherwise, add the new parameter list at the end as a separate parameter clause. 
*/ private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = - params match + if params.isEmpty then return meth + + val boundNames = params.map(_.name).toSet + + //println(i"add ev params ${meth.name}, ${boundNames.toList}") + + def references(vdef: ValDef): Boolean = + vdef.tpt.existsSubTree: + case Ident(name: TermName) => boundNames.contains(name) + case _ => false + + def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match + case ValDefs(mparams) :: _ if mparams.exists(references) => + params :: mparamss + case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => + (params ++ mparams) :: Nil + case mparams :: mparamss1 => + mparams :: recur(mparamss1) case Nil => - meth - case evidenceParams => - val paramss1 = meth.paramss.reverse match - case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => - ((evidenceParams ++ vparams) :: rparamss).reverse - case _ => - meth.paramss :+ evidenceParams - cpy.DefDef(meth)(paramss = paramss1) + params :: Nil + + cpy.DefDef(meth)(paramss = recur(meth.paramss)) + end addEvidenceParams /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = meth.paramss.reverse match { case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.name.is(ContextBoundParamName)) + vparams.takeWhile(_.hasAttachment(ContextBoundParam)) case _ => Nil } @sharable private val synthetic = Modifiers(Synthetic) - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { - var mods = tparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) + /** Filter annotations in `mods` according to `keep` */ + private def filterAnnots(mods: Modifiers, keep: Boolean)(using Context) = + if keep then mods else mods.withAnnotations(Nil) + + private def 
toDefParam(tparam: TypeDef, keepAnnotations: Boolean)(using Context): TypeDef = + val mods = filterAnnots(tparam.rawMods, keepAnnotations) tparam.withMods(mods & EmptyFlags | Param) - } - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { - var mods = vparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) + + private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean)(using Context): ValDef = { + val mods = filterAnnots(vparam.rawMods, keepAnnotations) val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) + // Need to ensure that tree is duplicated since term parameters can be watched + // and cloning a term parameter will copy its watchers to the clone, which means + // we'd get cross-talk between the original parameter and the clone. + ValDef(vparam.name, vparam.tpt, vparam.rhs) + .withSpan(vparam.span) + .withAttachmentsFrom(vparam) + .withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = @@ -609,6 +653,11 @@ object desugar { case _ => false } + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + case PostfixOp(_, Ident(tpnme.raw.STAR)) => true + case _ => false + } + def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { val targs = for (tparam <- tparams) yield { val targ = refOfDef(tparam) @@ -625,11 +674,6 @@ object desugar { appliedTypeTree(tycon, targs) } - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { - case PostfixOp(_, Ident(tpnme.raw.STAR)) => true - case _ => false - } - // a reference to the class type bound by `cdef`, with type parameters coming from the constructor val classTypeRef = appliedRef(classTycon) @@ -667,7 +711,7 @@ object desugar { } ensureApplied(nu) - val copiedAccessFlags = if 
migrateTo3 then EmptyFlags else AccessFlags + val copiedAccessFlags = if Feature.migrateTo3 then EmptyFlags else AccessFlags // Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) // def _1: T1 = this.p1 @@ -850,12 +894,11 @@ object desugar { Nil } else { - val defParamss = constrVparamss match { + val defParamss = constrVparamss match case Nil :: paramss => paramss // drop leading () that got inserted by class // TODO: drop this once we do not silently insert empty class parameters anymore case paramss => paramss - } val finalFlag = if ctx.settings.YcompileScala2Library.value then EmptyFlags else Final // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. @@ -1681,14 +1724,13 @@ object desugar { .collect: case vd: ValDef => vd - def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = { - val mods = Given - val params = makeImplicitParameters(formals, mods, - mkParamName = i => - if paramNamesOrNil.isEmpty then ContextFunctionParamName.fresh() - else paramNamesOrNil(i)) - FunctionWithMods(params, body, Modifiers(mods), erasedParams) - } + def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = + val paramNames = + if paramNamesOrNil.nonEmpty then paramNamesOrNil + else formals.map(_ => ContextFunctionParamName.fresh()) + val params = for (tpt, pname) <- formals.zip(paramNames) yield + ValDef(pname, tpt, EmptyTree).withFlags(Given | Param) + FunctionWithMods(params, body, Modifiers(Given), erasedParams) private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { val vdef = ValDef(named.name.asTermName, tpt, rhs) diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala 
b/compiler/src/dotty/tools/dotc/config/Config.scala index 2746476261e5..293044c245ef 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -235,4 +235,11 @@ object Config { */ inline val checkLevelsOnConstraints = false inline val checkLevelsOnInstantiation = true + + /** If a type parameter `X` has a single context bound `X: C`, should the + * witness parameter be named `X`? This would prevent the creation of a + * context bound companion. + */ + inline val nameSingleContextBounds = false } + diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 8680ba8c1335..bbc4096f266b 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -2196,9 +2196,9 @@ object Parsers { if (in.token == tok) { in.nextToken(); toplevelTyp() } else EmptyTree - /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} + /** TypeAndCtxBounds ::= TypeBounds [`:` ContextBounds] */ - def typeParamBounds(pname: TypeName): Tree = { + def typeAndCtxBounds(pname: TypeName): Tree = { val t = typeBounds() val cbs = contextBounds(pname) if (cbs.isEmpty) t @@ -2207,8 +2207,16 @@ object Parsers { /** ContextBound ::= Type [`as` id] */ def contextBound(pname: TypeName): Tree = - ContextBoundTypeTree(toplevelTyp(), pname, EmptyTermName) + val t = toplevelTyp() + val ownName = + if isIdent(nme.as) && in.featureEnabled(Feature.modularity) then + in.nextToken() + ident() + else EmptyTermName + ContextBoundTypeTree(t, pname, ownName) + /** ContextBounds ::= ContextBound | `{` ContextBound {`,` ContextBound} `}` + */ def contextBounds(pname: TypeName): List[Tree] = if in.isColon then in.nextToken() @@ -3411,7 +3419,7 @@ object Parsers { } else ident().toTypeName val hkparams = typeParamClauseOpt(ParamOwner.Type) - val bounds = if (isAbstractOwner) typeBounds() else typeParamBounds(name) + val bounds = if 
(isAbstractOwner) typeBounds() else typeAndCtxBounds(name) TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) } } diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index db858ba05fbc..e123fa900258 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -221,7 +221,9 @@ IntoTargetType ::= Type TypeArgs ::= ‘[’ Types ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) -TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] ContextBounds(typeBounds, tps) +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} NamesAndTypes ::= NameAndType {‘,’ NameAndType} NameAndType ::= id ':' Type @@ -359,7 +361,7 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ```ebnf ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) - id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) + id [HkTypeParamClause] TypeAndCtxBounds Bound(below, above, context) TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -384,7 +386,7 @@ TypelessClause ::= DefTermParamClause | UsingParamClause DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ diff --git a/tests/pos/FromString-named.scala b/tests/pos/FromString-named.scala new file mode 100644 index 000000000000..efa0882ae347 --- /dev/null +++ 
b/tests/pos/FromString-named.scala @@ -0,0 +1,11 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[A]: + def fromString(s: String): A + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString as N, Numeric as num}](a: String, b: String): N = + num.plus(N.fromString(a), N.fromString(b)) From 9a96cf0ee5f7cb511d32679fab85dd18e575c00e Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 13:38:52 +0200 Subject: [PATCH 350/465] Implement `deferred` givens A definition like `given T = deferred` in a trait will be expanded to an abstract given in the trait that is implemented automatically in all classes inheriting the trait. [Cherry-picked b48fb99fd607bd3955477db8c1d94ceec295b1a1] --- .../dotty/tools/dotc/core/Definitions.scala | 1 + .../src/dotty/tools/dotc/core/Flags.scala | 1 + .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../dotty/tools/dotc/transform/Erasure.scala | 8 +- .../dotty/tools/dotc/typer/Implicits.scala | 4 +- .../src/dotty/tools/dotc/typer/Namer.scala | 12 + .../dotty/tools/dotc/typer/RefChecks.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 81 +++- .../test/dotc/pos-test-pickling.blacklist | 4 +- library/src/scala/compiletime/package.scala | 13 + tests/neg/deferred-givens.check | 13 + tests/neg/deferred-givens.scala | 30 ++ tests/neg/deferredSummon.check | 17 + tests/neg/deferredSummon.scala | 19 + tests/pos/deferred-givens.scala | 26 ++ tests/pos/deferredSummon.scala | 30 ++ .../pos/hylolib-deferred-given-extract.scala | 19 + .../AnyCollection.scala | 69 ++++ .../pos/hylolib-deferred-given/AnyValue.scala | 76 ++++ .../pos/hylolib-deferred-given/BitArray.scala | 375 ++++++++++++++++++ .../hylolib-deferred-given/Collection.scala | 281 +++++++++++++ .../hylolib-deferred-given/CoreTraits.scala | 57 +++ tests/pos/hylolib-deferred-given/Hasher.scala | 38 ++ .../pos/hylolib-deferred-given/HyArray.scala | 224 +++++++++++ 
.../pos/hylolib-deferred-given/Integers.scala | 58 +++ tests/pos/hylolib-deferred-given/Range.scala | 37 ++ tests/pos/hylolib-deferred-given/Slice.scala | 49 +++ .../StringConvertible.scala | 14 + 28 files changed, 1545 insertions(+), 14 deletions(-) create mode 100644 tests/neg/deferred-givens.check create mode 100644 tests/neg/deferred-givens.scala create mode 100644 tests/neg/deferredSummon.check create mode 100644 tests/neg/deferredSummon.scala create mode 100644 tests/pos/deferred-givens.scala create mode 100644 tests/pos/deferredSummon.scala create mode 100644 tests/pos/hylolib-deferred-given-extract.scala create mode 100644 tests/pos/hylolib-deferred-given/AnyCollection.scala create mode 100644 tests/pos/hylolib-deferred-given/AnyValue.scala create mode 100644 tests/pos/hylolib-deferred-given/BitArray.scala create mode 100644 tests/pos/hylolib-deferred-given/Collection.scala create mode 100644 tests/pos/hylolib-deferred-given/CoreTraits.scala create mode 100644 tests/pos/hylolib-deferred-given/Hasher.scala create mode 100644 tests/pos/hylolib-deferred-given/HyArray.scala create mode 100644 tests/pos/hylolib-deferred-given/Integers.scala create mode 100644 tests/pos/hylolib-deferred-given/Range.scala create mode 100644 tests/pos/hylolib-deferred-given/Slice.scala create mode 100644 tests/pos/hylolib-deferred-given/StringConvertible.scala diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 15880207b3c8..9ee5891f1606 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -240,6 +240,7 @@ class Definitions { @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") + @tu lazy val 
Compiletime_deferred : Symbol = CompiletimePackageClass.requiredMethod("deferred") @tu lazy val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 2bc7610bb0ce..e17834d61fdc 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -573,6 +573,7 @@ object Flags { val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without right-hand sides val DeferredOrTypeParam: FlagSet = Deferred | TypeParam // type symbols without right-hand sides + val DeferredGivenFlags = Deferred | Given | HasDefault val EnumValue: FlagSet = Enum | StableRealizable // A Scala enum value val FinalOrInline: FlagSet = Final | Inline val FinalOrModuleClass: FlagSet = Final | ModuleClass // A module class or a final class diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 7545cf5c4ba1..c0eb8a690eb4 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -455,6 +455,7 @@ object StdNames { val create: N = "create" val currentMirror: N = "currentMirror" val curried: N = "curried" + val deferred: N = "deferred" val definitions: N = "definitions" val delayedInit: N = "delayedInit" val delayedInitArg: N = "delayedInit$body" diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 8bfbb90a0700..a25a2fcb5c6d 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -567,7 +567,13 @@ object Erasure { case Some(annot) => val message = annot.argumentConstant(0) match case Some(c) => - c.stringValue.toMessage + val addendum = tree match + case tree: RefTree + if tree.symbol == defn.Compiletime_deferred && tree.name != nme.deferred => + i".\nNote that `deferred` can only be used under its own name when implementing a given in a trait; `${tree.name}` is not accepted." + case _ => + "" + (c.stringValue ++ addendum).toMessage case _ => em"""Reference to ${tree.symbol.showLocated} should not have survived, |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index bc19e97b85d8..5ac12ce1aa0c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -924,10 +924,10 @@ trait Implicits: /** Search an implicit argument and report error if not found */ - def implicitArgTree(formal: Type, span: Span)(using Context): Tree = { + def implicitArgTree(formal: Type, span: Span, where: => String = "")(using Context): Tree = { val arg = inferImplicitArg(formal, span) if (arg.tpe.isInstanceOf[SearchFailureType]) - report.error(missingArgMsg(arg, formal, ""), ctx.source.atSpan(span)) + report.error(missingArgMsg(arg, formal, where), ctx.source.atSpan(span)) arg } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index e48c2fdf5066..22a12ed0f468 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1830,6 +1830,18 @@ class Namer { typer: Typer => case _ => WildcardType } + + // translate `given T = deferred` to an abstract given with HasDefault flag + if sym.is(Given) then + mdef.rhs match + case rhs: RefTree + if rhs.name == 
nme.deferred + && typedAheadExpr(rhs).symbol == defn.Compiletime_deferred + && sym.maybeOwner.is(Trait) => + sym.resetFlag(Final) + sym.setFlag(Deferred | HasDefault) + case _ => + val mbrTpe = paramFn(checkSimpleKinded(typedAheadType(mdef.tpt, tptProto)).tpe) if (ctx.explicitNulls && mdef.mods.is(JavaDefined)) JavaNullInterop.nullifyMember(sym, mbrTpe, mdef.mods.isAllOf(JavaEnumValue)) diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 7cd1d67e9aa5..266b69d029c1 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -552,7 +552,7 @@ object RefChecks { overrideError("is an extension method, cannot override a normal method") else if (other.is(ExtensionMethod) && !member.is(ExtensionMethod)) // (1.3) overrideError("is a normal method, cannot override an extension method") - else if !other.is(Deferred) + else if (!other.is(Deferred) || other.isAllOf(Given | HasDefault)) && !member.is(Deferred) && !other.name.is(DefaultGetterName) && !member.isAnyOverride diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index b90b742aa0ec..c467a4507730 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2649,12 +2649,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) - if (sym.isOneOf(GivenOrImplicit)) checkImplicitConversionDefOK(sym) + if sym.is(Implicit) then checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) val tpt1 = checkSimpleKinded(typedType(tpt)) val rhs1 = vdef.rhs match { - case rhs @ Ident(nme.WILDCARD) => rhs withType tpt1.tpe - case rhs => typedExpr(rhs, tpt1.tpe.widenExpr) + case rhs @ Ident(nme.WILDCARD) => + rhs.withType(tpt1.tpe) + case rhs: 
RefTree + if rhs.name == nme.deferred && sym.isAllOf(DeferredGivenFlags, butNot = Param) => + EmptyTree + case rhs => + typedExpr(rhs, tpt1.tpe.widenExpr) } val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) postProcessInfo(vdef1, sym) @@ -2715,9 +2720,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.isInlineMethod then rhsCtx.addMode(Mode.InlineableBody) if sym.is(ExtensionMethod) then rhsCtx.addMode(Mode.InExtensionMethod) - val rhs1 = PrepareInlineable.dropInlineIfError(sym, - if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) - else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) + val rhs1 = ddef.rhs match + case Ident(nme.deferred) if sym.isAllOf(DeferredGivenFlags) => + EmptyTree + case rhs => + PrepareInlineable.dropInlineIfError(sym, + if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) + else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then if StagingLevel.level > 0 then @@ -2898,6 +2907,59 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case None => body + /** Implement givens that were declared with a `deferred` rhs. + * A given value matching the declared type is searched for in a + * context directly enclosing the current class, in which all given + * parameters of the current class are also defined. + */ + def implementDeferredGivens(body: List[Tree]): List[Tree] = + if cls.is(Trait) || ctx.isAfterTyper then body + else + def isGivenValue(mbr: TermRef) = + val dcl = mbr.symbol + if dcl.is(Method) then + report.error( + em"""Cannnot infer the implementation of the deferred ${dcl.showLocated} + |since that given is parameterized. 
An implementing given needs to be written explicitly.""", + cdef.srcPos) + false + else true + + def givenImpl(mbr: TermRef): ValDef = + val dcl = mbr.symbol + val target = dcl.info.asSeenFrom(cls.thisType, dcl.owner) + val constr = cls.primaryConstructor + val usingParamAccessors = cls.paramAccessors.filter(_.is(Given)) + val paramScope = newScopeWith(usingParamAccessors*) + val searchCtx = ctx.outer.fresh.setScope(paramScope) + val rhs = implicitArgTree(target, cdef.span, + where = i"inferring the implementation of the deferred ${dcl.showLocated}" + )(using searchCtx) + + val impl = dcl.copy(cls, + flags = dcl.flags &~ (HasDefault | Deferred) | Final | Override, + info = target, + coord = rhs.span).entered.asTerm + + def anchorParams = new TreeMap: + override def transform(tree: Tree)(using Context): Tree = tree match + case id: Ident if usingParamAccessors.contains(id.symbol) => + cpy.Select(id)(This(cls), id.name) + case _ => + super.transform(tree) + ValDef(impl, anchorParams.transform(rhs)) + end givenImpl + + val givenImpls = + cls.thisType.implicitMembers + //.showing(i"impl def givens for $cls/$result") + .filter(_.symbol.isAllOf(DeferredGivenFlags, butNot = Param)) + //.showing(i"impl def filtered givens for $cls/$result") + .filter(isGivenValue) + .map(givenImpl) + body ++ givenImpls + end implementDeferredGivens + ensureCorrectSuperClass() completeAnnotations(cdef, cls) val constr1 = typed(constr).asInstanceOf[DefDef] @@ -2919,9 +2981,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else { val dummy = localDummy(cls, impl) val body1 = - addParentRefinements( - addAccessorDefs(cls, - typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1)) + implementDeferredGivens( + addParentRefinements( + addAccessorDefs(cls, + typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1))) checkNoDoubleDeclaration(cls) val impl1 = cpy.Template(impl)(constr1, parents1, Nil, self1, body1) diff --git 
a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 3b14ce28569d..5c715faa504b 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -103,7 +103,7 @@ i13842.scala # Position change under captureChecking boxmap-paper.scala -# Function types print differnt after unpickling since test mispredicts Feature.preFundsEnabled +# Function types print different after unpickling since test mispredicts Feature.preFundsEnabled caps-universal.scala # GADT cast applied to singleton type difference @@ -128,6 +128,8 @@ i20053b.scala parsercombinators-givens.scala parsercombinators-givens-2.scala parsercombinators-arrow.scala +hylolib-deferred-given + diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index 3eca997554a0..be76941a680b 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -42,6 +42,19 @@ def erasedValue[T]: T = erasedValue[T] @compileTimeOnly("`uninitialized` can only be used as the right hand side of a mutable field definition") def uninitialized: Nothing = ??? +/** Used as the right hand side of a given in a trait, like this + * + * ``` + * given T = deferred + * ``` + * + * This signifies that the given will get a synthesized definition in all classes + * that implement the enclosing trait and that do not contain an explicit overriding + * definition of that given. + */ +@compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait") +def deferred: Nothing = ??? + /** The error method is used to produce user-defined compile errors during inline expansion. * If an inline expansion results in a call error(msgStr) the compiler produces an error message containing the given msgStr. 
* diff --git a/tests/neg/deferred-givens.check b/tests/neg/deferred-givens.check new file mode 100644 index 000000000000..cc15901d087f --- /dev/null +++ b/tests/neg/deferred-givens.check @@ -0,0 +1,13 @@ +-- [E172] Type Error: tests/neg/deferred-givens.scala:11:6 ------------------------------------------------------------- +11 |class B extends A // error + |^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- [E172] Type Error: tests/neg/deferred-givens.scala:13:15 ------------------------------------------------------------ +13 |abstract class C extends A // error + |^^^^^^^^^^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- Error: tests/neg/deferred-givens.scala:26:8 ------------------------------------------------------------------------- +26 | class E extends A2 // error, can't summon polymorphic given + | ^^^^^^^^^^^^^^^^^^ + | Cannnot infer the implementation of the deferred given instance given_Ctx3_T in trait A2 + | since that given is parameterized. An implementing given needs to be written explicitly. 
diff --git a/tests/neg/deferred-givens.scala b/tests/neg/deferred-givens.scala new file mode 100644 index 000000000000..7ff67d784714 --- /dev/null +++ b/tests/neg/deferred-givens.scala @@ -0,0 +1,30 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +class Ctx +class Ctx2 + +trait A: + given Ctx as ctx = deferred + given Ctx2 = deferred + +class B extends A // error + +abstract class C extends A // error + +class D extends A: + given Ctx as ctx = Ctx() // ok, was implemented + given Ctx2 = Ctx2() // ok + +class Ctx3[T] + +trait A2: + given [T] => Ctx3[T] = deferred + +object O: + given [T] => Ctx3[T] = Ctx3[T]() + class E extends A2 // error, can't summon polymorphic given + +class E extends A2: + given [T] => Ctx3[T] = Ctx3[T]() // ok + diff --git a/tests/neg/deferredSummon.check b/tests/neg/deferredSummon.check new file mode 100644 index 000000000000..bd76ad73467e --- /dev/null +++ b/tests/neg/deferredSummon.check @@ -0,0 +1,17 @@ +-- Error: tests/neg/deferredSummon.scala:4:26 -------------------------------------------------------------------------- +4 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:7:26 -------------------------------------------------------------------------- +7 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:12:16 ------------------------------------------------------------------------- +12 | given Int = deferred // error + | ^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:16:14 ------------------------------------------------------------------------- +16 | given Int = defered // error + | ^^^^^^^ + 
|`deferred` can only be used as the right hand side of a given definition in a trait. + |Note that `deferred` can only be used under its own name when implementing a given in a trait; `defered` is not accepted. diff --git a/tests/neg/deferredSummon.scala b/tests/neg/deferredSummon.scala new file mode 100644 index 000000000000..cddde82535fb --- /dev/null +++ b/tests/neg/deferredSummon.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity + +object Test: + given Int = compiletime.deferred // error + +abstract class C: + given Int = compiletime.deferred // error + +trait A: + import compiletime.deferred + locally: + given Int = deferred // error + +trait B: + import compiletime.deferred as defered + given Int = defered // error + + + diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala new file mode 100644 index 000000000000..51fa43866d1e --- /dev/null +++ b/tests/pos/deferred-givens.scala @@ -0,0 +1,26 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +class Ord[Elem] + +given Ord[Double] + +trait B: + type Elem + given Ord[Elem] = deferred + def foo = summon[Ord[Elem]] + +class C extends B: + type Elem = String + override given Ord[Elem] = ??? 
+ +def bar(using Ord[String]) = 1 + +class D(using Ord[String]) extends B: + type Elem = String + +class E(using x: Ord[String]) extends B: + type Elem = String + override given Ord[Elem] = x + +class F[X: Ord] extends B: + type Elem = X diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala new file mode 100644 index 000000000000..d12a98e52736 --- /dev/null +++ b/tests/pos/deferredSummon.scala @@ -0,0 +1,30 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +trait Ord[Self]: + def less(x: Self, y: Self): Boolean + +trait A: + type Elem + given Ord[Elem] = deferred + def foo = summon[Ord[Elem]] + +object Inst: + given Ord[Int]: + def less(x: Int, y: Int) = x < y + +object Test: + import Inst.given + class C extends A: + type Elem = Int + object E extends A: + type Elem = Int + given A: + type Elem = Int + +class D[T: Ord] extends A: + type Elem = T + + + + diff --git a/tests/pos/hylolib-deferred-given-extract.scala b/tests/pos/hylolib-deferred-given-extract.scala new file mode 100644 index 000000000000..02d889dc9aac --- /dev/null +++ b/tests/pos/hylolib-deferred-given-extract.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity -source future +package hylotest +import compiletime.deferred + +trait Value[Self] + +/** A collection of elements accessible by their position. */ +trait Collection[Self]: + + /** The type of the elements in the collection. */ + type Element + given elementIsValue: Value[Element] = compiletime.deferred + +class BitArray + +given Value[Boolean] {} + +given Collection[BitArray] with + type Element = Boolean diff --git a/tests/pos/hylolib-deferred-given/AnyCollection.scala b/tests/pos/hylolib-deferred-given/AnyCollection.scala new file mode 100644 index 000000000000..55e453d6dc87 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/AnyCollection.scala @@ -0,0 +1,69 @@ +package hylo + +/** A type-erased collection. 
+ * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base](using b: Collection[Base])(base: Base): AnyCollection[b.Element] = + // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` + // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these + // choices is even correct! Note also that the ambiguity is suppressed if the constructor of + // `AnyValue` is declared with a context bound rather than an implicit parameter. + given Value[b.Position] = b.positionIsValue + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[b.Position])) + + def at(p: AnyValue): b.Element = + base.at(p.unsafelyUnwrappedAs[b.Position]) + + new AnyCollection[b.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { + + type Element = T + //given elementIsValue: Value[Element] = tIsValue + + type Position = AnyValue + given positionIsValue: Value[Position] = anyValueIsValue + + extension (self: AnyCollection[T]) { + + def startPosition = + self._start() + + def endPosition = + self._end() + + def positionAfter(p: Position) = + self._after(p) + + def at(p: Position) = + self._at(p) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/AnyValue.scala b/tests/pos/hylolib-deferred-given/AnyValue.scala new file mode 100644 index 000000000000..b9d39869c09a --- /dev/null +++ b/tests/pos/hylolib-deferred-given/AnyValue.scala 
@@ -0,0 +1,76 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. 
*/ + def apply[T](using Value[T])(wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given anyValueIsValue: Value[AnyValue] with { + + extension (self: AnyValue) { + + def copy(): AnyValue = + self.copy() + + def eq(other: AnyValue): Boolean = + self `eq` other + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/BitArray.scala b/tests/pos/hylolib-deferred-given/BitArray.scala new file mode 100644 index 000000000000..485f30472847 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/BitArray.scala @@ -0,0 +1,375 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. 
*/ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. 
*/ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. + */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). 
+ */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. 
*/ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. 
*/ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given bitArrayPositionIsValue: Value[BitArray.Position] with { + + extension (self: BitArray.Position) { + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} + +given bitArrayIsCollection: Collection[BitArray] with { + + type Element = Boolean + //given elementIsValue: Value[Boolean] = booleanIsValue + + type Position = BitArray.Position + given positionIsValue: Value[BitArray.Position] = bitArrayPositionIsValue + + extension (self: BitArray) { + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + + } + +} + +given bitArrayIsStringConvertible: StringConvertible[BitArray] with { + + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + +} diff --git a/tests/pos/hylolib-deferred-given/Collection.scala b/tests/pos/hylolib-deferred-given/Collection.scala new file mode 100644 index 000000000000..6b5e7a762dc8 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Collection.scala @@ -0,0 +1,281 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection[Self] { + + /** The type of the elements in the collection. */ + type Element + given elementIsValue: Value[Element] = compiletime.deferred + + /** The type of a position in the collection. 
*/ + type Position + given positionIsValue: Value[Position] + + extension (self: Self) { + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def _count(p: Position, n: Int): Int = + if p `eq` e then n else _count(self.positionAfter(p), n + 1) + _count(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if (i.eq(e)) { + false + } else if (j.eq(e)) { + true + } else { + def _isBefore(n: Position): Boolean = + if (n.eq(j)) { + true + } else if (n.eq(e)) { + false + } else { + _isBefore(self.positionAfter(n)) + } + _isBefore(self.positionAfter(i)) + } + + } + +} + +extension [Self](self: Self)(using s: Collection[Self]) { + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. 
+ * + * @complexity + * O(1) + */ + def headAndTail: Option[(s.Element, Slice[Self])] = + if (self.isEmpty) { + None + } else { + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + } + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T, combine: (T, s.Element) => T): T = + val e = self.endPosition + def loop(p: s.Position, r: T): T = + if (p.eq(e)) { + r + } else { + loop(self.positionAfter(p), combine(r, self.at(p))) + } + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: (s.Element) => Boolean): Boolean = + val e = self.endPosition + def loop(p: s.Position): Boolean = + if (p.eq(e)) { + true + } else if (!action(self.at(p))) { + false + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T](using Value[T])(transform: (s.Element) => T): HyArray[T] = + self.reduce( + HyArray[T](), + (r, e) => r.append(transform(e), assumeUniqueness = true) + ) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def filter(isIncluded: (s.Element) => Boolean): HyArray[s.Element] = + self.reduce( + HyArray[s.Element](), + (r, e) => if (isIncluded(e)) then r.append(e, assumeUniqueness = true) else r + ) + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def firstPositionWhere(predicate: (s.Element) => Boolean): Option[s.Position] = + val e = self.endPosition + def loop(p: s.Position): Option[s.Position] = + if (p.eq(e)) { + None + } else if (predicate(self.at(p))) { + Some(p) + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Comparable[s.Element]): Option[s.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def maxElement(isGreaterThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Comparable[s.Element]): Option[s.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (s.Element, s.Element) => Boolean): Option[s.Element] = + if (self.isEmpty) { + None + } else { + val e = self.endPosition + def _least(p: s.Position, least: s.Element): s.Element = + if (p.eq(e)) { + least + } else { + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + _least(self.positionAfter(p), y) + } + + val b = self.startPosition + Some(_least(self.positionAfter(b), self.at(b))) + } + +} + +extension [Self](self: Self)(using + s: Collection[Self], + e: Value[s.Element] +) { + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. */ + def elementsEqual[T](using o: Collection[T] { type Element = s.Element })(other: T): Boolean = + def loop(i: s.Position, j: o.Position): Boolean = + if (i `eq` self.endPosition) { + j `eq` other.endPosition + } else if (j `eq` other.endPosition) { + false + } else if (self.at(i) `neq` other.at(j)) { + false + } else { + loop(self.positionAfter(i), other.positionAfter(j)) + } + loop(self.startPosition, other.startPosition) + +} diff --git a/tests/pos/hylolib-deferred-given/CoreTraits.scala b/tests/pos/hylolib-deferred-given/CoreTraits.scala new file mode 100644 index 000000000000..01b2c5242af9 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/CoreTraits.scala @@ -0,0 +1,57 @@ +package hylo + +/** A type whose instance can be treated as independent values. 
+ * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value[Self] { + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher + + } + +} + +extension [Self: Value](self: Self) def neq(other: Self): Boolean = !self.eq(other) + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable[Self] extends Value[Self] { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib-deferred-given/Hasher.scala b/tests/pos/hylolib-deferred-given/Hasher.scala new file mode 100644 index 000000000000..ef6813df6b60 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Hasher.scala @@ -0,0 +1,38 @@ +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. 
*/ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib-deferred-given/HyArray.scala b/tests/pos/hylolib-deferred-given/HyArray.scala new file mode 100644 index 000000000000..98632dcb65bc --- /dev/null +++ b/tests/pos/hylolib-deferred-given/HyArray.scala @@ -0,0 +1,224 @@ +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element] private (using + elementIsValue: Value[Element] +)( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. 
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + // NOTE: Can't refine `C.Element` without renaming the generic parameter of `HyArray`. + // /** Adds the contents of `source` at the end of the array. */ + // def appendContents[C](using + // s: Collection[C] + // )( + // source: C { type Element = Element }, + // assumeUniqueness: Boolean = false + // ): HyArray[Element] = + // val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + // source.reduce(result, (r, e) => r.append(e, assumeUniqueness = true)) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. 
*/ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. 
*/ + def apply[T](using t: Value[T])(elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { + + extension (self: HyArray[T]) { + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher, (h, e) => e.hashInto(h)) + + } + +} + +given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { + + type Element = T + //given elementIsValue: Value[T] = tIsValue + + type Position = Int + given positionIsValue: Value[Int] = intIsValue + + extension (self: HyArray[T]) { + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + + } + +} + +// NOTE: This should work. 
+// given hyArrayIsStringConvertible[T](using +// tIsValue: Value[T], +// tIsStringConvertible: StringConvertible[T] +// ): StringConvertible[HyArray[T]] with { +// +// given Collection[HyArray[T]] = hyArrayIsCollection[T] +// +// extension (self: HyArray[T]) +// override def description: String = +// var contents = mutable.StringBuilder() +// self.forEach((e) => { contents ++= e.description; true }) +// s"[${contents.mkString(", ")}]" +// +// } diff --git a/tests/pos/hylolib-deferred-given/Integers.scala b/tests/pos/hylolib-deferred-given/Integers.scala new file mode 100644 index 000000000000..b9bc203a88ea --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Integers.scala @@ -0,0 +1,58 @@ +package hylo + +given booleanIsValue: Value[Boolean] with { + + extension (self: Boolean) { + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + + } + +} + +given intIsValue: Value[Int] with { + + extension (self: Int) { + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + } + +} + +given intIsComparable: Comparable[Int] with { + + extension (self: Int) { + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + + } + +} + +given intIsStringConvertible: StringConvertible[Int] with {} diff --git a/tests/pos/hylolib-deferred-given/Range.scala b/tests/pos/hylolib-deferred-given/Range.scala new file mode 100644 index 000000000000..1f597652ead1 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. 
*/ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound](lowerBound: Bound, upperBound: Bound)(using Comparable[Bound]) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib-deferred-given/Slice.scala b/tests/pos/hylolib-deferred-given/Slice.scala new file mode 100644 index 000000000000..57cdb38f6e53 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Slice.scala @@ -0,0 +1,49 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base](using + val b: Collection[Base] +)( + val base: Base, + val bounds: Range[b.Position] +) { + + /** Returns `true` iff `this` is empty. 
*/ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: b.Position = + bounds.lowerBound + + def endPosition: b.Position = + bounds.upperBound + + def positionAfter(p: b.Position): b.Position = + base.positionAfter(p) + + def at(p: b.Position): b.Element = + base.at(p) + +} + +given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { + + type Element = c.Element + //given elementIsValue: Value[Element] = c.elementIsValue + + type Position = c.Position + given positionIsValue: Value[Position] = c.positionIsValue + + extension (self: Slice[T]) { + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] // NOTE: Ugly hack + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/StringConvertible.scala b/tests/pos/hylolib-deferred-given/StringConvertible.scala new file mode 100644 index 000000000000..0702f79f2794 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/StringConvertible.scala @@ -0,0 +1,14 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible[Self] { + + extension (self: Self) { + + /** Returns a textual description of `self`. */ + def description: String = + self.toString + + } + +} From 6016ce99a66aac5e6ff6da801f3c01743ed35cf2 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 17:06:18 +0200 Subject: [PATCH 351/465] FIX: Allow ContextBoundParamNames to be unmangled. Also, fix the unmangling of UniqueExtNames, which seemingly never worked. 
[Cherry-picked 600293ee2a74e945ad8870b9034b416e2294c0e6] --- .../src/dotty/tools/dotc/core/NameKinds.scala | 37 +++++++++---------- 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index d4f009cbbbd5..74d440562824 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -182,13 +182,13 @@ object NameKinds { case DerivedName(underlying, info: this.NumberedInfo) => Some((underlying, info.num)) case _ => None } - protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = { + protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = var i = name.length - while (i > 0 && name(i - 1).isDigit) i -= 1 - if (i > separator.length && i < name.length && - name.slice(i - separator.length, i).toString == separator) i + while i > 0 && name(i - 1).isDigit do i -= 1 + if i >= separator.length && i < name.length + && name.slice(i - separator.length, i).toString == separator + then i else -1 - } numberedNameKinds(tag) = this: @unchecked } @@ -240,6 +240,16 @@ object NameKinds { } } + /** Unique names that can be unmangled */ + class UniqueNameKindWithUnmangle(separator: String) extends UniqueNameKind(separator): + override def unmangle(name: SimpleName): TermName = + val i = skipSeparatorAndNum(name, separator) + if i > 0 then + val index = name.drop(i).toString.toInt + val original = name.take(i - separator.length).asTermName + apply(original, index) + else name + /** Names of the form `prefix . name` */ val QualifiedName: QualifiedNameKind = new QualifiedNameKind(QUALIFIED, ".") @@ -288,7 +298,7 @@ object NameKinds { * * The "evidence$" prefix is a convention copied from Scala 2. 
*/ - val ContextBoundParamName: UniqueNameKind = new UniqueNameKind("evidence$") + val ContextBoundParamName: UniqueNameKind = new UniqueNameKindWithUnmangle("evidence$") /** The name of an inferred contextual function parameter: * @@ -323,20 +333,7 @@ object NameKinds { val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") - /** A kind of unique extension methods; Unlike other unique names, these can be - * unmangled. - */ - val UniqueExtMethName: UniqueNameKind = new UniqueNameKind("$extension") { - override def unmangle(name: SimpleName): TermName = { - val i = skipSeparatorAndNum(name, separator) - if (i > 0) { - val index = name.drop(i).toString.toInt - val original = name.take(i - separator.length).asTermName - apply(original, index) - } - else name - } - } + val UniqueExtMethName: UniqueNameKind = new UniqueNameKindWithUnmangle("$extension") /** Kinds of unique names generated by the pattern matcher */ val PatMatStdBinderName: UniqueNameKind = new UniqueNameKind("x") From 81679fabee21c6777099021b125afe5f77f7709d Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 21 Dec 2023 11:32:24 +0100 Subject: [PATCH 352/465] Change rules for given prioritization Consider the following program: ```scala class A class B extends A class C extends A given A = A() given B = B() given C = C() def f(using a: A, b: B, c: C) = println(a.getClass) println(b.getClass) println(c.getClass) @main def Test = f ``` With the current rules, this would fail with an ambiguity error between B and C when trying to synthesize the A parameter. This is a problem without an easy remedy. We can fix this problem by flipping the priority for implicit arguments. Instead of requiring an argument to be most _specific_, we now require it to be most _general_ while still conforming to the formal parameter. 
There are three justifications for this change, which at first glance seems quite drastic: - It gives us a natural way to deal with inheritance triangles like the one in the code above. Such triangles are quite common. - Intuitively, we want to get the closest possible match between required formal parameter type and synthetisized argument. The "most general" rule provides that. - We already do a crucial part of this. Namely, with current rules we interpolate all type variables in an implicit argument downwards, no matter what their variance is. This makes no sense in theory, but solves hairy problems with contravariant typeclasses like `Comparable`. Instead of this hack, we now do something more principled, by flipping the direction everywhere, preferring general over specific, instead of just flipping contravariant type parameters. The behavior is dependent on the Scala version - Old behavior: up to 3.4 - New behavior: from 3.5, 3.5-migration warns on behavior change The CB builds under the new rules. One fix was needed for a shapeless 3 deriving test. There was a typo: mkInstances instead of mkProductInstances, which previously got healed by accident because of the most specific rule. Also: Don't flip contravariant type arguments for overloading resolution Flipping contravariant type arguments was needed for implicit search where it will be replaced by a more general scheme. But it makes no sense for overloading resolution. For overloading resolution, we want to pick the most specific alternative, analogous to us picking the most specific instantiation when we force a fully defined type. Also: Disable implicit search everywhere for disambiaguation Previously, one disambiguation step missed that, whereas implicits were turned off everywhere else. 
[Cherry-picked 48000ee3f578201279094c7d76152a9fbf0992cc] --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 76d057f15408..63e86e3a321d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1886,7 +1886,7 @@ trait Applications extends Compatibility { then // Intermediate rules: better means specialize, but map all type arguments downwards // These are enabled for 3.0-3.5, and for all comparisons between old-style implicits, - // and in 3.5 amd 3.6-migration when we compare with previous rules. + // and in 3.5 and 3.6-migration when we compare with previous rules. val flip = new TypeMap: def apply(t: Type) = t match case t @ AppliedType(tycon, args) => diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5ac12ce1aa0c..fd22f0ec5529 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -531,7 +531,7 @@ object Implicits: |must be more specific than $target""" :: Nil override def msg(using Context) = - super.msg.append("\nThe expected type $target is not specific enough, so no search was attempted") + super.msg.append(i"\nThe expected type $target is not specific enough, so no search was attempted") override def toString = s"TooUnspecific" end TooUnspecific From 555f67c800af263b0528cdc410f32b47bab9b7e3 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 15:54:37 +0200 Subject: [PATCH 353/465] Allow context bounds in type declarations Expand them to deferred givens [Cherry-picked d923cac0f70b357d75721daf0cf316b4393f2beb] --- .../src/dotty/tools/dotc/ast/Desugar.scala | 15 +- 
.../dotty/tools/dotc/parsing/Parsers.scala | 53 +-- .../test/dotc/pos-test-pickling.blacklist | 1 + docs/_docs/internals/syntax.md | 2 +- tests/pos/deferredSummon.scala | 21 +- tests/pos/dep-context-bounds.scala | 10 + tests/pos/hylolib-cb-extract.scala | 18 + tests/pos/hylolib-cb/AnyCollection.scala | 66 ++++ tests/pos/hylolib-cb/AnyValue.scala | 76 ++++ tests/pos/hylolib-cb/BitArray.scala | 372 ++++++++++++++++++ tests/pos/hylolib-cb/Collection.scala | 279 +++++++++++++ tests/pos/hylolib-cb/CoreTraits.scala | 57 +++ tests/pos/hylolib-cb/Hasher.scala | 38 ++ tests/pos/hylolib-cb/HyArray.scala | 221 +++++++++++ tests/pos/hylolib-cb/Integers.scala | 58 +++ tests/pos/hylolib-cb/Range.scala | 37 ++ tests/pos/hylolib-cb/Slice.scala | 46 +++ tests/pos/hylolib-cb/StringConvertible.scala | 14 + .../pos/hylolib-deferred-given/AnyValue.scala | 2 +- tests/pos/hylolib-deferred-given/Range.scala | 2 +- 20 files changed, 1355 insertions(+), 33 deletions(-) create mode 100644 tests/pos/dep-context-bounds.scala create mode 100644 tests/pos/hylolib-cb-extract.scala create mode 100644 tests/pos/hylolib-cb/AnyCollection.scala create mode 100644 tests/pos/hylolib-cb/AnyValue.scala create mode 100644 tests/pos/hylolib-cb/BitArray.scala create mode 100644 tests/pos/hylolib-cb/Collection.scala create mode 100644 tests/pos/hylolib-cb/CoreTraits.scala create mode 100644 tests/pos/hylolib-cb/Hasher.scala create mode 100644 tests/pos/hylolib-cb/HyArray.scala create mode 100644 tests/pos/hylolib-cb/Integers.scala create mode 100644 tests/pos/hylolib-cb/Range.scala create mode 100644 tests/pos/hylolib-cb/Slice.scala create mode 100644 tests/pos/hylolib-cb/StringConvertible.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 04fd1afca8be..d6e442ed4a0c 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -237,12 +237,13 @@ object desugar { def desugarRhs(rhs: 
Tree): Tree = rhs match case ContextBounds(tbounds, cxbounds) => + val isMember = flags.isAllOf(DeferredGivenFlags) for bound <- cxbounds do val evidenceName = bound match case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => ownName - case _ if Config.nameSingleContextBounds && cxbounds.tail.isEmpty - && Feature.enabled(Feature.modularity) => + case _ if Config.nameSingleContextBounds && !isMember + && cxbounds.tail.isEmpty && Feature.enabled(Feature.modularity) => tdef.name.toTermName case _ => freshName(bound) @@ -492,6 +493,14 @@ object desugar { Apply(fn, params.map(refOfDef)) } + def typeDef(tdef: TypeDef)(using Context): Tree = + val evidenceBuf = new mutable.ListBuffer[ValDef] + val result = desugarContextBounds( + tdef, evidenceBuf, + (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, + inventGivenOrExtensionName, Nil) + if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) + /** The expansion of a class definition. See inline comments for what is involved */ def classDef(cdef: TypeDef)(using Context): Tree = { val impl @ Template(constr0, _, self, _) = cdef.rhs: @unchecked @@ -1426,7 +1435,7 @@ object desugar { case tree: TypeDef => if (tree.isClassDef) classDef(tree) else if (ctx.mode.isQuotedPattern) quotedPatternTypeDef(tree) - else tree + else typeDef(tree) case tree: DefDef => if (tree.name.isConstructorName) tree // was already handled by enclosing classDef else defDef(tree) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index bbc4096f266b..f3d02dda5c48 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3930,14 +3930,16 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] + /** TypeDef ::= id [TypeParamClause] {FunParamClause} 
TypeAndCtxBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atSpan(start, nameStart) { val nameIdent = typeIdent() + val tname = nameIdent.name.asTypeName val tparams = typeParamClauseOpt(ParamOwner.Type) val vparamss = funParamClauses() + def makeTypeDef(rhs: Tree): Tree = { val rhs1 = lambdaAbstractAll(tparams :: vparamss, rhs) val tdef = TypeDef(nameIdent.name.toTypeName, rhs1) @@ -3945,36 +3947,37 @@ object Parsers { tdef.pushAttachment(Backquoted, ()) finalizeDef(tdef, mods, start) } + in.token match { case EQUALS => in.nextToken() makeTypeDef(toplevelTyp()) case SUBTYPE | SUPERTYPE => - val bounds = typeBounds() - if (in.token == EQUALS) { - val eqOffset = in.skipToken() - var rhs = toplevelTyp() - rhs match { - case mtt: MatchTypeTree => - bounds match { - case TypeBoundsTree(EmptyTree, upper, _) => - rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) - case _ => - syntaxError(em"cannot combine lower bound and match type alias", eqOffset) - } - case _ => - if mods.is(Opaque) then - rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) - else - syntaxError(em"cannot combine bound and alias", eqOffset) - } - makeTypeDef(rhs) - } - else makeTypeDef(bounds) + typeAndCtxBounds(tname) match + case bounds: TypeBoundsTree if in.token == EQUALS => + val eqOffset = in.skipToken() + var rhs = toplevelTyp() + rhs match { + case mtt: MatchTypeTree => + bounds match { + case TypeBoundsTree(EmptyTree, upper, _) => + rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) + case _ => + syntaxError(em"cannot combine lower bound and match type alias", eqOffset) + } + case _ => + if mods.is(Opaque) then + rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) + else + syntaxError(em"cannot combine bound and alias", eqOffset) + } + makeTypeDef(rhs) + case bounds => makeTypeDef(bounds) case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => - makeTypeDef(typeBounds()) - case _ if (staged & StageKind.QuotedPattern) != 0 => - 
makeTypeDef(typeBounds()) + makeTypeDef(typeAndCtxBounds(tname)) + case _ if (staged & StageKind.QuotedPattern) != 0 + || in.featureEnabled(Feature.modularity) && in.isColon => + makeTypeDef(typeAndCtxBounds(tname)) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) return EmptyTree // return to avoid setting the span to EmptyTree diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 5c715faa504b..e58277bdc0e5 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -129,6 +129,7 @@ parsercombinators-givens.scala parsercombinators-givens-2.scala parsercombinators-arrow.scala hylolib-deferred-given +hylolib-cb diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index e123fa900258..05f89a344148 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -457,7 +457,7 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala index d12a98e52736..31a9697eda6b 100644 --- a/tests/pos/deferredSummon.scala +++ b/tests/pos/deferredSummon.scala @@ -9,11 +9,15 @@ trait A: given Ord[Elem] = deferred def foo = summon[Ord[Elem]] +trait B: + type Elem: Ord + def foo = summon[Ord[Elem]] + object Inst: given Ord[Int]: def less(x: Int, y: Int) = x < y -object Test: +object Test1: import Inst.given class C extends A: type Elem = Int @@ -22,9 +26,22 @@ 
object Test: given A: type Elem = Int -class D[T: Ord] extends A: +class D1[T: Ord] extends B: + type Elem = T + +object Test2: + import Inst.given + class C extends B: + type Elem = Int + object E extends B: + type Elem = Int + given B: + type Elem = Int + +class D2[T: Ord] extends B: type Elem = T + diff --git a/tests/pos/dep-context-bounds.scala b/tests/pos/dep-context-bounds.scala new file mode 100644 index 000000000000..434805762622 --- /dev/null +++ b/tests/pos/dep-context-bounds.scala @@ -0,0 +1,10 @@ +//> using options -language:experimental.modularity -source future +trait A[X]: + type Self = X + +object Test2: + def foo[X: A as x](a: x.Self) = ??? + + def bar[X: A as x](a: Int) = ??? + + def baz[X: A as x](a: Int)(using String) = ??? diff --git a/tests/pos/hylolib-cb-extract.scala b/tests/pos/hylolib-cb-extract.scala new file mode 100644 index 000000000000..b80a88485a2b --- /dev/null +++ b/tests/pos/hylolib-cb-extract.scala @@ -0,0 +1,18 @@ +//> using options -language:experimental.modularity -source future +package hylotest +import compiletime.deferred + +trait Value[Self] + +/** A collection of elements accessible by their position. */ +trait Collection[Self]: + + /** The type of the elements in the collection. */ + type Element: Value + +class BitArray + +given Value[Boolean] {} + +given Collection[BitArray] with + type Element = Boolean diff --git a/tests/pos/hylolib-cb/AnyCollection.scala b/tests/pos/hylolib-cb/AnyCollection.scala new file mode 100644 index 000000000000..1a44344d0e51 --- /dev/null +++ b/tests/pos/hylolib-cb/AnyCollection.scala @@ -0,0 +1,66 @@ +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. 
+ */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base](using b: Collection[Base])(base: Base): AnyCollection[b.Element] = + // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` + // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these + // choices is even correct! Note also that the ambiguity is suppressed if the constructor of + // `AnyValue` is declared with a context bound rather than an implicit parameter. + given Value[b.Position] = b.positionIsValue + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[b.Position])) + + def at(p: AnyValue): b.Element = + base.at(p.unsafelyUnwrappedAs[b.Position]) + + new AnyCollection[b.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) { + + def startPosition = + self._start() + + def endPosition = + self._end() + + def positionAfter(p: Position) = + self._after(p) + + def at(p: Position) = + self._at(p) + + } + +} diff --git a/tests/pos/hylolib-cb/AnyValue.scala b/tests/pos/hylolib-cb/AnyValue.scala new file mode 100644 index 000000000000..b9d39869c09a --- /dev/null +++ b/tests/pos/hylolib-cb/AnyValue.scala @@ -0,0 +1,76 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. 
+ * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. */ + def apply[T](using Value[T])(wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given anyValueIsValue: Value[AnyValue] with { + + extension (self: AnyValue) { + + def copy(): AnyValue = + self.copy() + + def eq(other: AnyValue): Boolean = + self `eq` other + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} diff --git a/tests/pos/hylolib-cb/BitArray.scala b/tests/pos/hylolib-cb/BitArray.scala new file mode 100644 index 000000000000..3a0b4658f747 --- /dev/null +++ b/tests/pos/hylolib-cb/BitArray.scala @@ -0,0 +1,372 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` 
indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. 
*/ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. 
+ */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. 
+ * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. 
*/ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. */ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given bitArrayPositionIsValue: Value[BitArray.Position] with { + + extension (self: BitArray.Position) { + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} + +given bitArrayIsCollection: Collection[BitArray] with { + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) { + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + + } + +} + +given bitArrayIsStringConvertible: StringConvertible[BitArray] with { + + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + +} diff --git a/tests/pos/hylolib-cb/Collection.scala 
b/tests/pos/hylolib-cb/Collection.scala new file mode 100644 index 000000000000..073a99cdd16b --- /dev/null +++ b/tests/pos/hylolib-cb/Collection.scala @@ -0,0 +1,279 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection[Self] { + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value as positionIsValue + + extension (self: Self) { + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def _count(p: Position, n: Int): Int = + if p `eq` e then n else _count(self.positionAfter(p), n + 1) + _count(startPosition, 0) + + /** Returns the position of `self`'s first element, or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and `j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if (i.eq(e)) { + false + } else if (j.eq(e)) { + true + } else { + def _isBefore(n: Position): Boolean = + if (n.eq(j)) { + true + } else if (n.eq(e)) { + false + } else { + _isBefore(self.positionAfter(n)) + } + _isBefore(self.positionAfter(i)) + } + + } + +} + +extension [Self](self: Self)(using s: Collection[Self]) { + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(s.Element, Slice[Self])] = + if (self.isEmpty) { + None + } else { + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + } + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T, combine: (T, s.Element) => T): T = + val e = self.endPosition + def loop(p: s.Position, r: T): T = + if (p.eq(e)) { + r + } else { + loop(self.positionAfter(p), combine(r, self.at(p))) + } + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: (s.Element) => Boolean): Boolean = + val e = self.endPosition + def loop(p: s.Position): Boolean = + if (p.eq(e)) { + true + } else if (!action(self.at(p))) { + false + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. 
+ * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T](using Value[T])(transform: (s.Element) => T): HyArray[T] = + self.reduce( + HyArray[T](), + (r, e) => r.append(transform(e), assumeUniqueness = true) + ) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: (s.Element) => Boolean): HyArray[s.Element] = + self.reduce( + HyArray[s.Element](), + (r, e) => if (isIncluded(e)) then r.append(e, assumeUniqueness = true) else r + ) + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def firstPositionWhere(predicate: (s.Element) => Boolean): Option[s.Position] = + val e = self.endPosition + def loop(p: s.Position): Option[s.Position] = + if (p.eq(e)) { + None + } else if (predicate(self.at(p))) { + Some(p) + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. 
+ /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Comparable[s.Element]): Option[s.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Comparable[s.Element]): Option[s.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (s.Element, s.Element) => Boolean): Option[s.Element] = + if (self.isEmpty) { + None + } else { + val e = self.endPosition + def _least(p: s.Position, least: s.Element): s.Element = + if (p.eq(e)) { + least + } else { + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + _least(self.positionAfter(p), y) + } + + val b = self.startPosition + Some(_least(self.positionAfter(b), self.at(b))) + } + +} + +extension [Self](self: Self)(using + s: Collection[Self], + e: Value[s.Element] +) { + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. 
*/ + def elementsEqual[T](using o: Collection[T] { type Element = s.Element })(other: T): Boolean = + def loop(i: s.Position, j: o.Position): Boolean = + if (i `eq` self.endPosition) { + j `eq` other.endPosition + } else if (j `eq` other.endPosition) { + false + } else if (self.at(i) `neq` other.at(j)) { + false + } else { + loop(self.positionAfter(i), other.positionAfter(j)) + } + loop(self.startPosition, other.startPosition) + +} diff --git a/tests/pos/hylolib-cb/CoreTraits.scala b/tests/pos/hylolib-cb/CoreTraits.scala new file mode 100644 index 000000000000..01b2c5242af9 --- /dev/null +++ b/tests/pos/hylolib-cb/CoreTraits.scala @@ -0,0 +1,57 @@ +package hylo + +/** A type whose instance can be treated as independent values. + * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value[Self] { + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher + + } + +} + +extension [Self: Value](self: Self) def neq(other: Self): Boolean = !self.eq(other) + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable[Self] extends Value[Self] { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. 
*/ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib-cb/Hasher.scala b/tests/pos/hylolib-cb/Hasher.scala new file mode 100644 index 000000000000..ef6813df6b60 --- /dev/null +++ b/tests/pos/hylolib-cb/Hasher.scala @@ -0,0 +1,38 @@ +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib-cb/HyArray.scala b/tests/pos/hylolib-cb/HyArray.scala new file mode 100644 index 000000000000..9347f7eb12cc --- /dev/null +++ b/tests/pos/hylolib-cb/HyArray.scala @@ -0,0 +1,221 @@ +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. 
*/ +final class HyArray[Element] private (using + elementIsValue: Value[Element] +)( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is disappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + // NOTE: Can't refine `C.Element` without renaming the generic parameter of `HyArray`. + // /** Adds the contents of `source` at the end of the array. 
*/ + // def appendContents[C](using + // s: Collection[C] + // )( + // source: C { type Element = Element }, + // assumeUniqueness: Boolean = false + // ): HyArray[Element] = + // val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + // source.reduce(result, (r, e) => r.append(e, assumeUniqueness = true)) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. 
*/ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T](using t: Value[T])(elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { + + extension (self: HyArray[T]) { + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher, (h, e) => e.hashInto(h)) + + } + +} + +given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { + + type Element = T + type Position = Int + + extension (self: HyArray[T]) { + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + + } + +} + +// NOTE: This should work. 
+// given hyArrayIsStringConvertible[T](using +// tIsValue: Value[T], +// tIsStringConvertible: StringConvertible[T] +// ): StringConvertible[HyArray[T]] with { +// +// given Collection[HyArray[T]] = hyArrayIsCollection[T] +// +// extension (self: HyArray[T]) +// override def description: String = +// var contents = mutable.StringBuilder() +// self.forEach((e) => { contents ++= e.description; true }) +// s"[${contents.mkString(", ")}]" +// +// } diff --git a/tests/pos/hylolib-cb/Integers.scala b/tests/pos/hylolib-cb/Integers.scala new file mode 100644 index 000000000000..b9bc203a88ea --- /dev/null +++ b/tests/pos/hylolib-cb/Integers.scala @@ -0,0 +1,58 @@ +package hylo + +given booleanIsValue: Value[Boolean] with { + + extension (self: Boolean) { + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + + } + +} + +given intIsValue: Value[Int] with { + + extension (self: Int) { + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + } + +} + +given intIsComparable: Comparable[Int] with { + + extension (self: Int) { + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + + } + +} + +given intIsStringConvertible: StringConvertible[Int] with {} diff --git a/tests/pos/hylolib-cb/Range.scala b/tests/pos/hylolib-cb/Range.scala new file mode 100644 index 000000000000..1f597652ead1 --- /dev/null +++ b/tests/pos/hylolib-cb/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an upper bound. 
*/ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound](lowerBound: Bound, upperBound: Bound)(using Comparable[Bound]) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib-cb/Slice.scala b/tests/pos/hylolib-cb/Slice.scala new file mode 100644 index 000000000000..2289ac2a085b --- /dev/null +++ b/tests/pos/hylolib-cb/Slice.scala @@ -0,0 +1,46 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base](using + val b: Collection[Base] +)( + val base: Base, + val bounds: Range[b.Position] +) { + + /** Returns `true` iff `this` is empty. 
*/ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: b.Position = + bounds.lowerBound + + def endPosition: b.Position = + bounds.upperBound + + def positionAfter(p: b.Position): b.Position = + base.positionAfter(p) + + def at(p: b.Position): b.Element = + base.at(p) + +} + +given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { + + type Element = c.Element + type Position = c.Position + + extension (self: Slice[T]) { + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] // NOTE: Ugly hack + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + + } + +} diff --git a/tests/pos/hylolib-cb/StringConvertible.scala b/tests/pos/hylolib-cb/StringConvertible.scala new file mode 100644 index 000000000000..0702f79f2794 --- /dev/null +++ b/tests/pos/hylolib-cb/StringConvertible.scala @@ -0,0 +1,14 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible[Self] { + + extension (self: Self) { + + /** Returns a textual description of `self`. */ + def description: String = + self.toString + + } + +} diff --git a/tests/pos/hylolib-deferred-given/AnyValue.scala b/tests/pos/hylolib-deferred-given/AnyValue.scala index b9d39869c09a..21f2965e102e 100644 --- a/tests/pos/hylolib-deferred-given/AnyValue.scala +++ b/tests/pos/hylolib-deferred-given/AnyValue.scala @@ -44,7 +44,7 @@ final class AnyValue private ( object AnyValue { /** Creates an instance wrapping `wrapped`. 
*/ - def apply[T](using Value[T])(wrapped: T): AnyValue = + def apply[T: Value](wrapped: T): AnyValue = def copy(a: AnyRef): AnyValue = AnyValue(a.asInstanceOf[Ref[T]].value.copy()) diff --git a/tests/pos/hylolib-deferred-given/Range.scala b/tests/pos/hylolib-deferred-given/Range.scala index 1f597652ead1..b0f50dd55c8c 100644 --- a/tests/pos/hylolib-deferred-given/Range.scala +++ b/tests/pos/hylolib-deferred-given/Range.scala @@ -30,7 +30,7 @@ object Range { * @requires * `lowerBound` is lesser than or equal to `upperBound`. */ - def apply[Bound](lowerBound: Bound, upperBound: Bound)(using Comparable[Bound]) = + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = require(lowerBound `le` upperBound) new Range(lowerBound, upperBound) From 34375268ad921f612364b81d6dec63a0adc3aa7b Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 17:46:23 +0200 Subject: [PATCH 354/465] Make some context bound evidence params tracked Make context bound evidence params tracked if they have types with abstract type members. 
[Cherry-picked 4d62692a69e994b10a1386e8d1a73a06b1528b85] --- .../src/dotty/tools/dotc/core/Symbols.scala | 8 ++--- .../src/dotty/tools/dotc/typer/Namer.scala | 30 +++++++++++++++++++ tests/pos/hylolib-cb/AnyCollection.scala | 4 +-- tests/pos/hylolib-cb/Collection.scala | 9 +++--- tests/pos/hylolib-cb/HyArray.scala | 11 ++++--- tests/pos/hylolib-cb/Slice.scala | 6 ++-- 6 files changed, 47 insertions(+), 21 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 0020efa5018d..da0ecac47b7d 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -312,7 +312,6 @@ object Symbols extends SymUtils { * With the given setup, all such calls will give implicit-not found errors */ final def symbol(implicit ev: DontUseSymbolOnSymbol): Nothing = unsupported("symbol") - type DontUseSymbolOnSymbol final def source(using Context): SourceFile = { def valid(src: SourceFile): SourceFile = @@ -402,13 +401,12 @@ object Symbols extends SymUtils { flags: FlagSet = this.flags, info: Type = this.info, privateWithin: Symbol = this.privateWithin, - coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap - compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.associatedFile` once we bootstrap + coord: Coord = NoCoord, // Can be `= owner.coord` once we have new default args + compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.compilationUnitInfo` once we have new default args ): Symbol = { val coord1 = if (coord == NoCoord) owner.coord else coord val compilationUnitInfo1 = if (compilationUnitInfo == null) owner.compilationUnitInfo else compilationUnitInfo - if isClass then newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, compilationUnitInfo1) else @@ -936,6 +934,8 @@ object Symbols extends SymUtils { case (x: Symbol) :: _ if x.isType => Some(xs.asInstanceOf[List[TypeSymbol]]) case _ => 
None + type DontUseSymbolOnSymbol + // ----- Locating predefined symbols ---------------------------------------- def requiredPackage(path: PreName)(using Context): TermSymbol = { diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 22a12ed0f468..85678b9685f7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1885,6 +1885,28 @@ class Namer { typer: Typer => ddef.trailingParamss.foreach(completeParams) val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) + + /** We add `tracked` to context bound witnesses that have abstract type members */ + def needsTracked(sym: Symbol, param: ValDef)(using Context) = + !sym.is(Tracked) + && param.hasAttachment(ContextBoundParam) + && sym.info.memberNames(abstractTypeNameFilter).nonEmpty + + /** Set every context bound evidence parameter of a class to be tracked, + * provided it has a type that has an abstract type member. Reset private and local flags + * so that the parameter becomes a `val`. 
+ */ + def setTracked(param: ValDef): Unit = + val sym = symbolOfTree(param) + sym.maybeOwner.maybeOwner.infoOrCompleter match + case info: TempClassInfo if needsTracked(sym, param) => + typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") + for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do + acc.resetFlag(PrivateLocal) + acc.setFlag(Tracked) + sym.setFlag(Tracked) + case _ => + def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) @@ -1893,10 +1915,18 @@ class Namer { typer: Typer => wrapMethType(addParamRefinements(restpe, paramSymss)) if isConstructor then + if sym.isPrimaryConstructor && Feature.enabled(modularity) then + ddef.termParamss.foreach(_.foreach(setTracked)) // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) wrapMethType(effectiveResultType(sym, paramSymss)) else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then + // set every context bound evidence parameter of a given companion method + // to be tracked, provided it has a type that has an abstract type member. + // Add refinements for all tracked parameters to the result type. + for params <- ddef.termParamss; param <- params do + val psym = symbolOfTree(param) + if needsTracked(psym, param) then psym.setFlag(Tracked) valOrDefDefSig(ddef, sym, paramSymss, wrapRefinedMethType) else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) diff --git a/tests/pos/hylolib-cb/AnyCollection.scala b/tests/pos/hylolib-cb/AnyCollection.scala index 1a44344d0e51..50f4313e46ce 100644 --- a/tests/pos/hylolib-cb/AnyCollection.scala +++ b/tests/pos/hylolib-cb/AnyCollection.scala @@ -14,7 +14,7 @@ final class AnyCollection[Element] private ( object AnyCollection { /** Creates an instance forwarding its operations to `base`. 
*/ - def apply[Base](using b: Collection[Base])(base: Base): AnyCollection[b.Element] = + def apply[Base: Collection as b](base: Base): AnyCollection[b.Element] = // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these // choices is even correct! Note also that the ambiguity is suppressed if the constructor of @@ -42,7 +42,7 @@ object AnyCollection { } -given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { +given anyCollectionIsCollection[T: Value]: Collection[AnyCollection[T]] with { type Element = T type Position = AnyValue diff --git a/tests/pos/hylolib-cb/Collection.scala b/tests/pos/hylolib-cb/Collection.scala index 073a99cdd16b..2fc04f02b9ac 100644 --- a/tests/pos/hylolib-cb/Collection.scala +++ b/tests/pos/hylolib-cb/Collection.scala @@ -89,7 +89,7 @@ trait Collection[Self] { } -extension [Self](self: Self)(using s: Collection[Self]) { +extension [Self: Collection as s](self: Self) { /** Returns the first element of `self` along with a slice containing the suffix after this * element, or `None` if `self` is empty. @@ -148,7 +148,7 @@ extension [Self](self: Self)(using s: Collection[Self]) { * @complexity * O(n) where n is the number of elements in `self`. */ - def map[T](using Value[T])(transform: (s.Element) => T): HyArray[T] = + def map[T: Value](transform: (s.Element) => T): HyArray[T] = self.reduce( HyArray[T](), (r, e) => r.append(transform(e), assumeUniqueness = true) @@ -257,9 +257,8 @@ extension [Self](self: Self)(using s: Collection[Self]) { } -extension [Self](self: Self)(using - s: Collection[Self], - e: Value[s.Element] +extension [Self: Collection as s](self: Self)(using + Value[s.Element] ) { /** Returns `true` if `self` contains the same elements as `other`, in the same order. 
*/ diff --git a/tests/pos/hylolib-cb/HyArray.scala b/tests/pos/hylolib-cb/HyArray.scala index 9347f7eb12cc..0fff45e744ec 100644 --- a/tests/pos/hylolib-cb/HyArray.scala +++ b/tests/pos/hylolib-cb/HyArray.scala @@ -1,12 +1,11 @@ +//> using options -language:experimental.modularity -source future package hylo import java.util.Arrays import scala.collection.mutable /** An ordered, random-access collection. */ -final class HyArray[Element] private (using - elementIsValue: Value[Element] -)( +final class HyArray[Element: Value as elementIsCValue]( private var _storage: scala.Array[AnyRef | Null] | Null, private var _count: Int // NOTE: where do I document private fields ) { @@ -155,14 +154,14 @@ final class HyArray[Element] private (using object HyArray { /** Creates an array with the given `elements`. */ - def apply[T](using t: Value[T])(elements: T*): HyArray[T] = + def apply[T: Value](elements: T*): HyArray[T] = var a = new HyArray[T](null, 0) for (e <- elements) a = a.append(e, assumeUniqueness = true) a } -given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { +given [T: Value] => Value[HyArray[T]] with { extension (self: HyArray[T]) { @@ -179,7 +178,7 @@ given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { } -given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { +given [T: Value] => Collection[HyArray[T]] with { type Element = T type Position = Int diff --git a/tests/pos/hylolib-cb/Slice.scala b/tests/pos/hylolib-cb/Slice.scala index 2289ac2a085b..b577ceeb3739 100644 --- a/tests/pos/hylolib-cb/Slice.scala +++ b/tests/pos/hylolib-cb/Slice.scala @@ -1,9 +1,7 @@ package hylo /** A view into a collection. 
*/ -final class Slice[Base](using - val b: Collection[Base] -)( +final class Slice[Base: Collection as b]( val base: Base, val bounds: Range[b.Position] ) { @@ -26,7 +24,7 @@ final class Slice[Base](using } -given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { +given sliceIsCollection[T: Collection as c]: Collection[Slice[T]] with { type Element = c.Element type Position = c.Position From d856e50a0a9e061c28b361e00788fc925ec80ab0 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 18:01:56 +0200 Subject: [PATCH 355/465] FIX: Fix typing of RefinedTypes with watching parents If a refined type has a parent type watching some other type, the parent should not be mapped to Object. Previously, the parent counted as `isEmpty` which caused this mapping. Fixes #10929 [Cherry-picked 11d7fa39372c430220f1818632ff1fe0c25ba60d] --- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- tests/pos/hylolib-deferred-given/Hasher.scala | 1 + tests/pos/i10929.scala | 21 +++++++++++++++++++ tests/pos/i13580.scala | 13 ++++++++++++ 4 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i10929.scala create mode 100644 tests/pos/i13580.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index c467a4507730..f744eb392d7c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2301,7 +2301,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = { - val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) + val tpt1 = if tree.tpt == EmptyTree then TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements).withSpan(tree.span) val refineCls = createSymbol(refineClsDef).asClass val TypeDef(_, impl: Template) = 
typed(refineClsDef): @unchecked diff --git a/tests/pos/hylolib-deferred-given/Hasher.scala b/tests/pos/hylolib-deferred-given/Hasher.scala index ef6813df6b60..ca45550ed002 100644 --- a/tests/pos/hylolib-deferred-given/Hasher.scala +++ b/tests/pos/hylolib-deferred-given/Hasher.scala @@ -1,3 +1,4 @@ +//> using options -language:experimental.modularity -source future package hylo import scala.util.Random diff --git a/tests/pos/i10929.scala b/tests/pos/i10929.scala new file mode 100644 index 000000000000..e916e4547e59 --- /dev/null +++ b/tests/pos/i10929.scala @@ -0,0 +1,21 @@ +//> using options -language:experimental.modularity -source future +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + (f(x.head) *: tup.map(x.tail)(f)) + +def foo[T](xs: T)(using tup: T TupleOf Int): tup.Mapped[Int] = tup.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok \ No newline at end of file diff --git a/tests/pos/i13580.scala b/tests/pos/i13580.scala new file mode 100644 index 000000000000..c3c491a19dbe --- /dev/null +++ b/tests/pos/i13580.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +trait IntWidth: + type Out +given IntWidth: + type Out = 155 + +trait IntCandidate: + type Out +given (using tracked val w: IntWidth) => IntCandidate: + type Out = w.Out + +val x = summon[IntCandidate] +val xx = summon[x.Out =:= 155] From 5fe6b5bb8e0428b3a32ee350e85f0709e1395d89 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 6 Jan 2024 13:53:17 +0100 Subject: [PATCH 356/465] Also reduce term 
projections We already reduce `R { type A = T } # A` to `T` in most situations when we create types. We now also reduce `R { val x: S } # x` to `S` if `S` is a singleton type. This will simplify types as we go to more term-dependent typing. As a concrete benefit, it will avoid several test-pickling failures due to pickling differences when using dependent types. [Cherry-picked 96fbf2942a296df3f63b05e2503f6a1a904e28cf] --- .../src/dotty/tools/dotc/core/Types.scala | 66 +++++++++---------- 1 file changed, 32 insertions(+), 34 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index a6136a20cf32..ac3aef2a59d2 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1642,17 +1642,19 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Does not perform the reduction if the resulting type would contain - * a reference to the "this" of the current refined type, except in the following situation + * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` is `S` + * is a singleton type. * - * (1) The "this" reference can be avoided by following an alias. Example: + * Does not perform the reduction if the resulting type would contain + * a reference to the "this" of the current refined type, except if the "this" + * reference can be avoided by following an alias. Example: * * P { type T = String, type R = P{...}.T } # R --> String * * (*) normalizes means: follow instantiated typevars and aliases. 
*/ - def lookupRefined(name: Name)(using Context): Type = { - @tailrec def loop(pre: Type): Type = pre.stripTypeVar match { + def lookupRefined(name: Name)(using Context): Type = + @tailrec def loop(pre: Type): Type = pre match case pre: RefinedType => pre.refinedInfo match { case tp: AliasingBounds => @@ -1675,12 +1677,13 @@ object Types extends TypeUtils { case TypeAlias(alias) => loop(alias) case _ => NoType } + case pre: (TypeVar | AnnotatedType) => + loop(pre.underlying) case _ => NoType - } loop(this) - } + end lookupRefined /** The type , reduced if possible */ def select(name: Name)(using Context): Type = @@ -2820,35 +2823,30 @@ object Types extends TypeUtils { def derivedSelect(prefix: Type)(using Context): Type = if prefix eq this.prefix then this else if prefix.isExactlyNothing then prefix - else { - val res = - if (isType && currentValidSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) + else + val reduced = + if isType && currentValidSymbol.isAllOf(ClassTypeParam) then argForParam(prefix) else prefix.lookupRefined(name) - if (res.exists) return res - if (isType) { - if (Config.splitProjections) - prefix match { - case prefix: AndType => - def isMissing(tp: Type) = tp match { - case tp: TypeRef => !tp.info.exists - case _ => false - } - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return ( - if (isMissing(derived1)) derived2 - else if (isMissing(derived2)) derived1 - else prefix.derivedAndType(derived1, derived2)) - case prefix: OrType => - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return prefix.derivedOrType(derived1, derived2) - case _ => - } - } - if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) + if reduced.exists then return reduced + if Config.splitProjections && isType then + prefix match + case prefix: AndType => + def isMissing(tp: Type) = tp match + case tp: TypeRef => !tp.info.exists + case _ => false + val derived1 = 
derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return + if isMissing(derived1) then derived2 + else if isMissing(derived2) then derived1 + else prefix.derivedAndType(derived1, derived2) + case prefix: OrType => + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return prefix.derivedOrType(derived1, derived2) + case _ => + if prefix.isInstanceOf[WildcardType] then WildcardType.sameKindAs(this) else withPrefix(prefix) - } /** A reference like this one, but with the given symbol, if it exists */ private def withSym(sym: Symbol)(using Context): ThisType = From becdf887a5a581b35386f605db702949326f1f6e Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 20:43:48 +0200 Subject: [PATCH 357/465] Implement context bound companions [Cherry-picked ce09ef3bc4a49c4f851b3f8ab3c4b3c2ba64bb7d] --- .../src/dotty/tools/dotc/ast/Desugar.scala | 50 ++++++++---- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 31 ++++++++ .../src/dotty/tools/dotc/core/Contexts.scala | 13 ++-- .../dotty/tools/dotc/core/Definitions.scala | 9 +++ .../src/dotty/tools/dotc/core/NamerOps.scala | 53 +++++++++++++ .../src/dotty/tools/dotc/core/StdNames.scala | 2 + .../src/dotty/tools/dotc/core/SymUtils.scala | 3 + .../tools/dotc/core/tasty/TreeUnpickler.scala | 1 + .../tools/dotc/printing/PlainPrinter.scala | 4 +- .../tools/dotc/reporting/ErrorMessageID.scala | 4 +- .../dotty/tools/dotc/reporting/messages.scala | 36 +++++++++ .../tools/dotc/transform/PostTyper.scala | 22 ++++-- .../tools/dotc/transform/TreeChecker.scala | 21 ++--- .../src/dotty/tools/dotc/typer/Namer.scala | 35 +++++++-- .../src/dotty/tools/dotc/typer/Typer.scala | 76 +++++++++++++++++++ .../annotation/internal/WitnessNames.scala | 53 +++++++++++++ project/MiMaFilters.scala | 2 + tests/neg/cb-companion-leaks.check | 66 ++++++++++++++++ tests/neg/cb-companion-leaks.scala | 16 ++++ tests/pos-macros/i8325/Macro_1.scala | 4 +- tests/pos-macros/i8325/Test_2.scala | 2 +- 
tests/pos-macros/i8325b/Macro_1.scala | 4 +- tests/pos-macros/i8325b/Test_2.scala | 2 +- tests/pos/FromString-cb-companion.scala | 14 ++++ tests/pos/cb-companion-joins.scala | 21 +++++ 25 files changed, 496 insertions(+), 48 deletions(-) create mode 100644 library/src/scala/annotation/internal/WitnessNames.scala create mode 100644 tests/neg/cb-companion-leaks.check create mode 100644 tests/neg/cb-companion-leaks.scala create mode 100644 tests/pos/FromString-cb-companion.scala create mode 100644 tests/pos/cb-companion-joins.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index d6e442ed4a0c..08953f1dec6b 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -257,7 +257,16 @@ object desugar { case _ => rhs - cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + if Feature.enabled(Feature.modularity) + && evidenceNames.nonEmpty + && !evidenceNames.contains(tdef.name.toTermName) + && !allParamss.nestedExists(_.name == tdef.name.toTermName) + then + tdef1.withAddedAnnotation: + WitnessNamesAnnot(evidenceNames.toList).withSpan(tdef.span) + else + tdef1 end desugarContextBounds private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = @@ -323,9 +332,9 @@ object desugar { def getterParamss(n: Int): List[ParamClause] = mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) + tparam => dropContextBounds(toDefParam(tparam, KeepAnnotations.All)) } { - vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) + vparam => toDefParam(vparam, KeepAnnotations.All, keepDefault = false) } def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match @@ -430,7 +439,12 @@ object desugar { private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using 
Context): DefDef = if params.isEmpty then return meth - val boundNames = params.map(_.name).toSet + var boundNames = params.map(_.name).toSet + for mparams <- meth.paramss; mparam <- mparams do + mparam match + case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => + boundNames += tparam.name.toTermName + case _ => //println(i"add ev params ${meth.name}, ${boundNames.toList}") @@ -463,16 +477,26 @@ object desugar { @sharable private val synthetic = Modifiers(Synthetic) + /** Which annotations to keep in derived parameters */ + private enum KeepAnnotations: + case None, All, WitnessOnly + /** Filter annotations in `mods` according to `keep` */ - private def filterAnnots(mods: Modifiers, keep: Boolean)(using Context) = - if keep then mods else mods.withAnnotations(Nil) + private def filterAnnots(mods: Modifiers, keep: KeepAnnotations)(using Context) = keep match + case KeepAnnotations.None => mods.withAnnotations(Nil) + case KeepAnnotations.All => mods + case KeepAnnotations.WitnessOnly => + mods.withAnnotations: + mods.annotations.filter: + case WitnessNamesAnnot(_) => true + case _ => false - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean)(using Context): TypeDef = - val mods = filterAnnots(tparam.rawMods, keepAnnotations) + private def toDefParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = + val mods = filterAnnots(tparam.rawMods, keep) tparam.withMods(mods & EmptyFlags | Param) - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean)(using Context): ValDef = { - val mods = filterAnnots(vparam.rawMods, keepAnnotations) + private def toDefParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { + val mods = filterAnnots(vparam.rawMods, keep) val hasDefault = if keepDefault then HasDefault else EmptyFlags // Need to ensure that tree is duplicated since term parameters can be watched // and cloning a term parameter 
will copy its watchers to the clone, which means @@ -573,7 +597,7 @@ object desugar { // Annotations on class _type_ parameters are set on the derived parameters // but not on the constructor parameters. The reverse is true for // annotations on class _value_ parameters. - val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) + val constrTparams = impliedTparams.map(toDefParam(_, KeepAnnotations.WitnessOnly)) val constrVparamss = if (originalVparamss.isEmpty) { // ensure parameter list is non-empty if (isCaseClass) @@ -584,7 +608,7 @@ object desugar { report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) ListOfNil } - else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) + else originalVparamss.nestedMap(toDefParam(_, KeepAnnotations.All, keepDefault = true)) val derivedTparams = constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) @@ -606,7 +630,7 @@ object desugar { defDef( addEvidenceParams( cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) + evidenceParams(constr1).map(toDefParam(_, KeepAnnotations.None, keepDefault = false))))) case stat => stat } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 941e7b8f1219..990fb37f4e60 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -5,6 +5,8 @@ package ast import core.* import Flags.*, Trees.*, Types.*, Contexts.* import Names.*, StdNames.*, NameOps.*, Symbols.* +import Annotations.Annotation +import NameKinds.ContextBoundParamName import typer.ConstFold import reporting.trace @@ -380,6 +382,35 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree.tpe.isInstanceOf[ThisType] } + + /** 
Extractor for annotation.internal.WitnessNames(name_1, ..., name_n)` + * represented as an untyped or typed tree. + */ + object WitnessNamesAnnot: + def apply(names0: List[TermName])(using Context): untpd.Tree = + untpd.TypedSplice(tpd.New( + defn.WitnessNamesAnnot.typeRef, + tpd.SeqLiteral(names0.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil + )) + + def unapply(tree: Tree)(using Context): Option[List[TermName]] = + def isWitnessNames(tp: Type) = tp match + case tp: TypeRef => + tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot + case _ => + false + unsplice(tree) match + case Apply( + Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), + SeqLiteral(elems, _) :: Nil + ) if isWitnessNames(tpt.tpe) => + Some: + elems.map: + case Literal(Constant(str: String)) => + ContextBoundParamName.unmangle(str.toTermName.asSimpleName) + case _ => + None + end WitnessNamesAnnot } trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index d0c30a665289..a5b0e2dba254 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -12,6 +12,7 @@ import Symbols.* import Scopes.* import Uniques.* import ast.Trees.* +import Flags.ParamAccessor import ast.untpd import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} @@ -399,7 +400,8 @@ object Contexts { * * - as owner: The primary constructor of the class * - as outer context: The context enclosing the class context - * - as scope: The parameter accessors in the class context + * - as scope: type parameters, the parameter accessors, and + * the context bound companions in the class context, * * The reasons for this peculiar choice of attributes are as follows: * @@ 
-413,10 +415,11 @@ object Contexts { * context see the constructor parameters instead, but then we'd need a final substitution step * from constructor parameters to class parameter accessors. */ - def superCallContext: Context = { - val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors*) - superOrThisCallContext(owner.primaryConstructor, locals) - } + def superCallContext: Context = + val locals = owner.typeParams + ++ owner.asClass.unforcedDecls.filter: sym => + sym.is(ParamAccessor) || sym.isContextBoundCompanion + superOrThisCallContext(owner.primaryConstructor, newScopeWith(locals*)) /** The context for the arguments of a this(...) constructor call. * The context is computed from the local auxiliary constructor context. diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 9ee5891f1606..b408883009ab 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -459,6 +459,13 @@ class Definitions { @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) + @tu lazy val CBCompanion: TypeSymbol = // type ``[-Refs] + enterPermanentSymbol(tpnme.CBCompanion, + TypeBounds(NothingType, + HKTypeLambda(tpnme.syntheticTypeParamName(0) :: Nil, Contravariant :: Nil)( + tl => TypeBounds.empty :: Nil, + tl => AnyType))).asType + /** Method representing a throw */ @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, MethodType(List(ThrowableType), NothingType)) @@ -1062,6 +1069,7 @@ class Definitions { @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") @tu lazy val RetainsArgAnnot: ClassSymbol = requiredClass("scala.annotation.retainsArg") @tu lazy val PublicInBinaryAnnot: ClassSymbol = requiredClass("scala.annotation.publicInBinary") + @tu 
lazy val WitnessNamesAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WitnessNames") @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") @@ -2158,6 +2166,7 @@ class Definitions { NullClass, NothingClass, SingletonClass, + CBCompanion, MaybeCapabilityAnnot) @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index af03573da4a8..58b4ad681c6f 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -4,8 +4,10 @@ package core import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme +import ContextOps.enter import TypeApplications.EtaExpansion import collection.mutable +import config.Printers.typr /** Operations that are shared between Namer and TreeUnpickler */ object NamerOps: @@ -256,4 +258,55 @@ object NamerOps: rhsCtx.gadtState.addBound(psym, tr, isUpper = true) } + /** Create a context-bound companion for type symbol `tsym`, which has a context + * bound that defines a set of witnesses with names `witnessNames`. + * + * @param params If `tsym` is a type parameter, a list of parameter symbols + * that include all witnesses, otherwise the empty list. + * + * The context-bound companion has as name the name of `tsym` translated to + * a term name. We create a synthetic val of the form + * + * val A: ``[witnessRef1 | ... | witnessRefN] + * + * where + * + * is the CBCompanion type created in Definitions + * witnessRefK is a reference to the K'th witness. + * + * The companion has the same access flags as the original type. 
+ */ + def addContextBoundCompanionFor(tsym: Symbol, witnessNames: List[TermName], params: List[Symbol])(using Context): Unit = + val prefix = ctx.owner.thisType + val companionName = tsym.name.toTermName + val witnessRefs = + if params.nonEmpty then + witnessNames.map: witnessName => + prefix.select(params.find(_.name == witnessName).get) + else + witnessNames.map(TermRef(prefix, _)) + val cbtype = defn.CBCompanion.typeRef.appliedTo: + witnessRefs.reduce[Type](OrType(_, _, soft = false)) + val cbc = newSymbol( + ctx.owner, companionName, + (tsym.flagsUNSAFE & (AccessFlags)).toTermFlags | Synthetic, + cbtype) + typr.println(s"context bound companion created $cbc for $witnessNames in ${ctx.owner}") + ctx.enter(cbc) + end addContextBoundCompanionFor + + /** Add context bound companions to all context-bound types declared in + * this class. This assumes that these types already have their + * WitnessNames annotation set even before they are completed. This is + * the case for unpickling but currently not for Namer. So the method + * is only called during unpickling, and is not part of NamerOps. 
+ */ + def addContextBoundCompanions(cls: ClassSymbol)(using Context): Unit = + for sym <- cls.info.decls do + if sym.isType && !sym.isClass then + for ann <- sym.annotationsUNSAFE do + if ann.symbol == defn.WitnessNamesAnnot then + ann.tree match + case ast.tpd.WitnessNamesAnnot(witnessNames) => + addContextBoundCompanionFor(sym, witnessNames, Nil) end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index c0eb8a690eb4..ab7e4eea0b46 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -288,6 +288,7 @@ object StdNames { // Compiler-internal val CAPTURE_ROOT: N = "cap" + val CBCompanion: N = "" val CONSTRUCTOR: N = "" val STATIC_CONSTRUCTOR: N = "" val EVT2U: N = "evt2u$" @@ -396,6 +397,7 @@ object StdNames { val TypeApply: N = "TypeApply" val TypeRef: N = "TypeRef" val UNIT : N = "UNIT" + val WitnessNames: N = "WitnessNames" val acc: N = "acc" val adhocExtensions: N = "adhocExtensions" val andThen: N = "andThen" diff --git a/compiler/src/dotty/tools/dotc/core/SymUtils.scala b/compiler/src/dotty/tools/dotc/core/SymUtils.scala index 65634241b790..3a97a0053dbd 100644 --- a/compiler/src/dotty/tools/dotc/core/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/SymUtils.scala @@ -87,6 +87,9 @@ class SymUtils: !d.isPrimitiveValueClass } + def isContextBoundCompanion(using Context): Boolean = + self.is(Synthetic) && self.infoOrCompleter.typeSymbol == defn.CBCompanion + /** Is this a case class for which a product mirror is generated? * Excluded are value classes, abstract classes and case classes with more than one * parameter section. 
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 15f58956fbe3..91a5899146cc 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1138,6 +1138,7 @@ class TreeUnpickler(reader: TastyReader, }) defn.patchStdLibClass(cls) NamerOps.addConstructorProxies(cls) + NamerOps.addContextBoundCompanions(cls) setSpan(start, untpd.Template(constr, mappedParents, self, lazyStats) .withType(localDummy.termRef)) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 5808707326a0..c06b43cafe17 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -434,11 +434,11 @@ class PlainPrinter(_ctx: Context) extends Printer { sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName /** String representation of a definition's type following its name, - * if symbol is completed, "?" otherwise. + * if symbol is completed, ": ?" otherwise. */ protected def toTextRHS(optType: Option[Type]): Text = optType match { case Some(tp) => toTextRHS(tp) - case None => "?" + case None => ": ?" 
} protected def decomposeLambdas(bounds: TypeBounds): (Text, TypeBounds) = diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index e51f0a8b77ac..04380a7b8e4a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -208,7 +208,9 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case UnstableInlineAccessorID // errorNumber: 192 case VolatileOnValID // errorNumber: 193 case ExtensionNullifiedByMemberID // errorNumber: 194 - case InlinedAnonClassWarningID // errorNumber: 195 + case ConstructorProxyNotValueID // errorNumber: 195 + case ContextBoundCompanionNotValueID // errorNumber: 196 + case InlinedAnonClassWarningID // errorNumber: 197 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 51556a5c93ac..ceb8ecbc8e03 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3203,3 +3203,39 @@ class VolatileOnVal()(using Context) extends SyntaxMsg(VolatileOnValID): protected def msg(using Context): String = "values cannot be volatile" protected def explain(using Context): String = "" + +class ConstructorProxyNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"constructor proxy $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A constructor proxy is a symbol made up by the compiler to represent a non-existent + |factory method of a class. For instance, in + | + | class C(x: Int) + | + |C does not have an apply method since it is not a case class. Yet one can + |still create instances with applications like `C(3)` which expand to `new C(3)`. 
+ |The `C` in this call is a constructor proxy. It can only be used as applications + |but not as a stand-alone value.""" + +class ContextBoundCompanionNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"context bound companion $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A context bound companion is a symbol made up by the compiler to represent the + |witness or witnesses generated for the context bound(s) of a type parameter or type. + |For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + |there is just a type `A` declared but not a value `A`. Nevertheless, one can write + |the selection `A.unit`, which works because the compiler created a context bound + |companion value with the (term-)name `A`. However, these context bound companions + |are not values themselves, they can only be referred to in selections.""" + diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 954b08c24ac1..a110ec53abc0 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -279,9 +279,13 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } } - def checkNoConstructorProxy(tree: Tree)(using Context): Unit = + def checkUsableAsValue(tree: Tree)(using Context): Unit = + def unusable(msg: Symbol => Message) = + report.error(msg(tree.symbol), tree.srcPos) if tree.symbol.is(ConstructorProxy) then - report.error(em"constructor proxy ${tree.symbol} cannot be used as a value", tree.srcPos) + unusable(ConstructorProxyNotValue(_)) + if tree.symbol.isContextBoundCompanion then + unusable(ContextBoundCompanionNotValue(_)) def checkStableSelection(tree: Tree)(using Context): Unit = def check(qual: Tree) = @@ -326,7 +330,7 @@ class 
PostTyper extends MacroTransform with InfoTransformer { thisPhase => if tree.isType then checkNotPackage(tree) else - checkNoConstructorProxy(tree) + checkUsableAsValue(tree) registerNeedsInlining(tree) tree.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) @@ -338,7 +342,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) else - checkNoConstructorProxy(tree) + checkUsableAsValue(tree) transformSelect(tree, Nil) case tree: Apply => val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] @@ -469,8 +473,14 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) sym.addAnnotation(Annotation(defn.SourceFileAnnot, Literal(Constants.Constant(relativePath)), tree.span)) else - if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then - Checking.checkGoodBounds(tree.symbol) + if !sym.is(Param) then + if !sym.owner.isOneOf(AbstractOrTrait) then + Checking.checkGoodBounds(tree.symbol) + if sym.owner.isClass && sym.hasAnnotation(defn.WitnessNamesAnnot) then + val decls = sym.owner.info.decls + for cbCompanion <- decls.lookupAll(sym.name.toTermName) do + if cbCompanion.isContextBoundCompanion then + decls.openForMutations.unlink(cbCompanion) (tree.rhs, sym.info) match case (rhs: LambdaTypeTree, bounds: TypeBounds) => VarianceChecker.checkLambda(rhs, bounds) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 2ebe33a9a14f..c4e1c7892e8d 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -311,9 +311,11 @@ object TreeChecker { def assertDefined(tree: untpd.Tree)(using Context): Unit = if (tree.symbol.maybeOwner.isTerm) { val sym = tree.symbol + 
def isAllowed = // constructor proxies and context bound companions are flagged at PostTyper + isSymWithoutDef(sym) && ctx.phase.id < postTyperPhase.id assert( - nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym), - i"undefined symbol ${sym} at line " + tree.srcPos.line + nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym) || isAllowed, + i"undefined symbol ${sym} in ${sym.owner} at line " + tree.srcPos.line ) if (!ctx.phase.patternTranslated) @@ -384,6 +386,9 @@ object TreeChecker { case _ => } + def isSymWithoutDef(sym: Symbol)(using Context): Boolean = + sym.is(ConstructorProxy) || sym.isContextBoundCompanion + /** Exclude from double definition checks any erased symbols that were * made `private` in phase `UnlinkErasedDecls`. These symbols will be removed * completely in phase `Erasure` if they are defined in a currently compiled unit. @@ -614,14 +619,12 @@ object TreeChecker { val decls = cls.classInfo.decls.toList.toSet.filter(isNonMagicalMember) val defined = impl.body.map(_.symbol) - def isAllowed(sym: Symbol): Boolean = sym.is(ConstructorProxy) - - val symbolsNotDefined = (decls -- defined - constr.symbol).filterNot(isAllowed) + val symbolsMissingDefs = (decls -- defined - constr.symbol).filterNot(isSymWithoutDef) - assert(symbolsNotDefined.isEmpty, - i" $cls tree does not define members: ${symbolsNotDefined.toList}%, %\n" + - i"expected: ${decls.toList}%, %\n" + - i"defined: ${defined}%, %") + assert(symbolsMissingDefs.isEmpty, + i"""$cls tree does not define members: ${symbolsMissingDefs.toList}%, % + |expected: ${decls.toList}%, % + |defined: ${defined}%, %""") super.typedClassDef(cdef, cls) } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 85678b9685f7..393b38c5ff57 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -406,6 +406,11 @@ class Namer { typer: Typer => enterSymbol(sym) setDocstring(sym, 
origStat) addEnumConstants(mdef, sym) + mdef match + case tdef: TypeDef if ctx.owner.isClass => + for case WitnessNamesAnnot(witnessNames) <- tdef.mods.annotations do + addContextBoundCompanionFor(symbolOfTree(tdef), witnessNames, Nil) + case _ => ctx case stats: Thicket => stats.toList.foreach(recur) @@ -1749,12 +1754,6 @@ class Namer { typer: Typer => val sym = tree.symbol if sym.isConstructor then sym.owner else sym - /** Enter and typecheck parameter list */ - def completeParams(params: List[MemberDef])(using Context): Unit = { - index(params) - for (param <- params) typedAheadExpr(param) - } - /** The signature of a module valdef. * This will compute the corresponding module class TypeRef immediately * without going through the defined type of the ValDef. This is necessary @@ -1853,6 +1852,30 @@ class Namer { typer: Typer => // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR + val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() + if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then + for params <- ddef.paramss; case tdef: TypeDef <- params do + for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do + witnessNamesOfParam(tdef) = ws + + /** Are all names in `wnames` defined by the longest prefix of all `params` + * that have been typed ahead (i.e. that carry the TypedAhead attachment)? + */ + def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = + (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty + + /** Enter and typecheck parameter list, add context companions as. + * Once all witness parameters for a context bound are seen, create a + * context bound companion for it. 
+ */ + def completeParams(params: List[MemberDef])(using Context): Unit = + index(params) + for param <- params do + typedAheadExpr(param) + for (tdef, wnames) <- witnessNamesOfParam do + if wnames.contains(param.name) && allParamsSeen(wnames, params) then + addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) + // The following 3 lines replace what was previously just completeParams(tparams). // But that can cause bad bounds being computed, as witnessed by // tests/pos/paramcycle.scala. The problematic sequence is this: diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index f744eb392d7c..37da51157e91 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -840,6 +840,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return dynSelected.ensureConforms(fieldType) case _ => + // Otherwise, if the qualifier is a context bound companion, handle + // by selecting a witness in typedCBSelect + if qual.tpe.typeSymbol == defn.CBCompanion then + val witnessSelection = typedCBSelect(tree0, pt, qual) + if !witnessSelection.isEmpty then return witnessSelection + // Otherwise, report an error assignType(tree, rawType match @@ -849,6 +855,76 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer notAMemberErrorType(tree, qual, pt)) end typedSelect + /** Expand a selection A.m on a context bound companion A with type + * `[ref_1 | ... | ref_N]` as described by + * Step 3 of the doc comment of annotation.internal.WitnessNames. + * @return the best alternative if it exists, + * or EmptyTree if no witness admits selecting with the given name, + * or EmptyTree and report an ambiguity error if there are several + * possible witnesses and no selection is better than the other + * according to the criteria given in Step 3. 
+ */ + def typedCBSelect(tree: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = + + type Alts = List[(/*prev: */Tree, /*prevState: */TyperState, /*prevWitness: */TermRef)] + + /** Compare two alternative selections `alt1` and `alt2` from witness types + * `wit1`, `wit2` according to the 3 criteria in the enclosing doc comment. I.e. + * + * alt1 = qual1.m, alt2 = qual2.m, qual1: wit1, qual2: wit2 + * + * @return 1 if 1st alternative is preferred over 2nd + * -1 if 2nd alternative is preferred over 1st + * 0 if neither alternative is preferred over the other + */ + def compareAlts(alt1: Tree, alt2: Tree, wit1: TermRef, wit2: TermRef): Int = + val cmpPrefix = compare(wit1, wit2, preferGeneral = true) + typr.println(i"compare witnesses $wit1: ${wit1.info}, $wit2: ${wit2.info} = $cmpPrefix") + if cmpPrefix != 0 then cmpPrefix + else (alt1.tpe, alt2.tpe) match + case (tp1: TypeRef, tp2: TypeRef) => + if tp1.dealias == tp2.dealias then 1 else 0 + case (tp1: TermRef, tp2: TermRef) => + if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 + else compare(tp1, tp2, preferGeneral = false) + case (tp1: TermRef, _) => 1 + case (_, tp2: TermRef) => -1 + case _ => 0 + + /** Find the set of maximally preferred alternatives among `prevs` and the + * remaining alternatives generated from `witnesses`, which is a union type + * of witness references. 
+ */ + def tryAlts(prevs: Alts, witnesses: Type): Alts = witnesses match + case OrType(wit1, wit2) => + tryAlts(tryAlts(prevs, wit1), wit2) + case witness: TermRef => + val altQual = tpd.ref(witness).withSpan(qual.span) + val altCtx = ctx.fresh.setNewTyperState() + val alt = typedSelect(tree, pt, altQual)(using altCtx) + def current = (alt, altCtx.typerState, witness) + if altCtx.reporter.hasErrors then prevs + else + val cmps = prevs.map: (prevTree, prevState, prevWitness) => + compareAlts(prevTree, alt, prevWitness, witness) + if cmps.exists(_ == 1) then prevs + else current :: prevs.zip(cmps).collect{ case (prev, cmp) if cmp != -1 => prev } + + qual.tpe.widen match + case AppliedType(_, arg :: Nil) => + tryAlts(Nil, arg) match + case Nil => EmptyTree + case (best @ (bestTree, bestState, _)) :: Nil => + bestState.commit() + bestTree + case multiAlts => + report.error( + em"""Ambiguous witness reference. None of the following alternatives is more specific than the other: + |${multiAlts.map((alt, _, witness) => i"\n $witness.${tree.name}: ${alt.tpe.widen}")}""", + tree.srcPos) + EmptyTree + end typedCBSelect + def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { record("typedSelect") diff --git a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala new file mode 100644 index 000000000000..f859cda96d06 --- /dev/null +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -0,0 +1,53 @@ +package scala.annotation +package internal + +/** An annotation that is used for marking type definitions that should get + * context bound companions. The scheme is as follows: + * + * 1. When desugaring a context-bounded type A, add a @WitnessNames(n_1, ... , n_k) + * annotation to the type declaration node, where n_1, ..., n_k are the names of + * all the witnesses generated for the context bounds of A. This annotation will + * be pickled as usual. + * + * 2. 
During Namer or Unpickling, when encountering a type declaration A with + * a WitnessNames(n_1, ... , n_k) annotation, create a CB companion `val A` with + * type ``[ref_1 | ... | ref_k] where ref_i is a TermRef + * with the same prefix as A and name n_i. Except, don't do this if the type in + * question is a type parameter and there is already a term parameter with name A + * defined for the same method. + * + * ContextBoundCompanion is defined as an internal abstract type like this: + * + * type ``[-Refs] + * + * The context bound companion's variance is negative, so that unions in the + * arguments are joined when encountering multiple definitions and forming a glb. + * + * 3. Add a special case for typing a selection A.m on a value A of type + * ContextBoundCompanion[ref_1, ..., ref_k]. Namely, try to typecheck all + * selections ref_1.m, ..., ref_k.m with the expected type. There must be + * a unique selection ref_i.m that typechecks and such that for all other + * selections ref_j.m that also typecheck one of the following three criteria + * applies: + * + * 1. ref_i.m and ref_j.m are the same. This means: If they are types then + * ref_i.m is an alias of ref_j.m. If they are terms then they are both + * singleton types and ref_i.m =:= ref_j.m. + * 2. The underlying type (under widen) of ref_i is a true supertype of the + * underlying type of ref_j. + * 3. ref_i.m is a term, the underlying type of ref_j is not a strict subtype + * of the underlying type of ref_j, and the underlying type of ref_i.m is a + * strict subtype of the underlying type of ref_j.m. + * + * If there is such a selection, map A.m to ref_i.m, otherwise report an error. + * + * (2) might surprise. It is the analogue of given disambiguation, where we also + * pick the most general candidate that matches the expected type. E.g. we have + * context bounds for Functor, Monad, and Applicative. In this case we want to + * select the `map` method of `Functor`. + * + * 4. 
At PostTyper, issue an error when encountering any reference to a CB companion. + */ +class WitnessNames(names: String*) extends StaticAnnotation + + diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 3b28733226a0..6c3640eed12c 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -20,6 +20,8 @@ object MiMaFilters { ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.WitnessNames"), ), // Additions since last LTS diff --git a/tests/neg/cb-companion-leaks.check b/tests/neg/cb-companion-leaks.check new file mode 100644 index 000000000000..156f8a7ab3ee --- /dev/null +++ b/tests/neg/cb-companion-leaks.check @@ -0,0 +1,66 @@ +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:9:23 ---------------------------------------------------------- +9 | def foo[A: {C, D}] = A // error + | ^ + | context bound companion value A cannot be used as a value + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. 
Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + --------------------------------------------------------------------------------------------------------------------- +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:13:10 --------------------------------------------------------- +13 | val x = A // error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:15:9 ---------------------------------------------------------- +15 | val y: A.type = ??? 
// error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/cb-companion-leaks.scala b/tests/neg/cb-companion-leaks.scala new file mode 100644 index 000000000000..07155edb05dc --- /dev/null +++ b/tests/neg/cb-companion-leaks.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future -explain + +class C[Self] + +class D[Self] + +trait Test: + + def foo[A: {C, D}] = A // error + + type A: C + + val x = A // error + + val y: A.type = ??? 
// error + diff --git a/tests/pos-macros/i8325/Macro_1.scala b/tests/pos-macros/i8325/Macro_1.scala index 18466e17b3df..92a54d21b00a 100644 --- a/tests/pos-macros/i8325/Macro_1.scala +++ b/tests/pos-macros/i8325/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -15,7 +15,7 @@ object A: import quotes.reflect.* expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) - case Apply(fun,args) => '{ A.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } + case Apply(fun,args) => '{ O.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325/Test_2.scala b/tests/pos-macros/i8325/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325/Test_2.scala +++ b/tests/pos-macros/i8325/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos-macros/i8325b/Macro_1.scala b/tests/pos-macros/i8325b/Macro_1.scala index 181efa260f9b..139abed94078 100644 --- a/tests/pos-macros/i8325b/Macro_1.scala +++ b/tests/pos-macros/i8325b/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -16,7 +16,7 @@ object A: expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) case r@Apply(fun,args) => '{ - A.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } + O.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325b/Test_2.scala b/tests/pos-macros/i8325b/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325b/Test_2.scala +++ b/tests/pos-macros/i8325b/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git 
a/tests/pos/FromString-cb-companion.scala b/tests/pos/FromString-cb-companion.scala new file mode 100644 index 000000000000..d086420761ee --- /dev/null +++ b/tests/pos/FromString-cb-companion.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[Self]: + def fromString(s: String): Self + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/cb-companion-joins.scala b/tests/pos/cb-companion-joins.scala new file mode 100644 index 000000000000..97e0a8a7e4ac --- /dev/null +++ b/tests/pos/cb-companion-joins.scala @@ -0,0 +1,21 @@ +import language.experimental.modularity +import language.future + +trait M[Self]: + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num[Self]: + def zero: Self + +trait A extends M[A] +trait B extends M[A] + +trait AA: + type X: M +trait BB: + type X: Num +class CC[X1: {M, Num}] extends AA, BB: + type X = X1 + X.zero + X.unit From 5f3ff9ff3295057b1344ade9c32932b8f8af4550 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 23:33:36 +0200 Subject: [PATCH 358/465] Allow contecxt bounds with abstract `Self` types If a context bound type `T` for type parameter `A` does not have type parameters, demand evidence of type `T { type Self = A }` instead. 
[Cherry-picked c6388c2785f628b7e4a8680b6d4f1e7be0b0a925] --- .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../src/dotty/tools/dotc/typer/Typer.scala | 6 +- .../test/dotc/pos-test-pickling.blacklist | 7 +- .../scala/runtime/stdLibPatches/Predef.scala | 13 + tests/pos/FromString.scala | 15 + tests/pos/deferred-givens.scala | 12 +- tests/pos/deferredSummon.scala | 11 +- tests/pos/dep-context-bounds.scala | 11 +- tests/pos/hylolib-extract.scala | 29 ++ tests/pos/hylolib/AnyCollection.scala | 51 +++ tests/pos/hylolib/AnyValue.scala | 67 ++++ tests/pos/hylolib/AnyValueTests.scala | 15 + tests/pos/hylolib/BitArray.scala | 362 ++++++++++++++++++ tests/pos/hylolib/Collection.scala | 267 +++++++++++++ tests/pos/hylolib/CollectionTests.scala | 67 ++++ tests/pos/hylolib/CoreTraits.scala | 56 +++ tests/pos/hylolib/Hasher.scala | 39 ++ tests/pos/hylolib/HyArray.scala | 202 ++++++++++ tests/pos/hylolib/HyArrayTests.scala | 17 + tests/pos/hylolib/Integers.scala | 46 +++ tests/pos/hylolib/IntegersTests.scala | 14 + tests/pos/hylolib/Range.scala | 37 ++ tests/pos/hylolib/Slice.scala | 63 +++ tests/pos/hylolib/StringConvertible.scala | 9 + tests/pos/hylolib/Test.scala | 16 + tests/pos/i10929-new-syntax.scala | 22 ++ tests/pos/ord-over-tracked.scala | 15 + tests/pos/parsercombinators-arrow.scala | 48 +++ tests/pos/parsercombinators-ctx-bounds.scala | 49 +++ tests/pos/parsercombinators-new-syntax.scala | 45 +++ tests/pos/parsercombinators-this.scala | 53 +++ tests/pos/sets-tc.scala | 46 +++ tests/pos/typeclass-aggregates.scala | 32 +- tests/pos/typeclasses-arrow.scala | 140 +++++++ tests/pos/typeclasses-this.scala | 141 +++++++ tests/pos/typeclasses.scala | 47 ++- tests/run/for-desugar-strawman.scala | 96 +++++ tests/run/given-disambiguation.scala | 58 +++ tests/run/i15840.scala | 27 ++ 39 files changed, 2199 insertions(+), 53 deletions(-) create mode 100644 tests/pos/FromString.scala create mode 100644 tests/pos/hylolib-extract.scala create mode 100644 
tests/pos/hylolib/AnyCollection.scala create mode 100644 tests/pos/hylolib/AnyValue.scala create mode 100644 tests/pos/hylolib/AnyValueTests.scala create mode 100644 tests/pos/hylolib/BitArray.scala create mode 100644 tests/pos/hylolib/Collection.scala create mode 100644 tests/pos/hylolib/CollectionTests.scala create mode 100644 tests/pos/hylolib/CoreTraits.scala create mode 100644 tests/pos/hylolib/Hasher.scala create mode 100644 tests/pos/hylolib/HyArray.scala create mode 100644 tests/pos/hylolib/HyArrayTests.scala create mode 100644 tests/pos/hylolib/Integers.scala create mode 100644 tests/pos/hylolib/IntegersTests.scala create mode 100644 tests/pos/hylolib/Range.scala create mode 100644 tests/pos/hylolib/Slice.scala create mode 100644 tests/pos/hylolib/StringConvertible.scala create mode 100644 tests/pos/hylolib/Test.scala create mode 100644 tests/pos/i10929-new-syntax.scala create mode 100644 tests/pos/ord-over-tracked.scala create mode 100644 tests/pos/parsercombinators-arrow.scala create mode 100644 tests/pos/parsercombinators-ctx-bounds.scala create mode 100644 tests/pos/parsercombinators-new-syntax.scala create mode 100644 tests/pos/parsercombinators-this.scala create mode 100644 tests/pos/sets-tc.scala create mode 100644 tests/pos/typeclasses-arrow.scala create mode 100644 tests/pos/typeclasses-this.scala create mode 100644 tests/run/for-desugar-strawman.scala create mode 100644 tests/run/given-disambiguation.scala create mode 100644 tests/run/i15840.scala diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index ab7e4eea0b46..b935488695e0 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -388,6 +388,7 @@ object StdNames { val RootPackage: N = "RootPackage" val RootClass: N = "RootClass" val Select: N = "Select" + val Self: N = "Self" val Shape: N = "Shape" val StringContext: N = "StringContext" val This: N = "This" diff 
--git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 37da51157e91..6ac41ed619b6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2366,9 +2366,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) if tycon.tpe.typeParams.nonEmpty then typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) + else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractType then + val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) + typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else errorTree(tree, - em"""Illegal context bound: ${tycon.tpe} does not take type parameters.""") + em"""Illegal context bound: ${tycon.tpe} does not take type parameters and + |does not have an abstract type member named `Self` either.""") def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index e58277bdc0e5..d6f962176ecc 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -127,10 +127,11 @@ i20053b.scala # alias types at different levels of dereferencing parsercombinators-givens.scala parsercombinators-givens-2.scala +parsercombinators-ctx-bounds.scala +parsercombinators-this.scala parsercombinators-arrow.scala +parsercombinators-new-syntax.scala hylolib-deferred-given hylolib-cb - - - +hylolib diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 7abd92e408f8..a68a628623bf 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala 
@@ -66,4 +66,17 @@ object Predef: extension (opt: Option.type) @experimental inline def fromNullable[T](t: T | Null): Option[T] = Option(t).asInstanceOf[Option[T]] + + /** A type supporting Self-based type classes. + * + * A is TC + * + * expands to + * + * TC { type Self = A } + * + * which is what is needed for a context bound `[A: TC]`. + */ + infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } + end Predef diff --git a/tests/pos/FromString.scala b/tests/pos/FromString.scala new file mode 100644 index 000000000000..333a4c002989 --- /dev/null +++ b/tests/pos/FromString.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future + +trait FromString: + type Self + def fromString(s: String): Self + +given Int is FromString = _.toInt + +given Double is FromString = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala index 51fa43866d1e..b9018c97e151 100644 --- a/tests/pos/deferred-givens.scala +++ b/tests/pos/deferred-givens.scala @@ -1,9 +1,19 @@ //> using options -language:experimental.modularity -source future import compiletime.* class Ord[Elem] - given Ord[Double] +trait A: + type Elem : Ord + def foo = summon[Ord[Elem]] + +class AC extends A: + type Elem = Double + override given Ord[Elem] = ??? 
+ +class AD extends A: + type Elem = Double + trait B: type Elem given Ord[Elem] = deferred diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala index 31a9697eda6b..f8252576d81a 100644 --- a/tests/pos/deferredSummon.scala +++ b/tests/pos/deferredSummon.scala @@ -1,20 +1,21 @@ //> using options -language:experimental.modularity -source future import compiletime.deferred -trait Ord[Self]: +trait Ord: + type Self def less(x: Self, y: Self): Boolean trait A: type Elem - given Ord[Elem] = deferred - def foo = summon[Ord[Elem]] + given Elem is Ord = deferred + def foo = summon[Elem is Ord] trait B: type Elem: Ord - def foo = summon[Ord[Elem]] + def foo = summon[Elem is Ord] object Inst: - given Ord[Int]: + given Int is Ord: def less(x: Int, y: Int) = x < y object Test1: diff --git a/tests/pos/dep-context-bounds.scala b/tests/pos/dep-context-bounds.scala index 434805762622..c724d92e9809 100644 --- a/tests/pos/dep-context-bounds.scala +++ b/tests/pos/dep-context-bounds.scala @@ -1,6 +1,13 @@ //> using options -language:experimental.modularity -source future -trait A[X]: - type Self = X +trait A: + type Self + +object Test1: + def foo[X: A](x: X.Self) = ??? + + def bar[X: A](a: Int) = ??? + + def baz[X: A](a: Int)(using String) = ??? object Test2: def foo[X: A as x](a: x.Self) = ??? diff --git a/tests/pos/hylolib-extract.scala b/tests/pos/hylolib-extract.scala new file mode 100644 index 000000000000..846e52f30df6 --- /dev/null +++ b/tests/pos/hylolib-extract.scala @@ -0,0 +1,29 @@ +//> using options -language:experimental.modularity -source future +package hylotest + +trait Value: + type Self + extension (self: Self) def eq(other: Self): Boolean + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. 
*/ + type Element: Value + +class BitArray + +given Boolean is Value: + extension (self: Self) def eq(other: Self): Boolean = + self == other + +given BitArray is Collection: + type Element = Boolean + +extension [Self: Value](self: Self) + def neq(other: Self): Boolean = !self.eq(other) + +extension [Self: Collection](self: Self) + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + ??? diff --git a/tests/pos/hylolib/AnyCollection.scala b/tests/pos/hylolib/AnyCollection.scala new file mode 100644 index 000000000000..6c2b835852e6 --- /dev/null +++ b/tests/pos/hylolib/AnyCollection.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. 
*/ + def apply[Base: Collection](base: Base): AnyCollection[Base.Element] = + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[Base.Position])) + + def at(p: AnyValue): Base.Element = + base.at(p.unsafelyUnwrappedAs[Base.Position]) + + new AnyCollection[Base.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given [T: Value] => AnyCollection[T] is Collection: + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) + def startPosition = self._start() + def endPosition = self._end() + def positionAfter(p: Position) = self._after(p) + def at(p: Position) = self._at(p) + diff --git a/tests/pos/hylolib/AnyValue.scala b/tests/pos/hylolib/AnyValue.scala new file mode 100644 index 000000000000..6844135b646b --- /dev/null +++ b/tests/pos/hylolib/AnyValue.scala @@ -0,0 +1,67 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. 
*/ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. */ + def apply[T: Value](wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given AnyValue is Value: + + extension (self: AnyValue) + def copy(): AnyValue = self.copy() + def eq(other: AnyValue): Boolean = self `eq` other + def hashInto(hasher: Hasher): Hasher = self.hashInto(hasher) + diff --git a/tests/pos/hylolib/AnyValueTests.scala b/tests/pos/hylolib/AnyValueTests.scala new file mode 100644 index 000000000000..96d3563f4f53 --- /dev/null +++ b/tests/pos/hylolib/AnyValueTests.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class AnyValueTests extends munit.FunSuite: + + test("eq"): + val a = AnyValue(1) + assert(a `eq` a) + assert(!(a `neq` a)) + + val b = AnyValue(2) + assert(!(a `eq` b)) + assert(a `neq` b) + diff --git a/tests/pos/hylolib/BitArray.scala b/tests/pos/hylolib/BitArray.scala new file mode 100644 index 000000000000..6ef406e5ad83 --- /dev/null +++ b/tests/pos/hylolib/BitArray.scala @@ -0,0 +1,362 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. 
*/ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. 
*/ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. 
+ */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. 
+ * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. 
*/ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. */ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given BitArray.Position is Value: + + extension (self: BitArray.Position) + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + +given BitArray is Collection: + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + +given BitArray is StringConvertible: + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + diff --git a/tests/pos/hylolib/Collection.scala b/tests/pos/hylolib/Collection.scala new file mode 100644 index 000000000000..bef86a967e6e --- /dev/null +++ 
b/tests/pos/hylolib/Collection.scala @@ -0,0 +1,267 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value + + extension (self: Self) + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def loop(p: Position, n: Int): Int = + if p `eq` e then n else loop(self.positionAfter(p), n + 1) + loop(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if i `eq` e then false + else if j `eq` e then true + else + def recur(n: Position): Boolean = + if n `eq` j then true + else if n `eq` e then false + else recur(self.positionAfter(n)) + recur(self.positionAfter(i)) + + class Slice2(val base: Self, val bounds: Range[Position]): + + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Position = + bounds.lowerBound + + def endPosition: Position = + bounds.upperBound + + def at(p: Position): Element = + base.at(p) + end Slice2 + +end Collection + +extension [Self: Collection](self: Self) + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(Self.Element, Slice[Self])] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + def headAndTail2: Option[(Self.Element, Self.Slice2)] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Self.Slice2(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T)(combine: (T, Self.Element) => T): T = + val e = self.endPosition + def loop(p: Self.Position, r: T): T = + if p `eq` e then r + else loop(self.positionAfter(p), combine(r, self.at(p))) + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. 
+ * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: Self.Element => Boolean): Boolean = + val e = self.endPosition + def loop(p: Self.Position): Boolean = + if p `eq` e then true + else if !action(self.at(p)) then false + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T: Value](transform: Self.Element => T): HyArray[T] = + self.reduce(HyArray[T]()): (r, e) => + r.append(transform(e), assumeUniqueness = true) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: Self.Element => Boolean): HyArray[Self.Element] = + self.reduce(HyArray[Self.Element]()): (r, e) => + if isIncluded(e) then r.append(e, assumeUniqueness = true) else r + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def firstPositionWhere(predicate: Self.Element => Boolean): Option[Self.Position] = + val e = self.endPosition + def loop(p: Self.Position): Option[Self.Position] = + if p `eq` e then None + else if predicate(self.at(p)) then Some(p) + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Self.Element is Comparable): Option[Self.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Self.Element is Comparable): Option[Self.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def leastElement(isOrderedBefore: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + if self.isEmpty then + None + else + val e = self.endPosition + def loop(p: Self.Position, least: Self.Element): Self.Element = + if p `eq` e then + least + else + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + loop(self.positionAfter(p), y) + val b = self.startPosition + Some(loop(self.positionAfter(b), self.at(b))) + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. */ + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + def loop(i: Self.Position, j: T.Position): Boolean = + if i `eq` self.endPosition then + j `eq` other.endPosition + else if j `eq` other.endPosition then + false + else if self.at(i) `neq` other.at(j)then + false + else + loop(self.positionAfter(i), other.positionAfter(j)) + loop(self.startPosition, other.startPosition) +end extension diff --git a/tests/pos/hylolib/CollectionTests.scala b/tests/pos/hylolib/CollectionTests.scala new file mode 100644 index 000000000000..d884790f64d7 --- /dev/null +++ b/tests/pos/hylolib/CollectionTests.scala @@ -0,0 +1,67 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class CollectionTests extends munit.FunSuite: + + test("isEmpty"): + val empty = AnyCollection(HyArray[Int]()) + assert(empty.isEmpty) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2)) + assert(!nonEmpty.isEmpty) + + test("count"): + val a = AnyCollection(HyArray[Int](1, 2)) + assertEquals(a.count, 2) + + test("isBefore"): + val empty = AnyCollection(HyArray[Int]()) + assert(!empty.isBefore(empty.startPosition, empty.endPosition)) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2)) + val p0 = nonEmpty.startPosition + val p1 = nonEmpty.positionAfter(p0) + val p2 = nonEmpty.positionAfter(p1) + assert(nonEmpty.isBefore(p0, nonEmpty.endPosition)) + assert(nonEmpty.isBefore(p1, 
nonEmpty.endPosition)) + assert(!nonEmpty.isBefore(p2, nonEmpty.endPosition)) + + test("headAndTail"): + val empty = AnyCollection(HyArray[Int]()) + assertEquals(empty.headAndTail, None) + + val one = AnyCollection(HyArray[Int](1)) + val Some((h0, t0)) = one.headAndTail: @unchecked + assert(h0 eq 1) + assert(t0.isEmpty) + + val two = AnyCollection(HyArray[Int](1, 2)) + val Some((h1, t1)) = two.headAndTail: @unchecked + assertEquals(h1, 1) + assertEquals(t1.count, 1) + + test("reduce"): + val empty = AnyCollection(HyArray[Int]()) + assertEquals(empty.reduce(0)((s, x) => s + x), 0) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3)) + assertEquals(nonEmpty.reduce(0)((s, x) => s + x), 6) + + test("forEach"): + val empty = AnyCollection(HyArray[Int]()) + assert(empty.forEach((e) => false)) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3)) + var s = 0 + assert(nonEmpty.forEach((e) => { s += e; true })) + assertEquals(s, 6) + + s = 0 + assert(!nonEmpty.forEach((e) => { s += e; false })) + assertEquals(s, 1) + + test("elementsEqual"): + val a = HyArray(1, 2) + assert(a.elementsEqual(a)) +end CollectionTests diff --git a/tests/pos/hylolib/CoreTraits.scala b/tests/pos/hylolib/CoreTraits.scala new file mode 100644 index 000000000000..f4b3699b430e --- /dev/null +++ b/tests/pos/hylolib/CoreTraits.scala @@ -0,0 +1,56 @@ +package hylo + +/** A type whose instance can be treated as independent values. + * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value: + type Self + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + def neq(other: Self): Boolean = !self.eq(other) + + /** Hashes the salient parts of `self` into `hasher`. 
*/ + def hashInto(hasher: Hasher): Hasher + + } + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable extends Value { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib/Hasher.scala b/tests/pos/hylolib/Hasher.scala new file mode 100644 index 000000000000..ca45550ed002 --- /dev/null +++ b/tests/pos/hylolib/Hasher.scala @@ -0,0 +1,39 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. 
*/ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib/HyArray.scala b/tests/pos/hylolib/HyArray.scala new file mode 100644 index 000000000000..de5e83d3b1a3 --- /dev/null +++ b/tests/pos/hylolib/HyArray.scala @@ -0,0 +1,202 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element: Value as elementIsCValue]( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. 
*/ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + /** Adds the contents of `source` at the end of the array. */ + def appendContents[C: Collection { type Element = HyArray.this.Element }]( + source: C, assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + source.reduce(result): (r, e) => + r.append(e, assumeUniqueness = true) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). 
+ */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T: Value](elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given [T: Value] => HyArray[T] is Value: + + extension (self: HyArray[T]) + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher)((h, e) => e.hashInto(h)) + +given [T: Value] => HyArray[T] is Collection: + + type Element = T + type Position = Int + + extension (self: HyArray[T]) + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. 
+ override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + +given [T: {Value, StringConvertible}] => HyArray[T] is StringConvertible: + extension (self: HyArray[T]) + override def description: String = + val contents = mutable.StringBuilder() + self.forEach: e => + contents ++= e.description + true + s"[${contents.mkString(", ")}]" diff --git a/tests/pos/hylolib/HyArrayTests.scala b/tests/pos/hylolib/HyArrayTests.scala new file mode 100644 index 000000000000..0de65603d0c7 --- /dev/null +++ b/tests/pos/hylolib/HyArrayTests.scala @@ -0,0 +1,17 @@ +import hylo.* +import hylo.given + +class HyArrayTests extends munit.FunSuite: + + test("reserveCapacity"): + var a = HyArray[Int]() + a = a.append(1) + a = a.append(2) + + a = a.reserveCapacity(10) + assert(a.capacity >= 10) + assertEquals(a.count, 2) + assertEquals(a.at(0), 1) + assertEquals(a.at(1), 2) + +end HyArrayTests diff --git a/tests/pos/hylolib/Integers.scala b/tests/pos/hylolib/Integers.scala new file mode 100644 index 000000000000..f7334ae40786 --- /dev/null +++ b/tests/pos/hylolib/Integers.scala @@ -0,0 +1,46 @@ +package hylo + +given Boolean is Value: + + extension (self: Boolean) + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + +given Int is Value: + + extension (self: Int) + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. 
+ self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + +given Int is Comparable: + + extension (self: Int) + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + +given Int is StringConvertible diff --git a/tests/pos/hylolib/IntegersTests.scala b/tests/pos/hylolib/IntegersTests.scala new file mode 100644 index 000000000000..74dedf30d83e --- /dev/null +++ b/tests/pos/hylolib/IntegersTests.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class IntegersTests extends munit.FunSuite: + + test("Int.hashInto"): + val x = Hasher.hash(42) + val y = Hasher.hash(42) + assertEquals(x, y) + + val z = Hasher.hash(1337) + assertNotEquals(x, z) + diff --git a/tests/pos/hylolib/Range.scala b/tests/pos/hylolib/Range.scala new file mode 100644 index 000000000000..b0f50dd55c8c --- /dev/null +++ b/tests/pos/hylolib/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. */ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). 
+ * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib/Slice.scala b/tests/pos/hylolib/Slice.scala new file mode 100644 index 000000000000..d54f855b1041 --- /dev/null +++ b/tests/pos/hylolib/Slice.scala @@ -0,0 +1,63 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base: Collection]( + val base: Base, + val bounds: Range[Base.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Base.Position = + bounds.lowerBound + + def endPosition: Base.Position = + bounds.upperBound + + def positionAfter(p: Base.Position): Base.Position = + base.positionAfter(p) + + def at(p: Base.Position): Base.Element = + base.at(p) + +} + +given [C: Collection] => Slice[C] is Collection: + + type Element = C.Element + type Position = C.Position + + extension (self: Slice[C]) + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] + // This is actually unsafe. We have: + // self.bounds: Range(Slice[C].Base.Position) + // But the _value_ of Slice[C].Base is not necssarily this given, even + // though it is true that `type Slice[C].Base = C`. There might be multiple + // implementations of `Slice[C] is Collection` that define different `Position` + // types. So we cannot conclude that `Slice[C].Base.Position = this.Position`. + // To make this safe, we'd need some form of coherence, where we ensure that + // there is only one way to implement `Slice is Collection`. + // + // As an alternativem we can make Slice dependent on the original Collection + // _instance_ instead of the original Collection _type_. This design is + // realized by the Slice2 definitions. It works without casts. 
+ + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + +given [C: Collection] => C.Slice2 is Collection: + type Element = C.Element + type Position = C.Position + + extension (self: C.Slice2) + + def startPosition = self.bounds.lowerBound + def endPosition = self.bounds.upperBound + def positionAfter(p: Position) = self.base.positionAfter(p) + def at(p: Position) = self.base.at(p) diff --git a/tests/pos/hylolib/StringConvertible.scala b/tests/pos/hylolib/StringConvertible.scala new file mode 100644 index 000000000000..cf901d9a3313 --- /dev/null +++ b/tests/pos/hylolib/StringConvertible.scala @@ -0,0 +1,9 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible: + type Self + + /** Returns a textual description of `self`. */ + extension (self: Self) + def description: String = self.toString diff --git a/tests/pos/hylolib/Test.scala b/tests/pos/hylolib/Test.scala new file mode 100644 index 000000000000..9e8d6181affd --- /dev/null +++ b/tests/pos/hylolib/Test.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +object munit: + open class FunSuite: + def test(name: String)(op: => Unit): Unit = op + def assertEquals[T](x: T, y: T) = assert(x == y) + def assertNotEquals[T](x: T, y: T) = assert(x != y) + +@main def Test = + CollectionTests() + AnyValueTests() + HyArrayTests() + IntegersTests() + println("done") diff --git a/tests/pos/i10929-new-syntax.scala b/tests/pos/i10929-new-syntax.scala new file mode 100644 index 000000000000..11c5e9313d4c --- /dev/null +++ b/tests/pos/i10929-new-syntax.scala @@ -0,0 +1,22 @@ +//> using options -language:experimental.modularity -source future +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple 
is TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest is TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + (f(x.head) *: Rest.map(x.tail)(f)) + +def foo[T: TupleOf[Int]](xs: T): T.Mapped[Int] = T.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok diff --git a/tests/pos/ord-over-tracked.scala b/tests/pos/ord-over-tracked.scala new file mode 100644 index 000000000000..a9b4aba556e1 --- /dev/null +++ b/tests/pos/ord-over-tracked.scala @@ -0,0 +1,15 @@ +import language.experimental.modularity + +trait Ord[T]: + def lt(x: T, y: T): Boolean + +given Ord[Int] = ??? + +case class D(tracked val x: Int) +given [T <: D]: Ord[T] = (a, b) => a.x < b.x + +def mySort[T: Ord](x: Array[T]): Array[T] = ??? + +def test = + val arr = Array(D(1)) + val arr1 = mySort(arr) // error: no given instance of type Ord[D{val x: (1 : Int)}] \ No newline at end of file diff --git a/tests/pos/parsercombinators-arrow.scala b/tests/pos/parsercombinators-arrow.scala new file mode 100644 index 000000000000..f8bec02067e5 --- /dev/null +++ b/tests/pos/parsercombinators-arrow.scala @@ -0,0 +1,48 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given [C, E] => Apply[C, E] is Combinator: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + +given [A: Combinator, B: Combinator { type Context = A.Context }] + => Combine[A, B] is Combinator: + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked diff --git a/tests/pos/parsercombinators-ctx-bounds.scala b/tests/pos/parsercombinators-ctx-bounds.scala new file mode 100644 index 000000000000..d77abea5e539 --- /dev/null +++ b/tests/pos/parsercombinators-ctx-bounds.scala @@ -0,0 +1,49 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: [X] =>> Combinator[X] { type Context = A.Context }] + : Combinator[Combine[A, B]] with + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/parsercombinators-new-syntax.scala b/tests/pos/parsercombinators-new-syntax.scala new file mode 100644 index 000000000000..f984972b915d --- /dev/null +++ b/tests/pos/parsercombinators-new-syntax.scala @@ -0,0 +1,45 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + type Self + type Input + type Result + + extension (self: Self) + /// Parses and returns an element from input `in`. 
+ def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](first: A, second: B) + +given [I, R] => Apply[I, R] is Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] is Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for x <- self.first.parse(in); y <- self.second.parse(in) yield (x, y) + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // was error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Input` + val rc: Option[(Int, Int)] = r + diff --git a/tests/pos/parsercombinators-this.scala b/tests/pos/parsercombinators-this.scala new file mode 100644 index 000000000000..70b423985400 --- /dev/null +++ b/tests/pos/parsercombinators-this.scala @@ -0,0 +1,53 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator with { + type Self = Apply[C, E] + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: Combinator { type Context = A.Context }] + : Combinator with + type Self = Combine[A, B] + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/sets-tc.scala b/tests/pos/sets-tc.scala new file mode 100644 index 000000000000..86349bf6a405 --- /dev/null +++ b/tests/pos/sets-tc.scala @@ -0,0 +1,46 @@ +import language.experimental.modularity + +// First version: higher-kinded self type +object v1: + trait Set: + type Self[A] + def empty[A]: Self[A] + def union[A](self: Self[A], other: Self[A]): Self[A] + + case class ListSet[A](elems: List[A]) + + given ListSet is Set: + def empty[A]: ListSet[A] = ListSet(Nil) + + def union[A](self: ListSet[A], other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S[_]: Set](xs: List[S[A]]): S[A] = + xs.foldLeft(S.empty)(S.union) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + + // Second version: parameterized type 
class +object v2: + trait Set[A]: + type Self + def empty: Self + extension (s: Self) def union (other: Self): Self + + case class ListSet[A](elems: List[A]) + + given [A] => ListSet[A] is Set[A]: + def empty: ListSet[A] = ListSet(Nil) + + extension (self: ListSet[A]) def union(other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S: Set[A]](xs: List[S]): S = + xs.foldLeft(S.empty)(_ `union` _) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala index 9bb576603b7b..5e4551b226b7 100644 --- a/tests/pos/typeclass-aggregates.scala +++ b/tests/pos/typeclass-aggregates.scala @@ -1,47 +1,47 @@ //> using options -source future -language:experimental.modularity trait Ord: - type This - extension (x: This) - def compareTo(y: This): Int - def < (y: This): Boolean = compareTo(y) < 0 - def > (y: This): Boolean = compareTo(y) > 0 + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 trait OrdProxy extends Ord: export Ord.this.* trait SemiGroup: - type This - extension (x: This) def combine(y: This): This + type Self + extension (x: Self) def combine(y: Self): Self trait SemiGroupProxy extends SemiGroup: export SemiGroup.this.* trait Monoid extends SemiGroup: - def unit: This + def unit: Self trait MonoidProxy extends Monoid: export Monoid.this.* -def ordWithMonoid(ord: Ord, monoid: Monoid{ type This = ord.This }): Ord & Monoid = +def ordWithMonoid(ord: Ord, monoid: Monoid{ type Self = ord.Self }): Ord & Monoid = new ord.OrdProxy with monoid.MonoidProxy {} trait OrdWithMonoid extends Ord, Monoid -def ordWithMonoid2(ord: Ord, monoid: Monoid{ type This = ord.This }) = //: OrdWithMonoid { type This = ord.This} = +def ordWithMonoid2(ord: Ord, monoid: Monoid{ type Self = ord.Self }) = //: OrdWithMonoid { type 
Self = ord.Self} = new OrdWithMonoid with ord.OrdProxy with monoid.MonoidProxy {} -given intOrd: (Ord { type This = Int }) = ??? -given intMonoid: (Monoid { type This = Int }) = ??? +given intOrd: (Ord { type Self = Int }) = ??? +given intMonoid: (Monoid { type Self = Int }) = ??? -//given (using ord: Ord, monoid: Monoid{ type This = ord.This }): (Ord & Monoid { type This = ord.This}) = +//given (using ord: Ord, monoid: Monoid{ type Self = ord.Self }): (Ord & Monoid { type Self = ord.Self}) = // ordWithMonoid2(ord, monoid) -val x = summon[Ord & Monoid { type This = Int}] -val y: Int = ??? : x.This +val x = summon[Ord & Monoid { type Self = Int}] +val y: Int = ??? : x.Self // given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = // new ord.OrdProxy with monoid.MonoidProxy {} -given [A](using ord: Ord { type This = A }, monoid: Monoid { type This = A}): ((Ord & Monoid) { type This = A}) = +given [A](using ord: Ord { type Self = A }, monoid: Monoid { type Self = A}): ((Ord & Monoid) { type Self = A}) = new ord.OrdProxy with monoid.MonoidProxy {} diff --git a/tests/pos/typeclasses-arrow.scala b/tests/pos/typeclasses-arrow.scala new file mode 100644 index 000000000000..379365ffa1c5 --- /dev/null +++ b/tests/pos/typeclasses-arrow.scala @@ -0,0 +1,140 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait 
Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Int is Ord as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] is Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List is Monad as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] is Monad as readerMonad: + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T is Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type 
Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep is Animal: + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses-this.scala b/tests/pos/typeclasses-this.scala new file mode 100644 index 000000000000..20ce78678b22 --- /dev/null +++ b/tests/pos/typeclasses-this.scala @@ -0,0 +1,141 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: 
A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given intOrd: Int is Ord with + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + +// given [T](using tracked val ev: Ord { type Self = T}): Ord { type Self = List[T] } with + given [T: Ord]: List[T] is Ord with + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given listMonad: List is Monad with + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given readerMonad[Ctx]: Reader[Ctx] is Monad with + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given descending[T: Ord]: T is Ord with + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // 
Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep is Animal with + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala index 2bf7f76f0804..d0315a318310 100644 --- a/tests/pos/typeclasses.scala +++ b/tests/pos/typeclasses.scala @@ -3,38 +3,36 @@ class Common: trait Ord: - type This - extension (x: This) - def compareTo(y: This): Int - def < (y: This): Boolean = compareTo(y) < 0 - def > (y: This): Boolean = compareTo(y) > 0 + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 trait SemiGroup: - type This - extension (x: This) def combine(y: This): This + type Self + extension (x: Self) def combine(y: Self): Self trait Monoid extends SemiGroup: - def unit: This + def unit: Self trait Functor: - type This[A] - extension [A](x: This[A]) def map[B](f: A => B): This[B] + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] trait Monad extends Functor: - def pure[A](x: A): This[A] - extension [A](x: This[A]) - def flatMap[B](f: A => 
This[B]): This[B] + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] def map[B](f: A => B) = x.flatMap(f `andThen` pure) - infix type is[A <: AnyKind, B <: {type This <: AnyKind}] = B { type This = A } - end Common object Instances extends Common: given intOrd: (Int is Ord) with - type This = Int + type Self = Int extension (x: Int) def compareTo(y: Int) = if x < y then -1 @@ -77,8 +75,8 @@ object Instances extends Common: def second = xs.tail.head def third = xs.tail.tail.head - extension [M, A](using m: Monad)(xss: m.This[m.This[A]]) - def flatten: m.This[A] = + extension [M, A](using m: Monad)(xss: m.Self[m.Self[A]]) + def flatten: m.Self[A] = xss.flatMap(identity) def maximum[T](xs: List[T])(using T is Ord): T = @@ -103,12 +101,12 @@ object Instances extends Common: // wc Scala: 30 115 853 // wc Rust : 57 193 1466 trait Animal: - type This - // Associated function signature; `This` refers to the implementor type. - def apply(name: String): This + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self // Method signatures; these will return a string. - extension (self: This) + extension (self: Self) def name: String def noise: String def talk(): Unit = println(s"$name, $noise") @@ -126,18 +124,17 @@ class Sheep(val name: String): /* instance Sheep: Animal with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" override def talk(): Unit = println(s"$name pauses briefly... $noise") */ -import Instances.is // Implement the `Animal` trait for `Sheep`. given (Sheep is Animal) with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" 
override def talk(): Unit = diff --git a/tests/run/for-desugar-strawman.scala b/tests/run/for-desugar-strawman.scala new file mode 100644 index 000000000000..a92b19b9150a --- /dev/null +++ b/tests/run/for-desugar-strawman.scala @@ -0,0 +1,96 @@ + +@main def Test = + println: + for + x <- List(1, 2, 3) + y = x + x + if x >= 2 + i <- List.range(0, y) + z = i * i + if z % 2 == 0 + yield + i * x + + println: + val xs = List(1, 2, 3) + xs.flatMapDefined: x => + val y = x + x + xs.applyFilter(x >= 2): + val is = List.range(0, y) + is.mapDefined: i => + val z = i * i + is.applyFilter(z % 2 == 0): + i * x + +extension [A](as: List[A]) + + def applyFilter[B](p: => Boolean)(b: => B) = + if p then Some(b) else None + + def flatMapDefined[B](f: A => Option[IterableOnce[B]]): List[B] = + as.flatMap: x => + f(x).getOrElse(Nil) + + def mapDefined[B](f: A => Option[B]): List[B] = + as.flatMap(f) + +object UNDEFINED + +extension [A](as: Vector[A]) + + def applyFilter[B](p: => Boolean)(b: => B) = + if p then b else UNDEFINED + + def flatMapDefined[B](f: A => IterableOnce[B] | UNDEFINED.type): Vector[B] = + as.flatMap: x => + f(x) match + case UNDEFINED => Nil + case y: IterableOnce[B] => y + + def mapDefined[B](f: A => B | UNDEFINED.type): Vector[B] = + as.flatMap: x => + f(x) match + case UNDEFINED => Nil + case y: B => y :: Nil + +/* +F ::= val x = E; F + x <- E; G +G ::= [] + val x = E; G + if E; G + x <- E; G + +Translation scheme: + +{ for F yield E }c where c = undefined +{ for G yield E }c where c is a reference to the generator preceding the G sequence + +{ for [] yield E }c = E +{ for p = Ep; G yield E }c = val p = Ep; { for G yield E }c +{ for if Ep; G yield E}c = c.applyFilter(Ep)({ for G yield E }c) +{ for p <- Ep; G yield E }c = val c1 = Ep; c1.BIND{ case p => { for G yield E }c1 } (c1 fresh) + + where BIND = flatMapDefined if isGen(G), isFilter(G) + = mapDefined if !isGen(G), isFilter(G) + = flatMap if isGen(G), !isFilter(G) + = map if !isGen(G), !isFilter(G) + +{ for 
case p <- Ep; G yield E }c = { for $x <- Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E }c +{ for case p = Ep; G yield E }c = { for $x = Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E}c + +isFilter(if E; S) +isFilter(val x = E; S) if isFilter(S) + +isGen(x <- E; S) +isGen(val x = E; S) if isGen(S) +isGen(if E; S) if isGen(S) + +*/ + +val foo = 1 + +def main2 = + foo + ??? + ??? match { case _ => 0 } \ No newline at end of file diff --git a/tests/run/given-disambiguation.scala b/tests/run/given-disambiguation.scala new file mode 100644 index 000000000000..637c02a5621f --- /dev/null +++ b/tests/run/given-disambiguation.scala @@ -0,0 +1,58 @@ +import language.experimental.modularity +import language.future + +trait M: + type Self + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num: + type Self + def zero: Self + +trait A extends M +trait B extends M + +def f[X: {M, A, B}](x: X) = + summon[X is M] + x.combine(x) + +trait AA: + type XX: {M, A, B} + val x = XX.unit + val A: String = "hello" + +trait AAA: + type X: M +trait BBB: + type X: Num +class CCC[X1: {M, Num}] extends AAA, BBB: + type X = X1 + X.zero + X.unit + +@main def Test = + class C + + given C is M: + extension (x: Self) def combine (y: Self) = "M" + def unit = C() + + given C is A: + extension (x: Self) def combine (y: Self) = "A" + def unit = C() + + given C is B: + extension (x: Self) def combine (y: Self) = "B" + def unit = C() + + assert(f(C()) == "M") + + class CC extends AA: + type XX = C + assert(A.length == 5) + assert(A.toString == "hello") + + CC() + + diff --git a/tests/run/i15840.scala b/tests/run/i15840.scala new file mode 100644 index 000000000000..0f238e2e7148 --- /dev/null +++ b/tests/run/i15840.scala @@ -0,0 +1,27 @@ +//> using options -language:experimental.modularity -source future + +trait Nat: + type N <: Nat + +class _0 extends Nat: + type N = _0 + +class NatOps[N <: Nat](tracked val n: 
N): + def toInt(using toIntN: ToInt[n.N]): Int = toIntN() + +// works +def toInt[N <: Nat](n: N)(using toIntN: ToInt[n.N]) = toIntN() + +sealed abstract class ToInt[N <: Nat]: + def apply(): Int + +object ToInt: + given ToInt[_0] { + def apply() = 0 + } + +@main def Test() = + assert(toInt(new _0) == 0) + assert(NatOps[_0](new _0).toInt == 0) + assert: + NatOps(new _0).toInt == 0 // did not work From 9d299d6f778a039dcca41b6da40b39753d08442e Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 3 Apr 2024 10:06:39 +0200 Subject: [PATCH 359/465] Add a doc page [Cherry-picked f444b4605c39ff38c8e41c61fdc93efec3bd02d8] --- .../reference/experimental/typeclasses.md | 776 ++++++++++++++++++ docs/sidebar.yml | 1 + .../runtime/stdLibPatches/language.scala | 1 + 3 files changed, 778 insertions(+) create mode 100644 docs/_docs/reference/experimental/typeclasses.md diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md new file mode 100644 index 000000000000..5ac81061e42d --- /dev/null +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -0,0 +1,776 @@ + +--- +layout: doc-page +title: "Type Classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html +--- + +# Some Proposed Changes for Better Support of Type Classes + +Martin Odersky, 8.1.2024 + +A type class in Scala is a pattern where we define + + - a trait with one type parameter (the _type class_) + - given instances at specific instantiations of that trait, + - using clauses or context bounds abstracting over that trait. + +Type classes as a pattern work overall OK, but if we compare them to native implementations in Haskell, or protocols in Swift, or traits in Rust, then there are some idiosyncrasies and rough corners which in the end make them +a bit cumbersome and limiting for standard generic programming patterns. 
Much has improved since Scala 2's implicits, but there is still some gap to bridge to get to parity with these languages. + +This note shows that with some fairly small and reasonable tweaks to Scala's syntax and typing rules we can obtain a much better scheme for working with type classes, or do generic programming in general. + +The bulk of the suggested improvements has been implemented and is available +under source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +## Generalizing Context Bounds + + The only place in Scala's syntax where the type class pattern is relevant is + in context bounds. A context bound such as + +```scala + def min[A: Ordering](x: List[A]): A +``` +requires that `Ordering` is a trait or class with a single type parameter (which makes it a type class) and expands to a `using` clause that instantiates that parameter. Here is the expansion of `min`: +```scala + def min[A](x: List[A])(using Ordering[A]): A +``` + +**Proposal** Allow type classes to define an abstract type member named `Self` instead of a type parameter. + +**Example** + +```scala + trait Ord: + type Self + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + def reduce[A: Monoid](xs: List[A]): A = + xs.foldLeft(Monoid.unit)(_ `combine` _) + + trait ParserCombinator: + type Self + type Input + type Result + extension (self: Self) + def parse(input: Input): Option[Result] = ... 
+ + def combine[A: ParserCombinator, B: ParserCombinator { type Input = A.Input }] = ... +``` + +**Advantages** + + - Avoids repetitive type parameters, concentrates on what's essential, namely the type class hierarchy. + - Gives a clear indication of traits intended as type classes. A trait is a type class + if it has type `Self` as a member + - Allows to create aggregate type classes that combine givens via intersection types. + - Allows to use refinements in context bounds (the `combine` example above would be very awkward to express using the old way of context bounds expanding to type constructors). + +`Self`-based context bounds are a better fit for a dependently typed language like Scala than parameter-based ones. The main reason is that we are dealing with proper types, not type constructors. Proper types can be parameterized, intersected, or refined. This makes `Self`-based designs inherently more compositional than parameterized ones. + + + +**Details** + +When a trait has both a type parameter and an abstract `Self` type, we + resolve a context bound to the `Self` type. This allows type classes + that carry type parameters, as in + +```scala +trait Sequential[E]: + type Self +``` + +Here, +```scala +[S: Sequential[Int]] +``` +should resolve to: +```scala +[S](using Sequential[Int] { type Self = S }) +``` +and not to: +```scala +[S](using Sequential[S]) +``` + +**Discussion** + + Why not use `This` for the self type? The name `This` suggests that it is the type of `this`. But this is not true for type class traits. `Self` is the name of the type implementing a distinguished _member type_ of the trait in a `given` definition. `Self` is an established term in both Rust and Swift with the meaning used here. + + One possible objection to the `Self` based design is that it does not cover "multi-parameter" type classes. But neither do context bounds! "Multi-parameter" type classes in Scala are simply givens that can be synthesized with the standard mechanisms. 
Type classes in the strict sense abstract only over a single type, namely the implementation type of a trait.
+
+
+## Auxiliary Type Alias `is`
+
+We introduce a standard type alias `is` in the Scala package or in `Predef`, defined like this:
+
+```scala
+  infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A }
+```
+
+This makes writing instance definitions quite pleasant. Examples:
+
+```scala
+  given Int is Ord ...
+  given Int is Monoid ...
+
+  type Reader = [X] =>> Env => X
+  given Reader is Monad ...
+```
+
+(more examples will follow below)
+
+
+
+## Naming Context Bounds
+
+Context bounds are a convenient and legible abbreviation. A problem so far is that they are always anonymous,
+one cannot name the using parameter to which a context bound expands.
+
+For instance, consider a `reduce` method over `Monoid`s defined like this:
+
+```scala
+def reduce[A : Monoid](xs: List[A]): A = ???
+```
+Since we don't have a name for the `Monoid` instance of `A`, we need to resort to `summon` in the body of `reduce`:
+```scala
+def reduce[A : Monoid](xs: List[A]): A =
+  xs.foldLeft(summon[Monoid[A]].unit)(_ `combine` _)
+```
+That's generally considered too painful to write and read, hence people usually adopt one of two alternatives. Either, eschew context bounds and switch to using clauses:
+```scala
+def reduce[A](xs: List[A])(using m: Monoid[A]): A =
+  xs.foldLeft(m.unit)(_ `combine` _)
+```
+Or, plan ahead and define a "trampoline" method in `Monoid`'s companion object:
+```scala
+  trait Monoid[A] extends SemiGroup[A]:
+    def unit: A
+  object Monoid:
+    def unit[A](using m: Monoid[A]): A = m.unit
+    ...
+  def reduce[A : Monoid](xs: List[A]): A =
+    xs.foldLeft(Monoid.unit)(_ `combine` _)
+```
+This is all accidental complexity which can be avoided by the following proposal.
+
+**Proposal:** Allow to name a context bound, like this:
+```scala
+  def reduce[A : Monoid as m](xs: List[A]): A =
+    xs.foldLeft(m.unit)(_ `combine` _)
+```
+
+We use `as x` after the type to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before.
+
+**Benefits:** The new syntax is simple and clear.
+It avoids the awkward choice between concise context bounds that can't be named and verbose using clauses that can.
+
+### New Syntax for Aggregate Context Bounds
+
+Aggregate context bounds like `A : X : Y` are not obvious to read, and it becomes worse when we add names, e.g. `A : X as x : Y as y`.
+
+**Proposal:** Allow to combine several context bounds inside `{...}`, analogous
+to import clauses. Example:
+
+```scala
+  trait A:
+    def showMax[X : {Ordering, Show}](x: X, y: X): String
+  class B extends A:
+    def showMax[X : {Ordering as ordering, Show as show}](x: X, y: X): String =
+      show.asString(ordering.max(x, y))
+```
+
+The old syntax with multiple `:` should be phased out over time.
+
+**Benefits:** The new syntax is much clearer than the old one, in particular for newcomers that don't know context bounds well.
+
+### Better Default Names for Context Bounds
+
+So far, an unnamed context bound for a type parameter gets a synthesized fresh name. It would be much more useful if it got the name of the constrained type parameter instead, translated to be a term name. This means our `reduce` method over monoids would not even need an `as` binding. We could simply formulate it as follows:
+```
+  def reduce[A : Monoid](xs: List[A]) =
+    xs.foldLeft(A.unit)(_ `combine` _)
+```
+
+The use of a name like `A` above in two variants, both as a type name and as a term name is of course familiar to Scala programmers. We use the same convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious.
It is surprising that it was not brought up before. + +**Proposed Rules** + + 1. The generated evidence parameter for a context bound `A : C as a` has name `a` + 2. The generated evidence for a context bound `A : C` without an `as` binding has name `A` (seen as a term name). So, `A : C` is equivalent to `A : C as A`. + 3. If there are multiple context bounds for a type parameter, as in `A : {C_1, ..., C_n}`, the generated evidence parameter for every context bound `C_i` has a fresh synthesized name, unless the context bound carries an `as` clause, in which case rule (1) applies. + +The default naming convention reduces the need for named context bounds. But named context bounds are still essential, for at least two reasons: + + - They are needed to give names to multiple context bounds. + - They give an explanation what a single unnamed context bound expands to. + + +### Expansion of Context Bounds + +Context bounds are currently translated to implicit parameters in the last parameter list of a method or class. This is a problem if a context bound is mentioned in one of the preceding parameter types. For example, consider a type class of parsers with associated type members `Input` and `Result` describing the input type on which the parsers operate and the type of results they produce: +```scala +trait Parser[P]: + type Input + type Result +``` +Here is a method `run` that runs a parser on an input of the required type: + +```scala +def run[P : Parser](in: P.Input): P.Result +``` +Or, making clearer what happens by using an explicit name for the context bound: +```scala +def run[P : Parser as p](in: p.Input): p.Result +``` +With the current translation this does not work since it would be expanded to: +```scala + def run[P](x: p.Input)(using p: Parser[P]): p.Result +``` +Note that the `p` in `p.Input` refers to the `p` introduced in the using clause, which comes later. So this is ill-formed. 
+ +This problem would be fixed by changing the translation of context bounds so that they expand to using clauses immediately after the type parameter. But such a change is infeasible, for two reasons: + + 1. It would be a binary-incompatible change. + 2. Putting using clauses earlier can impair type inference. A type in + a using clause can be constrained by term arguments coming before that + clause. Moving the using clause first would miss those constraints, which could cause ambiguities in implicit search. + +But there is an alternative which is feasible: + +**Proposal:** Map the context bounds of a method or class as follows: + + 1. If one of the bounds is referred to by its term name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. + 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. + 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. + +Rules (2) and (3) are the status quo, and match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility is maintained. + +**Discussion** More refined rules could be envisaged where context bounds are spread over different using clauses so that each comes as late as possible. But it would make matters more complicated and the gain in expressiveness is not clear to me. + +Named (either explicitly, or by default) context bounds in givens that produce classes are mapped to tracked val's of these classes (see #18958). This allows +references to these parameters to be precise, so that information about dependent type members is preserved. 
+ + +## Context Bounds for Type Members + +It's not very orthogonal to allow subtype bounds for both type parameters and abstract type members, but context bounds only for type parameters. What's more, we don't even have the fallback of an explicit using clause for type members. The only alternative is to also introduce a set of abstract givens that get implemented in each subclass. This is extremely heavyweight and opaque to newcomers. + +**Proposal**: Allow context bounds for type members. Example: + +```scala + class Collection: + type Element : Ord +``` + +The question is how these bounds are expanded. Context bounds on type parameters +are expanded into using clauses. But for type members this does not work, since we cannot refer to a member type of a class in a parameter type of that class. What we are after is an equivalent of using parameter clauses but represented as class members. + +**Proposal:** Introduce a new way to implement a given definition in a trait like this: +```scala +given T = deferred +``` +`deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. 
+ +Deferred givens allow a clean implementation of context bounds in traits, +as in the following example: +```scala +trait Sorted: + type Element : Ord + +class SortedSet[A : Ord] extends Sorted: + type Element = A +``` +The compiler expands this to the following implementation: +```scala +trait Sorted: + type Element + given Ord[Element] = compiletime.deferred + +class SortedSet[A](using A: Ord[A]) extends Sorted: + type Element = A + override given Ord[Element] = A // i.e. the A defined by the using clause +``` + +The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. + +**Benefits:** + + - Better orthogonality, type parameters and abstract type members now accept the same kinds of bounds. + - Better ergonomics, since deferred givens get naturally implemented in inheriting classes, no need for boilerplate to fill in definitions of abstract givens. + +**Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. +It is a concrete definition where the compiler will provide the correct implementation. + +## New Given Syntax + +A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. Dissonances and irregularities should be avoided. + +When designing Scala 3, I believe that, by and large, we achieved that goal, except in one area, which is the syntax of givens. 
There _are_ some glaring dissonances, as seen in this code for defining an ordering on lists: +```scala +given [A](using Ord[A]): Ord[List[A]] with + def compare(x: List[A], y: List[A]) = ... +``` +The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need of `with` sticks out like a sore thumb. + +We arrived at that syntax not because of a flight of fancy but because even after trying for about a year to find other solutions it seemed like the least bad alternative. The awkwardness of the given syntax arose because we insisted that givens could be named or anonymous, with the default on anonymous, that we would not use underscore for an anonymous given, and that the name, if present, had to come first, and have the form `name [parameters] :`. In retrospect, that last requirement showed a lack of creativity on our part. + +Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. Since the current syntax is unnatural and esoteric, this means it's difficult to discover and very foreign even after that. This makes it much harder to learn and apply givens than it need be. + +Things become much simpler if we introduce the optional name instead with an `as name` clause at the end, just like we did for context bounds. We can then use a more intuitive syntax for givens like this: +```scala +given String is Ord: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] is Ord: + def compare(x: List[A], y: List[A]) = ... 
+ +given Int is Monoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` +If explicit names are desired, we add them with `as` clauses: +```scala +given String is Ord as intOrd: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] is Ord as listOrd: + def compare(x: List[A], y: List[A]) = ... + +given Int is Monoid as intMonoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` + +The underlying principles are: + + - A `given` clause consists of the following elements: + + - An optional _precondition_, which introduces type parameters and/or using clauses and which ends in `=>`, + - the implemented _type_, + - an optional name binding using `as`, + - an implementation which consists of either an `=` and an expression, + or a template body. + + - Since there is no longer a middle `:` separating name and parameters from the implemented type, we can use a `:` to start the class body without looking unnatural, as is done everywhere else. That eliminates the special case where `with` was used before. + +This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about a 900K definitions of `implicit def`s +in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again. + +Changing something introduced just recently in Scala 3 is not fun, +but I believe these adjustments are preferable to let bad syntax +sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code +starts migrating. 
+ +Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time. + + +### Abolish Abstract Givens + +Another simplification is possible. So far we have special syntax for abstract givens: +```scala +given x: T +``` +The problem is that this syntax clashes with the quite common case where we want to establish a given without any nested definitions. For instance +consider a given that constructs a type tag: +```scala +class Tag[T] +``` +Then this works: +```scala +given Tag[String]() +given Tag[String] with {} +``` +But the following more natural syntax fails: +```scala +given Tag[String] +``` +The last line gives a rather cryptic error: +``` +1 |given Tag[String] + | ^ + | anonymous given cannot be abstract +``` +The problem is that the compiler thinks that the last given is intended to be abstract, and complains since abstract givens need to be named. This is another annoying dissonance. Nowhere else in Scala's syntax does adding a +`()` argument to a class cause a drastic change in meaning. And it's also a violation of the principle that it should be possible to define all givens without providing names for them. + +Fortunately, abstract givens are no longer necessary since they are superseded by the new `deferred` scheme. So we can deprecate that syntax over time. Abstract givens are a highly specialized mechanism with a so far non-obvious syntax. We have seen that this syntax clashes with reasonable expectations of Scala programmers. My estimate is that maybe a dozen people world-wide have used abstract givens in anger so far. 
+ +**Proposal** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. + +This is less of a disruption than it might appear at first: + + - `given T` was illegal before since abstract givens could not be anonymous. + It now means a concrete given of class `T` with no member definitions. + - `given x: T` is legacy syntax for an abstract given. + - `given T as x = deferred` is the analogous new syntax, which is more powerful since + it allows for automatic instantiation. + - `given T = deferred` is the anonymous version in the new syntax, which was not expressible before. + +**Benefits:** + + - Simplification of the language since a feature is dropped + - Eliminate non-obvious and misleading syntax. + +## Summary of Syntax Changes + +Here is the complete context-free syntax for all proposed features. +Overall the syntax for givens becomes a lot simpler than what it was before. + +``` +TmplDef ::= 'given' GivenDef +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType {id [nl] AnnotType} + +TypeDef ::= id [TypeParamClause] TypeAndCtxBounds +TypeParamBounds ::= TypeAndCtxBounds +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] +``` + + + +## Examples + + +### Example 1 + +Here are some standard type classes, which were mostly already introduced at the start of this note, now with associated instance givens and some test code: + +```scala + // Type classes + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) 
>= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] // Here, Self is a type constructor with parameter A + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + // Instances + + given Int is Ord: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] is Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = + (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List is Monad: + extension [A](xs: List[A]) + def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] is Monad: + extension [A](r: Ctx => A) + def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + // Usages + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T is Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) +``` + + +### Example 2 + +The following contributed code by @LPTK (issue #10929) did _not_ work at first since 
+references were not tracked correctly. The version below adds explicit tracked parameters which makes the code compile. +```scala +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: tup.map(x.tail)(f) +``` + +Note the quite convoluted syntax, which makes the code hard to understand. Here is the same example in the new type class syntax, which also compiles correctly: +```scala +//> using options -language:experimental.modularity -source future + +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple is TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest is TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: Rest.map(x.tail)(f) +``` +Note in particular the following points: + + - In the original code, it was not clear that `TupleOf` is a type class, + since it contained two type parameters, one of which played the role + of the instance type `Self`. The new version is much clearer: `TupleOf` is + a type class over `Self` with one additional parameter, the common type of all tuple elements. + - The two given definitions are obfuscated in the old code. Their version + in the new code makes it clear what kind of instances they define: + + - `EmptyTuple` is a tuple of `Nothing`. + - if `Rest` is a tuple of `A`, then `A *: Rest` is also a tuple of `A`. 
+ + - There's no need to introduce names for parameter instances in using clauses; the default naming scheme for context bound evidences works fine, and is more concise. + - There's no need to manually declare implicit parameters as `tracked`, + context bounds provide that automatically. + - Everything in the new code feels like idiomatic Scala 3, whereas the original code exhibits the awkward corner case that requires a `with` in + front of given definitions. + +### Example 3 + +Dimi Racordon tried to [define parser combinators](https://users.scala-lang.org/t/create-an-instance-of-a-type-class-with-methods-depending-on-type-members/9613) in Scala that use dependent type members for inputs and results. It was intended as a basic example of type class constraints, but it did not work in current Scala. + +Here is the problem solved with the new syntax. Note how much clearer that syntax is compared to Dimi's original version, which did not work out in the end. + +```scala +/** A parser combinator */ +trait Combinator: + type Self + + type Input + type Result + + extension (self: Self) + /** Parses and returns an element from input `in` */ + def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](a: A, b: B) + +given [I, R] => Apply[I, R] is Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] is Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for + x <- self.a.parse(in) + y <- self.b.parse(in) + yield (x, y) +``` +The example is now as expressed as straightforwardly as it should be: + + - `Combinator` is a type class with two associated types, `Input` and `Result`, and a `parse` method. 
+ - `Apply` and `Combine` are two data constructors representing parser combinators. They are declared to be `Combinators` in the two subsequent `given` declarations. + - `Apply`'s parse method applies the `action` function to the input. + - `Combine[A, B]` is a parser combinator provided `A` and `B` are parser combinators + that process the same type of `Input`, which is also the input type of + `Combine[A, B]`. Its `Result` type is a pair of the `Result` types of `A` and `B`. + Results are produced by a simple for-expression. + +Compared to the original example, which required serious contortions, this is now all completely straightforward. + +_Note 1:_ One could also explore improvements, for instance making this purely functional. But that's not the point of the demonstration here, where I wanted +to take the original example and show how it can be made to work with the new constructs, and be expressed more clearly as well. + +_Note 2:_ One could improve the notation even further by adding equality constraints in the style of Swift, which in turn resemble the _sharing constraints_ of SML. A hypothetical syntax applied to the second given would be: +```scala +given [A: Combinator, B: Combinator with A.Input == B.Input] + => Combine[A, B] is Combinator: +``` +This variant is aesthetically pleasing since it makes the equality constraint symmetric. The original version had to use an asymmetric refinement on the second type parameter bound instead. For now, such constraints are neither implemented nor proposed. This is left as a possibility for future work. Note also the analogy with +the work of @mbovel and @Sporarum on refinement types, where similar `with` clauses can appear for term parameters. If that work goes ahead, we could possibly revisit the issue of `with` clauses also for type parameters. 
+ +### Example 4 + +Dimi Racordon tried to [port some core elements](https://github.com/kyouko-taiga/scala-hylolib) of the type class based [Hylo standard library to Scala](https://github.com/hylo-lang/hylo/tree/main/StandardLibrary/Sources). It worked to some degree, but there were some things that could not be expressed, and more things that could be expressed only awkwardly. + +With the improvements proposed here, the library can now be expressed quite clearly and straightforwardly. See tests/pos/hylolib in this PR for details. + +## Suggested Improvements unrelated to Type Classes + +The following improvements elsewhere would make sense alongside the suggested changes to type classes. But they are currently not part of this proposal or implementation. + +### Fixing Singleton + +We know the current treatment of `Singleton` as a type bound is broken since +`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. + +A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. + +We can do this in a backwards-compatible way by defining `Singleton` like this: + +```scala +trait Singleton: + type Self +``` + +Then, instead of using an unsound upper bound we can use a context bound: + +```scala +def f[X: Singleton](x: X) = ... +``` + +The context bound would be treated specially by the compiler so that no using clause is generated at runtime. + +_Aside_: This can also lead to a solution how to express precise type variables. We can introduce another special type class `Precise` and use it like this: + +```scala +def f[X: Precise](x: X) = ... +``` +This would disable automatic widening of singleton types in inferred instances of type variable `X`. 
+ +### Using `as` also in Patterns + +Since we have now more precedents of `as` as a postfix binder, I want to come back to the proposal to use it in patterns as well, in favor of `@`, which should be deprecated. + +Examples: + +```scala + xs match + case (Person(name, age) as p) :: rest => ... + + tp match + case Param(tl, _) :: _ as tparams => ... + + val x :: xs1 as xs = ys.checkedCast +``` + +These would replace the previous syntax using `@`: + +```scala + xs match + case p @ Person(name, age) :: rest => ... + + tp match + case tparams @ (Param(tl, _) :: _) => ... + + val xs @ (x :: xs1) = ys.checkedCast +``` +**Advantages:** No unpronounceable and non-standard symbol like `@`. More regularity. + +Generally, we want to use `as name` to attach a name for some entity that could also have been used stand-alone. + +**Proposed Syntax Change** + +``` +Pattern2 ::= InfixPattern ['as' id] +``` + +## Summary + +I have proposed some tweaks to Scala 3, which would greatly increase its usability for modular, type class based, generic programming. The proposed changes are: + + 1. Allow context bounds over classes that define a `Self` member type. + 1. Allow context bounds to be named with `as`. Use the bound parameter name as a default name for the generated context bound evidence. + 1. Add a new `{...}` syntax for multiple context bounds. + 1. Make context bounds also available for type members, which expand into a new form of deferred given. Phase out the previous abstract givens in favor of the new form. + 1. Add a predefined type alias `is`. + 1. Introduce a new cleaner syntax of given clauses. + +It's interesting that givens, which are a very general concept in Scala, were "almost there" when it comes to full support of concepts and generic programming. We only needed to add a few usability tweaks to context bounds, +alongside two syntactic changes that supersede the previous forms of `given .. with` clauses and abstract givens. 
Also interesting is that the superseded syntax constructs were the two areas where we collectively felt that the previous solutions were a bit awkward, but we could not think of better ones at the time. It's very nice that more satisfactory solutions are now emerging.
+
+## Conclusion
+
+Generic programming can be expressed in a number of languages. For instance, with
+type classes in Haskell, or with traits in Rust, or with protocols in Swift, or with concepts in C++. Each of these is constructed from a fairly heavyweight set of new constructs, different from expressions and types. By contrast, equivalent solutions in Scala rely on regular types. Type classes are simply traits that define a `Self` type member.
+
+The proposed scheme has similar expressiveness to Protocols in Swift or Traits in Rust. Both of these were largely influenced by Jeremy Siek's PhD thesis "[A language for generic programming](https://scholarworks.iu.edu/dspace/handle/2022/7067)", which was first proposed as a way to implement concepts in C++. C++ did not follow Siek's approach, but Swift and Rust did.
+
+In Siek's thesis and in the formal treatments of Rust and Swift,
+ type class concepts are explained by mapping them to a lower level language of explicit dictionaries with representations for terms and types. Crucially, that lower level is not expressible without loss of granularity in the source language itself, since type representations are mapped to term dictionaries. By contrast, the current proposal expands type class concepts into other well-typed Scala constructs, which ultimately map into well-typed DOT programs. Type classes are simply a convenient notation for something that can already be expressed in Scala. In that sense, we stay true to the philosophy of a _scalable language_, where a small core can support a large range of advanced use cases. 
+ diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 160698f1f44b..efdab80595a6 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -156,6 +156,7 @@ subsection: - page: reference/experimental/tupled-function.md - page: reference/experimental/named-tuples.md - page: reference/experimental/modularity.md + - page: reference/experimental/typeclasses.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index e9c480919902..a5cd683775f0 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -105,6 +105,7 @@ object language: * - ability to merge exported types in intersections * * @see [[https://dotty.epfl.ch/docs/reference/experimental/modularity]] + * @see [[https://dotty.epfl.ch/docs/reference/experimental/typeclasses]] */ @compileTimeOnly("`modularity` can only be used at compile time in import statements") object modularity From a6f918b8955997d98174f0e5d1e712392596801d Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Apr 2024 20:21:30 +0200 Subject: [PATCH 360/465] Fix Singleton Allow to constrain type variables to be singletons by a context bound [X: Singleton] instead of an unsound supertype [X <: Singleton]. This fixes the soundness hole of singletons. 
[Cherry-picked f71365250688a6bc886b9900f8535e8babdd94be] --- .../tools/dotc/core/ConstraintHandling.scala | 18 +++----- .../dotty/tools/dotc/core/Definitions.scala | 12 ++--- .../dotty/tools/dotc/core/TypeComparer.scala | 8 ++-- .../src/dotty/tools/dotc/core/TypeOps.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 43 +++++++++++++++--- .../src/dotty/tools/dotc/typer/Namer.scala | 2 +- .../dotty/tools/dotc/typer/ProtoTypes.scala | 37 ++++++++++++---- .../dotty/tools/dotc/typer/Synthesizer.scala | 13 +++++- .../src/dotty/tools/dotc/typer/Typer.scala | 4 +- .../reference/experimental/typeclasses.md | 15 +++++-- .../scala/runtime/stdLibPatches/Predef.scala | 2 +- tests/neg/singleton-ctx-bound.scala | 20 +++++++++ tests/pos/singleton-ctx-bound.scala | 44 +++++++++++++++++++ 13 files changed, 175 insertions(+), 45 deletions(-) create mode 100644 tests/neg/singleton-ctx-bound.scala create mode 100644 tests/pos/singleton-ctx-bound.scala diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 109929f0c6f5..06711ec97abf 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -647,9 +647,9 @@ trait ConstraintHandling { * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, * as those could leak the annotation to users (see run/inferred-repeated-result). 
*/ - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = + def widenInferred(inst: Type, bound: Type, widen: Widen)(using Context): Type = def widenOr(tp: Type) = - if widenUnions then + if widen == Widen.Unions then val tpw = tp.widenUnion if tpw ne tp then if tpw.isTransparent() then @@ -667,14 +667,10 @@ trait ConstraintHandling { val tpw = tp.widenSingletons(skipSoftUnions) if (tpw ne tp) && (tpw <:< bound) then tpw else tp - def isSingleton(tp: Type): Boolean = tp match - case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) - case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) - val wideInst = - if isSingleton(bound) then inst + if widen == Widen.None || bound.isSingletonBounded(frozen = true) then inst else - val widenedFromSingle = widenSingle(inst, skipSoftUnions = widenUnions) + val widenedFromSingle = widenSingle(inst, skipSoftUnions = widen == Widen.Unions) val widenedFromUnion = widenOr(widenedFromSingle) val widened = dropTransparentTraits(widenedFromUnion, bound) widenIrreducible(widened) @@ -713,10 +709,10 @@ trait ConstraintHandling { * The instance type is not allowed to contain references to types nested deeper * than `maxLevel`. */ - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { + def instanceType(param: TypeParamRef, fromBelow: Boolean, widen: Widen, maxLevel: Int)(using Context): Type = { val approx = approximation(param, fromBelow, maxLevel).simplified if fromBelow then - val widened = widenInferred(approx, param, widenUnions) + val widened = widenInferred(approx, param, widen) // Widening can add extra constraints, in particular the widened type might // be a type variable which is now instantiated to `param`, and therefore // cannot be used as an instantiation of `param` without creating a loop. 
@@ -724,7 +720,7 @@ trait ConstraintHandling { // (we do not check for non-toplevel occurrences: those should never occur // since `addOneBound` disallows recursive lower bounds). if constraint.occursAtToplevel(param, widened) then - instanceType(param, fromBelow, widenUnions, maxLevel) + instanceType(param, fromBelow, widen, maxLevel) else widened else diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index b408883009ab..6d3a4de7b026 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -59,10 +59,10 @@ class Definitions { private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered - private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope): TypeSymbol = scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) - private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope): TypeSymbol = enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope) private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = @@ -538,9 +538,11 @@ class Definitions { @tu lazy val SingletonClass: ClassSymbol = // needed as a synthetic class because Scala 2.x refers to it in classfiles // but does not define it as an explicit class. 
- enterCompleteClassSymbol( - ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, - List(AnyType), EmptyScope) + val cls = enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final | Erased, + List(AnyType)) + enterTypeField(cls, tpnme.Self, Deferred, cls.info.decls.openForMutations) + cls @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef @tu lazy val MaybeCapabilityAnnot: ClassSymbol = diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 27dd4b7134a9..c2c502a984c4 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3257,8 +3257,8 @@ object TypeComparer { def subtypeCheckInProgress(using Context): Boolean = comparing(_.subtypeCheckInProgress) - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = - comparing(_.instanceType(param, fromBelow, widenUnions, maxLevel)) + def instanceType(param: TypeParamRef, fromBelow: Boolean, widen: Widen, maxLevel: Int = Int.MaxValue)(using Context): Type = + comparing(_.instanceType(param, fromBelow, widen: Widen, maxLevel)) def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = comparing(_.approximation(param, fromBelow, maxLevel)) @@ -3278,8 +3278,8 @@ object TypeComparer { def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = comparing(_.addToConstraint(tl, tvars)) - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = - comparing(_.widenInferred(inst, bound, widenUnions)) + def widenInferred(inst: Type, bound: Type, widen: Widen)(using Context): Type = + comparing(_.widenInferred(inst, bound, widen: Widen)) def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = 
comparing(_.dropTransparentTraits(tp, bound)) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 8461c0f091fe..1282b77f013e 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -545,7 +545,7 @@ object TypeOps: val lo = TypeComparer.instanceType( tp.origin, fromBelow = variance > 0 || variance == 0 && tp.hasLowerBound, - widenUnions = tp.widenUnions)(using mapCtx) + tp.widenPolicy)(using mapCtx) val lo1 = apply(lo) if (lo1 ne lo) lo1 else tp case _ => diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ac3aef2a59d2..27931bad0bc3 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -44,8 +44,6 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe - - object Types extends TypeUtils { @sharable private var nextId = 0 @@ -330,6 +328,21 @@ object Types extends TypeUtils { /** Is this type a (possibly aliased) singleton type? */ def isSingleton(using Context): Boolean = dealias.isInstanceOf[SingletonType] + /** Is this upper-bounded by a (possibly aliased) singleton type? 
+ * Overridden in TypeVar + */ + def isSingletonBounded(frozen: Boolean)(using Context): Boolean = this.dealias.normalized match + case tp: SingletonType => tp.isStable + case tp: TypeRef => + tp.name == tpnme.Singleton && tp.symbol == defn.SingletonClass + || tp.superType.isSingletonBounded(frozen) + case tp: TypeVar if !tp.isInstantiated => + if frozen then tp frozen_<:< defn.SingletonType else tp <:< defn.SingletonType + case tp: HKTypeLambda => false + case tp: TypeProxy => tp.superType.isSingletonBounded(frozen) + case AndType(tpL, tpR) => tpL.isSingletonBounded(frozen) || tpR.isSingletonBounded(frozen) + case _ => false + /** Is this type of kind `AnyKind`? */ def hasAnyKind(using Context): Boolean = { @tailrec def loop(tp: Type): Boolean = tp match { @@ -4924,7 +4937,11 @@ object Types extends TypeUtils { * @param creatorState the typer state in which the variable was created. * @param initNestingLevel the initial nesting level of the type variable. (c.f. nestingLevel) */ - final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState | Null, val initNestingLevel: Int) extends CachedProxyType with ValueType { + final class TypeVar private( + initOrigin: TypeParamRef, + creatorState: TyperState | Null, + val initNestingLevel: Int, + precise: Boolean) extends CachedProxyType with ValueType { private var currentOrigin = initOrigin def origin: TypeParamRef = currentOrigin @@ -5012,7 +5029,7 @@ object Types extends TypeUtils { } def typeToInstantiateWith(fromBelow: Boolean)(using Context): Type = - TypeComparer.instanceType(origin, fromBelow, widenUnions, nestingLevel) + TypeComparer.instanceType(origin, fromBelow, widenPolicy, nestingLevel) /** Instantiate variable from the constraints over its `origin`. * If `fromBelow` is true, the variable is instantiated to the lub @@ -5029,7 +5046,10 @@ object Types extends TypeUtils { instantiateWith(tp) /** Widen unions when instantiating this variable in the current context? 
*/ - def widenUnions(using Context): Boolean = !ctx.typerState.constraint.isHard(this) + def widenPolicy(using Context): Widen = + if precise then Widen.None + else if ctx.typerState.constraint.isHard(this) then Widen.Singletons + else Widen.Unions /** For uninstantiated type variables: the entry in the constraint (either bounds or * provisional instance value) @@ -5070,8 +5090,17 @@ object Types extends TypeUtils { } } object TypeVar: - def apply(using Context)(initOrigin: TypeParamRef, creatorState: TyperState | Null, nestingLevel: Int = ctx.nestingLevel) = - new TypeVar(initOrigin, creatorState, nestingLevel) + def apply(using Context)( + initOrigin: TypeParamRef, + creatorState: TyperState | Null, + nestingLevel: Int = ctx.nestingLevel, + precise: Boolean = false) = + new TypeVar(initOrigin, creatorState, nestingLevel, precise) + + enum Widen: + case None // no widening + case Singletons // widen singletons but not unions + case Unions // widen singletons and unions type TypeVars = SimpleIdentitySet[TypeVar] diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 393b38c5ff57..b69d9f76852a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -2087,7 +2087,7 @@ class Namer { typer: Typer => if defaultTp.exists then TypeOps.SimplifyKeepUnchecked() else null) match case ctp: ConstantType if sym.isInlineVal => ctp - case tp => TypeComparer.widenInferred(tp, pt, widenUnions = true) + case tp => TypeComparer.widenInferred(tp, pt, Widen.Unions) // Replace aliases to Unit by Unit itself. If we leave the alias in // it would be erased to BoxedUnit. 
diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 46c12b244fbb..7afdc836f656 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -701,6 +701,12 @@ object ProtoTypes { case FunProto((arg: untpd.TypedSplice) :: Nil, _) => arg.isExtensionReceiver case _ => false + object SingletonConstrained: + def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match + case RefinedType(parent, tpnme.Self, TypeAlias(tp)) + if parent.typeSymbol == defn.SingletonClass => Some(tp) + case _ => None + /** Add all parameters of given type lambda `tl` to the constraint's domain. * If the constraint contains already some of these parameters in its domain, * make a copy of the type lambda and add the copy's type parameters instead. @@ -713,26 +719,41 @@ object ProtoTypes { tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean, nestingLevel: Int = ctx.nestingLevel - ): (TypeLambda, List[TypeVar]) = { + ): (TypeLambda, List[TypeVar]) = val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty if (tl.isInstanceOf[PolyType]) assert(!ctx.typerState.isCommittable || addTypeVars, s"inconsistent: no typevars were added to committable constraint ${state.constraint}") // hk type lambdas can be added to constraints without typevars during match reduction + val added = state.constraint.ensureFresh(tl) + + def singletonConstrainedRefs(tp: Type): Set[TypeParamRef] = tp match + case tp: MethodType if tp.isContextualMethod => + val ownBounds = + for case SingletonConstrained(ref: TypeParamRef) <- tp.paramInfos + yield ref + ownBounds.toSet ++ singletonConstrainedRefs(tp.resType) + case tp: LambdaType => + singletonConstrainedRefs(tp.resType) + case _ => + Set.empty + + val singletonRefs = singletonConstrainedRefs(added) + def isSingleton(ref: TypeParamRef) = singletonRefs.contains(ref) - def newTypeVars(tl: 
TypeLambda): List[TypeVar] = - for paramRef <- tl.paramRefs - yield - val tvar = TypeVar(paramRef, state, nestingLevel) + def newTypeVars: List[TypeVar] = + for paramRef <- added.paramRefs yield + val tvar = TypeVar(paramRef, state, nestingLevel, precise = isSingleton(paramRef)) state.ownedVars += tvar tvar - val added = state.constraint.ensureFresh(tl) - val tvars = if addTypeVars then newTypeVars(added) else Nil + val tvars = if addTypeVars then newTypeVars else Nil TypeComparer.addToConstraint(added, tvars) + for paramRef <- added.paramRefs do + if isSingleton(paramRef) then paramRef <:< defn.SingletonType (added, tvars) - } + end constrained def constrained(tl: TypeLambda, owningTree: untpd.Tree)(using Context): (TypeLambda, List[TypeVar]) = constrained(tl, owningTree, diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 21d1151bcfd3..9fb091e3306c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -237,6 +237,16 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): EmptyTreeNoError end synthesizedValueOf + val synthesizedSingleton: SpecialHandler = (formal, span) => formal match + case SingletonConstrained(tp) => + if tp.isSingletonBounded(frozen = false) then + withNoErrors: + ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) + else + withErrors(i"$tp is not a singleton") + case _ => + EmptyTreeNoError + /** Create an anonymous class `new Object { type MirroredMonoType = ... }` * and mark it with given attachment so that it is made into a mirror at PostTyper. 
*/ @@ -536,7 +546,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val tparams = poly.paramRefs val variances = childClass.typeParams.map(_.paramVarianceSign) val instanceTypes = tparams.lazyZip(variances).map((tparam, variance) => - TypeComparer.instanceType(tparam, fromBelow = variance < 0, widenUnions = true) + TypeComparer.instanceType(tparam, fromBelow = variance < 0, Widen.Unions) ) val instanceType = resType.substParams(poly, instanceTypes) // this is broken in tests/run/i13332intersection.scala, @@ -738,6 +748,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): defn.MirrorClass -> synthesizedMirror, defn.ManifestClass -> synthesizedManifest, defn.OptManifestClass -> synthesizedOptManifest, + defn.SingletonClass -> synthesizedSingleton, ) def tryAll(formal: Type, span: Span)(using Context): TreeWithErrors = diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 6ac41ed619b6..d23f77143e14 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3321,8 +3321,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) if ctx.mode.is(Mode.Pattern) then app1 else - val elemTpes = elems.lazyZip(pts).map((elem, pt) => - TypeComparer.widenInferred(elem.tpe, pt, widenUnions = true)) + val elemTpes = elems.lazyZip(pts).map: (elem, pt) => + TypeComparer.widenInferred(elem.tpe, pt, Widen.Unions) val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index 5ac81061e42d..8c95152b8e46 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -7,7 +7,7 @@ nightlyOf: 
https://docs.scala-lang.org/scala3/reference/experimental/typeclasses # Some Proposed Changes for Better Support of Type Classes -Martin Odersky, 8.1.2024 +Martin Odersky, 8.1.2024, edited 5.4.2024 A type class in Scala is a pattern where we define @@ -27,6 +27,8 @@ under source version `future` if the additional experimental language import `mo scala compile -source:future -language:experimental.modularity ``` +It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. The order of exposition described in this note is different from the planned proposals of SIPs. This doc is not a guide on how to sequence details, but instead wants to present a vision of what is possible. For instance, we start here with a feature (Self types and `is` syntax) that has turned out to be controversial and that will probably be proposed only late in the sequence of SIPs. + ## Generalizing Context Bounds The only place in Scala's syntax where the type class pattern is relevant is @@ -54,6 +56,8 @@ requires that `Ordering` is a trait or class with a single type parameter (which trait Monoid extends SemiGroup: def unit: Self + object Monoid: + def unit[M](using m: Monoid { type Self = M}): M trait Functor: type Self[A] @@ -129,7 +133,7 @@ We introduce a standard type alias `is` in the Scala package or in `Predef`, def infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } ``` -This makes writing instance definitions quite pleasant. Examples: +This makes writing instance definitions and using clauses quite pleasant. Examples: ```scala given Int is Ord ... @@ -137,6 +141,9 @@ This makes writing instance definitions quite pleasant. Examples: type Reader = [X] =>> Env => X given Reader is Monad ... 
+ + object Monoid: + def unit[M](using m: M is Monoid): M ``` (more examples will follow below) @@ -682,7 +689,7 @@ With the improvements proposed here, the library can now be expressed quite clea ## Suggested Improvements unrelated to Type Classes -The following improvements elsewhere would make sense alongside the suggested changes to type classes. But they are currently not part of this proposal or implementation. +The following two improvements elsewhere would make sense alongside the suggested changes to type classes. But only the first (fixing singleton) forms a part of this proposal and is implemented. ### Fixing Singleton @@ -704,7 +711,7 @@ Then, instead of using an unsound upper bound we can use a context bound: def f[X: Singleton](x: X) = ... ``` -The context bound would be treated specially by the compiler so that no using clause is generated at runtime. +The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). _Aside_: This can also lead to a solution how to express precise type variables. We can introduce another special type class `Precise` and use it like this: diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index a68a628623bf..6c286f322ba7 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -77,6 +77,6 @@ object Predef: * * which is what is needed for a context bound `[A: TC]`. 
*/ - infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } + infix type is[A <: AnyKind, B <: Any{type Self <: AnyKind}] = B { type Self = A } end Predef diff --git a/tests/neg/singleton-ctx-bound.scala b/tests/neg/singleton-ctx-bound.scala new file mode 100644 index 000000000000..64bb63a288b0 --- /dev/null +++ b/tests/neg/singleton-ctx-bound.scala @@ -0,0 +1,20 @@ +//> using options -language:experimental.modularity -source future +object Test: + + def someInt = 1 + + def f1[T <: Singleton](x: T): T = x + f1(someInt) // error + f1(if ??? then 1 else 2) // OK, but should be error + f1(3 * 2) // OK + + def f2[T](x: T)(using T is Singleton): T = x + f2(someInt) // error + f2(if ??? then 1 else 2) // error + f2(3 * 2) // OK + + def f3[T: Singleton](x: T): T = x + f3(someInt) // error + f3(if ??? then 1 else 2) // error + f3(3 * 2) // OK + f3(6) // OK diff --git a/tests/pos/singleton-ctx-bound.scala b/tests/pos/singleton-ctx-bound.scala new file mode 100644 index 000000000000..5d15cf53836e --- /dev/null +++ b/tests/pos/singleton-ctx-bound.scala @@ -0,0 +1,44 @@ +//> using options -language:experimental.modularity -source future +object Test: + + class Wrap[T](x: T) + + def f0[T](x: T): Wrap[T] = Wrap(x) + val x0 = f0(1) + val _: Wrap[Int] = x0 + + def f1[T <: Singleton](x: T): Wrap[T] = Wrap(x) + val x1 = f1(1) + val _: Wrap[1] = x1 + + def f2[T](x: T)(using Singleton { type Self = T}): Wrap[T] = Wrap(x) + val x2 = f2(1) + val _: Wrap[1] = x2 + + def f3[T: Singleton](x: T): Wrap[T] = Wrap(x) + val x3 = f3(1) + val _: Wrap[1] = x3 + + def f4[T](x: T)(using T is Singleton): Wrap[T] = Wrap(x) + val x4 = f4(1) + val _: Wrap[1] = x4 + + class C0[T](x: T): + def fld: T = x + val y0 = C0("hi") + val _: String = y0.fld + + class C1[T <: Singleton](x: T): + def fld: T = x + val y1 = C1("hi") + val _: "hi" = y1.fld + + class C2[T](x: T)(using T is Singleton): + def fld: T = x + val y2 = C2("hi") + val _: "hi" = y1.fld + + class C3[T: Singleton](x: T): + 
def fld: T = x + val y3 = C3("hi") + val _: "hi" = y1.fld \ No newline at end of file From 0c941e21bc34a27d418b9050630f13ba27ec1c62 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 6 Apr 2024 15:13:07 +0200 Subject: [PATCH 361/465] Tweaks to doc pages [Cherry-picked 1f2e735565a7cb95b8b4ea3f71d330511da1f516] --- docs/_docs/reference/experimental/modularity.md | 2 +- docs/_docs/reference/experimental/typeclasses.md | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/_docs/reference/experimental/modularity.md b/docs/_docs/reference/experimental/modularity.md index 2062c4d5eda2..a989b71770af 100644 --- a/docs/_docs/reference/experimental/modularity.md +++ b/docs/_docs/reference/experimental/modularity.md @@ -138,7 +138,7 @@ when typechecking recursive class graphs. So an explicit `tracked` looks like th Since `tracked` parameters create refinements in constructor types, it is now possible that a class has a parent that is a refined type. -Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing so +Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing to admit such types. **Proposal** Allow refined types as parent types of classes. All refinements that are inherited in this way become synthetic members of the class. diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index 8c95152b8e46..dab612512579 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -220,7 +220,7 @@ So far, an unnamed context bound for a type parameter gets a synthesized fresh n xs.foldLeft(A.unit)(_ `combine` _) ``` -The use of a name like `A` above in two variants, both as a type name and as a term name is of course familiar to Scala programmers. 
We use the same convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before. +In Scala we are already familiar with using one name for two related things where one version names a type and the other an associated value. For instance, we use that convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before. **Proposed Rules** @@ -228,6 +228,8 @@ The use of a name like `A` above in two variants, both as a type name and as a t 2. The generated evidence for a context bound `A : C` without an `as` binding has name `A` (seen as a term name). So, `A : C` is equivalent to `A : C as A`. 3. If there are multiple context bounds for a type parameter, as in `A : {C_1, ..., C_n}`, the generated evidence parameter for every context bound `C_i` has a fresh synthesized name, unless the context bound carries an `as` clause, in which case rule (1) applies. +TODO: Present context bound proxy concept. + The default naming convention reduces the need for named context bounds. But named context bounds are still essential, for at least two reasons: - They are needed to give names to multiple context bounds. @@ -357,6 +359,8 @@ given Int is Monoid: extension (x: Int) def combine(y: Int) = x + y def unit = 0 ``` +Here, the second given can be read as if `A` is an `Ord` then `List[A]` is also an`Ord`. Or: for all `A: Ord`, `List[A]` is `Ord`. The arrow can be seen as an implication, note also the analogy to pattern matching syntax. + If explicit names are desired, we add them with `as` clauses: ```scala given String is Ord as intOrd: @@ -558,6 +562,7 @@ Here are some standard type classes, which were mostly already introduced at the def minimum[T: Ord](xs: List[T]) = maximum(xs)(using descending) ``` +The `Reader` type is a bit hairy. 
It is a type class (written in the parameterized syntax) where we fix a context `Ctx` and then let `Reader` be the polymorphic function type over `X` that takes a context `Ctx` and returns an `X`. Type classes like this are commonly used in monadic effect systems. ### Example 2 From 09a6a26a818b9503a989ec33aabf1999021d300a Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 6 Apr 2024 15:13:46 +0200 Subject: [PATCH 362/465] Add Precise type class for precise type inference [Cherry-picked 94bc6fee3aa23e0d00fb5a044b3f99ea13a3cc37] --- .../dotty/tools/dotc/core/Definitions.scala | 2 + .../src/dotty/tools/dotc/core/Types.scala | 14 +++- .../dotty/tools/dotc/typer/ProtoTypes.scala | 71 +++++++++++++------ .../dotty/tools/dotc/typer/Synthesizer.scala | 10 ++- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- .../dotty/tools/repl/TabcompleteTests.scala | 4 +- .../reference/experimental/typeclasses.md | 65 +++++++++-------- library/src/scala/Precise.scala | 11 +++ tests/neg/singleton-ctx-bound.check | 34 +++++++++ tests/neg/singleton-ctx-bound.scala | 15 ++++ tests/pos/deferred-givens-singletons.scala | 13 ++++ tests/pos/precise-ctx-bound.scala | 51 +++++++++++++ tests/pos/precise-indexof.scala | 46 ++++++++++++ tests/pos/singleton-ctx-bound.scala | 7 +- .../stdlibExperimentalDefinitions.scala | 3 + 15 files changed, 287 insertions(+), 61 deletions(-) create mode 100644 library/src/scala/Precise.scala create mode 100644 tests/neg/singleton-ctx-bound.check create mode 100644 tests/pos/deferred-givens-singletons.scala create mode 100644 tests/pos/precise-ctx-bound.scala create mode 100644 tests/pos/precise-indexof.scala diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 6d3a4de7b026..11a4a8473e79 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -535,6 +535,8 @@ class Definitions { def ConsType: TypeRef = ConsClass.typeRef 
@tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") + @tu lazy val PreciseClass: ClassSymbol = requiredClass("scala.Precise") + @tu lazy val SingletonClass: ClassSymbol = // needed as a synthetic class because Scala 2.x refers to it in classfiles // but does not define it as an explicit class. diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 27931bad0bc3..3c6d9ecbf204 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4941,7 +4941,7 @@ object Types extends TypeUtils { initOrigin: TypeParamRef, creatorState: TyperState | Null, val initNestingLevel: Int, - precise: Boolean) extends CachedProxyType with ValueType { + val precise: Boolean) extends CachedProxyType with ValueType { private var currentOrigin = initOrigin def origin: TypeParamRef = currentOrigin @@ -5045,9 +5045,19 @@ object Types extends TypeUtils { else instantiateWith(tp) + def isPrecise(using Context) = + precise + || { + val constr = ctx.typerState.constraint + constr.upper(origin).exists: tparam => + constr.typeVarOfParam(tparam) match + case tvar: TypeVar => tvar.precise + case _ => false + } + /** Widen unions when instantiating this variable in the current context? 
*/ def widenPolicy(using Context): Widen = - if precise then Widen.None + if isPrecise then Widen.None else if ctx.typerState.constraint.isHard(this) then Widen.Singletons else Widen.Unions diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 7afdc836f656..bb1d5ac71269 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -11,6 +11,7 @@ import Constants.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet} import Decorators.* import Uniques.* +import Flags.Method import inlines.Inlines import config.Printers.typr import Inferencing.* @@ -26,7 +27,7 @@ object ProtoTypes { import tpd.* /** A trait defining an `isCompatible` method. */ - trait Compatibility { + trait Compatibility: /** Is there an implicit conversion from `tp` to `pt`? */ def viewExists(tp: Type, pt: Type)(using Context): Boolean @@ -106,19 +107,34 @@ object ProtoTypes { if !res then ctx.typerState.constraint = savedConstraint res - /** Constrain result with special case if `meth` is an inlineable method in an inlineable context. - * In that case, we should always succeed and not constrain type parameters in the expected type, - * because the actual return type can be a subtype of the currently known return type. - * However, we should constrain parameters of the declared return type. This distinction is - * achieved by replacing expected type parameters with wildcards. + /** Constrain result with two special cases: + * 1. If `meth` is an inlineable method in an inlineable context, + * we should always succeed and not constrain type parameters in the expected type, + * because the actual return type can be a subtype of the currently known return type. + * However, we should constrain parameters of the declared return type. This distinction is + * achieved by replacing expected type parameters with wildcards. + * 2. 
When constraining the result of a primitive value operation against + * a precise typevar, don't lower-bound the typevar with a non-singleton type. */ def constrainResult(meth: Symbol, mt: Type, pt: Type)(using Context): Boolean = - if (Inlines.isInlineable(meth)) { + + def constFoldException(pt: Type): Boolean = pt.dealias match + case tvar: TypeVar => + tvar.isPrecise + && meth.is(Method) && meth.owner.isPrimitiveValueClass + && mt.resultType.isPrimitiveValueType && !mt.resultType.isSingleton + case tparam: TypeParamRef => + constFoldException(ctx.typerState.constraint.typeVarOfParam(tparam)) + case _ => + false + + if Inlines.isInlineable(meth) then constrainResult(mt, wildApprox(pt)) true - } - else constrainResult(mt, pt) - } + else + constFoldException(pt) || constrainResult(mt, pt) + end constrainResult + end Compatibility object NoViewsAllowed extends Compatibility { override def viewExists(tp: Type, pt: Type)(using Context): Boolean = false @@ -701,10 +717,18 @@ object ProtoTypes { case FunProto((arg: untpd.TypedSplice) :: Nil, _) => arg.isExtensionReceiver case _ => false - object SingletonConstrained: - def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match - case RefinedType(parent, tpnme.Self, TypeAlias(tp)) - if parent.typeSymbol == defn.SingletonClass => Some(tp) + /** An extractor for Singleton and Precise witness types. + * + * Singleton { type Self = T } returns Some(T, true) + * Precise { type Self = T } returns Some(T, false) + */ + object PreciseConstrained: + def unapply(tp: Type)(using Context): Option[(Type, Boolean)] = tp.dealias match + case RefinedType(parent, tpnme.Self, TypeAlias(tp)) => + val tsym = parent.typeSymbol + if tsym == defn.SingletonClass then Some((tp, true)) + else if tsym == defn.PreciseClass then Some((tp, false)) + else None case _ => None /** Add all parameters of given type lambda `tl` to the constraint's domain. 
@@ -728,30 +752,31 @@ object ProtoTypes { // hk type lambdas can be added to constraints without typevars during match reduction val added = state.constraint.ensureFresh(tl) - def singletonConstrainedRefs(tp: Type): Set[TypeParamRef] = tp match + def preciseConstrainedRefs(tp: Type, singletonOnly: Boolean): Set[TypeParamRef] = tp match case tp: MethodType if tp.isContextualMethod => val ownBounds = - for case SingletonConstrained(ref: TypeParamRef) <- tp.paramInfos + for + case PreciseConstrained(ref: TypeParamRef, singleton) <- tp.paramInfos + if !singletonOnly || singleton yield ref - ownBounds.toSet ++ singletonConstrainedRefs(tp.resType) + ownBounds.toSet ++ preciseConstrainedRefs(tp.resType, singletonOnly) case tp: LambdaType => - singletonConstrainedRefs(tp.resType) + preciseConstrainedRefs(tp.resType, singletonOnly) case _ => Set.empty - val singletonRefs = singletonConstrainedRefs(added) - def isSingleton(ref: TypeParamRef) = singletonRefs.contains(ref) - def newTypeVars: List[TypeVar] = + val preciseRefs = preciseConstrainedRefs(added, singletonOnly = false) for paramRef <- added.paramRefs yield - val tvar = TypeVar(paramRef, state, nestingLevel, precise = isSingleton(paramRef)) + val tvar = TypeVar(paramRef, state, nestingLevel, precise = preciseRefs.contains(paramRef)) state.ownedVars += tvar tvar val tvars = if addTypeVars then newTypeVars else Nil TypeComparer.addToConstraint(added, tvars) + val singletonRefs = preciseConstrainedRefs(added, singletonOnly = true) for paramRef <- added.paramRefs do - if isSingleton(paramRef) then paramRef <:< defn.SingletonType + if singletonRefs.contains(paramRef) then paramRef <:< defn.SingletonType (added, tvars) end constrained diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 9fb091e3306c..6b18540b6551 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -238,7 
+238,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): end synthesizedValueOf val synthesizedSingleton: SpecialHandler = (formal, span) => formal match - case SingletonConstrained(tp) => + case PreciseConstrained(tp, true) => if tp.isSingletonBounded(frozen = false) then withNoErrors: ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) @@ -247,6 +247,13 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case _ => EmptyTreeNoError + val synthesizedPrecise: SpecialHandler = (formal, span) => formal match + case PreciseConstrained(tp, false) => + withNoErrors: + ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) + case _ => + EmptyTreeNoError + /** Create an anonymous class `new Object { type MirroredMonoType = ... }` * and mark it with given attachment so that it is made into a mirror at PostTyper. */ @@ -749,6 +756,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): defn.ManifestClass -> synthesizedManifest, defn.OptManifestClass -> synthesizedOptManifest, defn.SingletonClass -> synthesizedSingleton, + defn.PreciseClass -> synthesizedPrecise, ) def tryAll(formal: Type, span: Span)(using Context): TreeWithErrors = diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index d23f77143e14..b1b21bd1eee5 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3027,7 +3027,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Select(id)(This(cls), id.name) case _ => super.transform(tree) - ValDef(impl, anchorParams.transform(rhs)) + ValDef(impl, anchorParams.transform(rhs)).withSpan(impl.span.endPos) end givenImpl val givenImpls = diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index e4c3a2557e7d..f719752be353 100644 --- 
a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -122,11 +122,11 @@ class TabcompleteTests extends ReplTest { } @Test def moduleCompletion = initially { - assertEquals(List("Predef"), tabComplete("object Foo { type T = Pre")) + assertEquals(List("Predef"), tabComplete("object Foo { type T = Pred")) } @Test def i6415 = initially { - assertEquals(List("Predef"), tabComplete("object Foo { opaque type T = Pre")) + assertEquals(List("Predef"), tabComplete("object Foo { opaque type T = Pred")) } @Test def i6361 = initially { diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index dab612512579..cf5f3220faa6 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -444,6 +444,39 @@ This is less of a disruption than it might appear at first: - Simplification of the language since a feature is dropped - Eliminate non-obvious and misleading syntax. + +### Bonus: Fixing Singleton + +We know the current treatment of `Singleton` as a type bound is broken since +`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. + +A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. + +We can do this in a backwards-compatible way by defining `Singleton` like this: + +```scala +trait Singleton: + type Self +``` + +Then, instead of using an unsound upper bound we can use a context bound: + +```scala +def f[X: Singleton](x: X) = ... +``` + +The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). + +### Bonus: Precise Typing + +This approach also presents a solution to the problem how to express precise type variables. 
We can introduce another special type class `Precise` and use it like this: + +```scala +def f[X: Precise](x: X) = ... +``` +Like a `Singleton` bound, a `Precise` bound disables automatic widening of singleton types or union types in inferred instances of type variable `X`. But there is no requirement that the type argument _must_ be a singleton. + + ## Summary of Syntax Changes Here is the complete context-free syntax for all proposed features. @@ -692,38 +725,10 @@ Dimi Racordon tried to [port some core elements](https://github.com/kyouko-taiga With the improvements proposed here, the library can now be expressed quite clearly and straightforwardly. See tests/pos/hylolib in this PR for details. -## Suggested Improvements unrelated to Type Classes - -The following two improvements elsewhere would make sense alongside the suggested changes to type classes. But only the first (fixing singleton) forms a part of this proposal and is implemented. - -### Fixing Singleton - -We know the current treatment of `Singleton` as a type bound is broken since -`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. - -A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. +## Suggested Improvement unrelated to Type Classes -We can do this in a backwards-compatible way by defining `Singleton` like this: +The following improvement would make sense alongside the suggested changes to type classes. But it does not form part of this proposal and is not yet implemented. -```scala -trait Singleton: - type Self -``` - -Then, instead of using an unsound upper bound we can use a context bound: - -```scala -def f[X: Singleton](x: X) = ... -``` - -The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). 
- -_Aside_: This can also lead to a solution how to express precise type variables. We can introduce another special type class `Precise` and use it like this: - -```scala -def f[X: Precise](x: X) = ... -``` -This would disable automatic widening of singleton types in inferred instances of type variable `X`. ### Using `as` also in Patterns diff --git a/library/src/scala/Precise.scala b/library/src/scala/Precise.scala new file mode 100644 index 000000000000..aad42ca8950f --- /dev/null +++ b/library/src/scala/Precise.scala @@ -0,0 +1,11 @@ +package scala +import annotation.experimental +import language.experimental.erasedDefinitions + +/** A type class-like trait intended as a context bound for type variables. + * If we have `[X: Precise]`, instances of the type variable `X` are inferred + * in precise mode. This means that singleton types and union types are not + * widened. + */ +@experimental erased trait Precise: + type Self diff --git a/tests/neg/singleton-ctx-bound.check b/tests/neg/singleton-ctx-bound.check new file mode 100644 index 000000000000..785123c0e680 --- /dev/null +++ b/tests/neg/singleton-ctx-bound.check @@ -0,0 +1,34 @@ +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:7:5 ------------------------------------------------- +7 | f1(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:12:5 ------------------------------------------------ +12 | f2(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:13:26 -------------------------------------------------------- +13 | f2(if ??? then 1 else 2) // error + | ^ + |No given instance of type (1 : Int) | (2 : Int) is Singleton was found for parameter x$2 of method f2 in object Test. 
Failed to synthesize an instance of type (1 : Int) | (2 : Int) is Singleton: (1 : Int) | (2 : Int) is not a singleton +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:17:5 ------------------------------------------------ +17 | f3(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:18:26 -------------------------------------------------------- +18 | f3(if ??? then 1 else 2) // error + | ^ + |No given instance of type Singleton{type Self = (1 : Int) | (2 : Int)} was found for a context parameter of method f3 in object Test. Failed to synthesize an instance of type Singleton{type Self = (1 : Int) | (2 : Int)}: (1 : Int) | (2 : Int) is not a singleton +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:33:6 --------------------------------------------------------- +33 |class D extends A: // error + |^ + |No given instance of type Singleton{type Self = D.this.Elem} was found for inferring the implementation of the deferred given instance given_Singleton_Elem in trait A. Failed to synthesize an instance of type Singleton{type Self = D.this.Elem}: D.this.Elem is not a singleton +34 | type Elem = Int diff --git a/tests/neg/singleton-ctx-bound.scala b/tests/neg/singleton-ctx-bound.scala index 64bb63a288b0..e061ec54bb16 100644 --- a/tests/neg/singleton-ctx-bound.scala +++ b/tests/neg/singleton-ctx-bound.scala @@ -18,3 +18,18 @@ object Test: f3(if ??? 
then 1 else 2) // error f3(3 * 2) // OK f3(6) // OK + +import compiletime.* + +trait A: + type Elem: Singleton + +class B extends A: + type Elem = 1 // OK + +class C[X: Singleton] extends A: + type Elem = X // OK + +class D extends A: // error + type Elem = Int + diff --git a/tests/pos/deferred-givens-singletons.scala b/tests/pos/deferred-givens-singletons.scala new file mode 100644 index 000000000000..60a881340b75 --- /dev/null +++ b/tests/pos/deferred-givens-singletons.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* + +trait A: + type Elem: Singleton + +class B extends A: + type Elem = 1 + +class C[X: Singleton] extends A: + type Elem = X + + diff --git a/tests/pos/precise-ctx-bound.scala b/tests/pos/precise-ctx-bound.scala new file mode 100644 index 000000000000..3f17a5b4a54e --- /dev/null +++ b/tests/pos/precise-ctx-bound.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +object Test: + + class Wrap[T](x: T) + + def f0[T](x: T): Wrap[T] = Wrap(x) + val x0 = f0(1) + val _: Wrap[Int] = x0 + + def f1[T: Precise](x: T): Wrap[T] = Wrap(x) + def l = "hello".length + val x1 = Wrap(l) + val _: Wrap[Int] = x1 + + def f2[T](x: T)(using Precise { type Self = T}): Wrap[T] = Wrap(x) + val x2 = f2(1) + val _: Wrap[1] = x2 + + def f3[T: Precise](x: T): Wrap[T] = Wrap(x) + val x3 = f3(identity(1)) + val _: Wrap[1] = x3 + val x3a = f3(1 + 2) + val _: Wrap[3] = x3a + + def f4[T](x: T)(using T is Precise): Wrap[T] = Wrap(x) + val x4 = f4(1) + val _: Wrap[1] = x4 + val x4a = f4(1 + 2) + val _: Wrap[3] = x4a + val y4 = f4(if ??? then 1 else 2) + val _: Wrap[1 | 2] = y4 + val z4 = f4(if ??? 
then B() else C()) + val _: Wrap[B | C] = z4 + trait A + class B extends A + class C extends A + + class C0[T](x: T): + def fld: T = x + val y0 = C0("hi") + val _: String = y0.fld + + class C2[T](x: T)(using T is Precise): + def fld: T = x + val y2 = C2(identity("hi")) + val _: "hi" = y2.fld + + class C3[T: Precise](x: T): + def fld: T = x + val y3 = C3("hi") + val _: "hi" = y3.fld diff --git a/tests/pos/precise-indexof.scala b/tests/pos/precise-indexof.scala new file mode 100644 index 000000000000..af1e6c5b504b --- /dev/null +++ b/tests/pos/precise-indexof.scala @@ -0,0 +1,46 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +import compiletime.ops.int.* + +/** The index of `Y` in tuple `X` as a literal constant Int, + * or `Size[X]` if `Y` does not occur in `X` + */ +type IndexOf[X <: Tuple, Y] <: Int = X match + case Y *: _ => 0 + case x *: xs => S[IndexOf[xs, Y]] + case EmptyTuple => 0 + +extension [X <: Tuple](inline x: X) + + /** The index (starting at 0) of the first element in the type `X` of `x` + * that matches type `Y`. + */ + inline def indexOfType[Y] = constValue[IndexOf[X, Y]] + + inline def indexOf[Y: Precise](y: Y) = constValue[IndexOf[X, Y]] + +// Note: without the Precise, the index calcularion would go wrong. 
For instance, +// (1, 2, "hello", true).indexOf(2) would be 0, the same as (1, 2, "hello", true).indexOTypef[Int] +// (1, 2, "hello", true).indexOf("foo") would be 2, the same as (1, 2, "hello", true).indexOTypef[String] +// But we could alternatively pick Singleton + +@main def Test = + val t: (1, 2, "hello", true) = (1, 2, "hello", true) + val x1: 0 = t.indexOfType[1] + val x2: 1 = t.indexOfType[2] + val x3: 2 = t.indexOfType["hello"] + val x4: 3 = t.indexOfType[true] + val x5: 4 = t.indexOfType[77] + val x6: 0 = t.indexOfType[Int] + val x7: 2 = t.indexOfType[String] + val x8: 4 = t.indexOfType[Double] + + val y1: 0 = t.indexOf(1) + val y2: 1 = t.indexOf(2) + val y3: 2 = t.indexOf("hello") + val y4: 3 = t.indexOf(true) + val y5: 4 = t.indexOf(identity(77)) + val y6: 0 = t.indexOf(identity(1)) + val y7: 4 = t.indexOf("foo") + + diff --git a/tests/pos/singleton-ctx-bound.scala b/tests/pos/singleton-ctx-bound.scala index 5d15cf53836e..c6b0d2fb823c 100644 --- a/tests/pos/singleton-ctx-bound.scala +++ b/tests/pos/singleton-ctx-bound.scala @@ -36,9 +36,12 @@ object Test: class C2[T](x: T)(using T is Singleton): def fld: T = x val y2 = C2("hi") - val _: "hi" = y1.fld + val _: "hi" = y2.fld class C3[T: Singleton](x: T): def fld: T = x val y3 = C3("hi") - val _: "hi" = y1.fld \ No newline at end of file + val _: "hi" = y3.fld + + + diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 48ff5407ac87..df35bed19360 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -79,6 +79,9 @@ val experimentalDefinitionInLibrary = Set( "scala.NamedTuple$", "scala.NamedTupleDecomposition", "scala.NamedTupleDecomposition$", + + // New feature: Precise trait + "scala.Precise", ) From e82cfbefac06aae6db231765dfde5219b1a379c3 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 14 Apr 2024 15:26:30 +0200 Subject: 
[PATCH 363/465] Fix rebase breakage [Cherry-picked 887fbc4b4996d95360e5dd92492d8f3904cde27a] --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- tests/neg/cb-companion-leaks.check | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 63e86e3a321d..c3369ac58e31 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1791,7 +1791,7 @@ trait Applications extends Compatibility { * a. always as good as a method or a polymorphic method. * b. as good as a member of any other type `tp2` if `asGoodValueType(tp1, tp2) = true` */ - def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { + def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsGood $tp1 $tp2", overload) { tp1 match case tp1: MethodType => // (1) tp1.paramInfos.isEmpty && tp2.isInstanceOf[LambdaType] diff --git a/tests/neg/cb-companion-leaks.check b/tests/neg/cb-companion-leaks.check index 156f8a7ab3ee..560561e0e261 100644 --- a/tests/neg/cb-companion-leaks.check +++ b/tests/neg/cb-companion-leaks.check @@ -1,4 +1,4 @@ --- [E194] Type Error: tests/neg/cb-companion-leaks.scala:9:23 ---------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:9:23 ---------------------------------------------------------- 9 | def foo[A: {C, D}] = A // error | ^ | context bound companion value A cannot be used as a value @@ -20,7 +20,7 @@ | companion value with the (term-)name `A`. However, these context bound companions | are not values themselves, they can only be referred to in selections. 
--------------------------------------------------------------------------------------------------------------------- --- [E194] Type Error: tests/neg/cb-companion-leaks.scala:13:10 --------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:13:10 --------------------------------------------------------- 13 | val x = A // error | ^ | context bound companion value A cannot be used as a value @@ -42,7 +42,7 @@ | companion value with the (term-)name `A`. However, these context bound companions | are not values themselves, they can only be referred to in selections. -------------------------------------------------------------------------------------------------------------------- --- [E194] Type Error: tests/neg/cb-companion-leaks.scala:15:9 ---------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:15:9 ---------------------------------------------------------- 15 | val y: A.type = ??? 
// error | ^ | context bound companion value A cannot be used as a value From 3b814bbaa402fd09a12914f71d3a0c65b82cc638 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 17 Apr 2024 23:01:54 +0200 Subject: [PATCH 364/465] Delay roll-out of new prioritization scheme: Now: 3.5: old scheme but warn if there are changes in the future 3.6-migration: new scheme, warn if prioritization has changed 3.6: new scheme, no warning [Cherry-picked 1e72282418d93a968b36fa43415f1ea63125d982] --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index c3369ac58e31..fd4c634801be 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1880,7 +1880,7 @@ trait Applications extends Compatibility { val tp1p = prepare(tp1) val tp2p = prepare(tp2) - if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) + if Feature.sourceVersion.isAtMost(SourceVersion.`3.5`) || oldResolution || !alt1isGiven && !alt2isGiven then From a8f7585ba757faaf74854d20129a9111c5489051 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 28 Apr 2024 13:12:43 +0200 Subject: [PATCH 365/465] Fix rebase breakage again [Cherry-picked 9d0ca20f949c4c390f4fa414f3c5ff4460013960] --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- project/MiMaFilters.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index b1b21bd1eee5..a2291d55bac8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2366,7 +2366,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) if tycon.tpe.typeParams.nonEmpty then 
typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) - else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractType then + else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 6c3640eed12c..18d2e985f844 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -98,7 +98,7 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of tasty core Build.mimaPreviousDottyVersion -> Seq( - ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype") + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype"), ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.TRACKED"), ), From dd8061fd8897870b35093c284af44fed3016eadf Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 28 Apr 2024 14:13:26 +0200 Subject: [PATCH 366/465] Make best effort compilation work with context bound companions If they are illegally used as values, we need to return an error tree, not a tree with a symbol that can't be pickled. 
[Cherry-picked fd072dc686bf0f0cc789ef0b7385d8189d64e374] --- .../tools/dotc/transform/PostTyper.scala | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index a110ec53abc0..22370e923a4b 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -279,13 +279,15 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } } - def checkUsableAsValue(tree: Tree)(using Context): Unit = + def checkUsableAsValue(tree: Tree)(using Context): Tree = def unusable(msg: Symbol => Message) = - report.error(msg(tree.symbol), tree.srcPos) + errorTree(tree, msg(tree.symbol)) if tree.symbol.is(ConstructorProxy) then unusable(ConstructorProxyNotValue(_)) - if tree.symbol.isContextBoundCompanion then + else if tree.symbol.isContextBoundCompanion then unusable(ContextBoundCompanionNotValue(_)) + else + tree def checkStableSelection(tree: Tree)(using Context): Unit = def check(qual: Tree) = @@ -330,11 +332,11 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if tree.isType then checkNotPackage(tree) else - checkUsableAsValue(tree) registerNeedsInlining(tree) - tree.tpe match { + val tree1 = checkUsableAsValue(tree) + tree1.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) - case _ => tree + case _ => tree1 } case tree @ Select(qual, name) => registerNeedsInlining(tree) @@ -342,8 +344,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) else - checkUsableAsValue(tree) - transformSelect(tree, Nil) + checkUsableAsValue(tree) match + case tree1: Select => transformSelect(tree1, Nil) + case tree1 => tree1 case tree: Apply => val methType = 
tree.fun.tpe.widen.asInstanceOf[MethodType] val app = From 04d402345763d31e9f61cd95cf494a4219061f51 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 30 Apr 2024 11:01:33 +0200 Subject: [PATCH 367/465] Tweaks after review [Cherry-picked 21f5e678e6a58380d47b8f68edf89317402595a9] --- .../src/dotty/tools/dotc/ast/Desugar.scala | 93 +++++++++++-------- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 8 +- compiler/src/dotty/tools/dotc/ast/untpd.scala | 1 + .../src/dotty/tools/dotc/config/Config.scala | 3 +- compiler/src/dotty/tools/dotc/core/Mode.scala | 4 +- .../src/dotty/tools/dotc/core/NamerOps.scala | 8 +- .../tools/dotc/core/SymDenotations.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 11 ++- .../dotty/tools/dotc/parsing/Parsers.scala | 30 ++++-- .../tools/dotc/transform/PostTyper.scala | 6 +- .../src/dotty/tools/dotc/typer/Namer.scala | 18 ++-- .../dotty/tools/dotc/typer/ProtoTypes.scala | 1 + .../dotty/tools/dotc/typer/RefChecks.scala | 8 +- .../src/dotty/tools/dotc/typer/Typer.scala | 64 ++++++++----- .../annotation/internal/WitnessNames.scala | 3 +- library/src/scala/compiletime/package.scala | 3 +- .../scala/runtime/stdLibPatches/Predef.scala | 1 + tests/neg/i12348.check | 12 +-- tests/neg/i12348.scala | 2 +- tests/pos/typeclasses-this.scala | 10 +- .../stdlibExperimentalDefinitions.scala | 5 +- 21 files changed, 175 insertions(+), 118 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 08953f1dec6b..0681492a4ba7 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -226,10 +226,21 @@ object desugar { private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) + /** Drop context bounds in given TypeDef, replacing them with evidence ValDefs that + * get added to a buffer. 
+ * @param tdef The given TypeDef + * @param evidenceBuf The buffer to which evidence gets added. This buffer + * is shared between desugarings of different type parameters + * of the same method. + * @param evidenceFlags The flags to use for evidence definitions + * @param freshName A function to generate fresh names for evidence definitions + * @param allParams If `tdef` is a type paramter, all parameters of the owning method, + * otherwise the empty list. + */ private def desugarContextBounds( tdef: TypeDef, evidenceBuf: mutable.ListBuffer[ValDef], - flags: FlagSet, + evidenceFlags: FlagSet, freshName: untpd.Tree => TermName, allParamss: List[ParamClause])(using Context): TypeDef = @@ -237,18 +248,18 @@ object desugar { def desugarRhs(rhs: Tree): Tree = rhs match case ContextBounds(tbounds, cxbounds) => - val isMember = flags.isAllOf(DeferredGivenFlags) + val isMember = evidenceFlags.isAllOf(DeferredGivenFlags) for bound <- cxbounds do val evidenceName = bound match case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => - ownName + ownName // if there is an explicitly given name, use it. case _ if Config.nameSingleContextBounds && !isMember && cxbounds.tail.isEmpty && Feature.enabled(Feature.modularity) => tdef.name.toTermName case _ => freshName(bound) evidenceNames += evidenceName - val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(flags) + val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(evidenceFlags) evidenceParam.pushAttachment(ContextBoundParam, ()) evidenceBuf += evidenceParam tbounds @@ -258,9 +269,13 @@ object desugar { rhs val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + // Under x.modularity, if there was a context bound, and `tdef`s name as a term name is + // neither a name of an existing parameter nor a name of generated evidence for + // the same method, add a WitnessAnnotation with all generated evidence names to `tdef`. + // This means a context bound proxy will be created later. 
if Feature.enabled(Feature.modularity) && evidenceNames.nonEmpty - && !evidenceNames.contains(tdef.name.toTermName) + && !evidenceBuf.exists(_.name == tdef.name.toTermName) && !allParamss.nestedExists(_.name == tdef.name.toTermName) then tdef1.withAddedAnnotation: @@ -332,9 +347,9 @@ object desugar { def getterParamss(n: Int): List[ParamClause] = mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, KeepAnnotations.All)) + tparam => dropContextBounds(toMethParam(tparam, KeepAnnotations.All)) } { - vparam => toDefParam(vparam, KeepAnnotations.All, keepDefault = false) + vparam => toMethParam(vparam, KeepAnnotations.All, keepDefault = false) } def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match @@ -429,32 +444,30 @@ object desugar { * The position of the added parameters is determined as follows: * * - If there is an existing parameter list that refers to one of the added - * parameters in one of its parameter types, add the new parameters - * in front of the first such parameter list. - * - Otherwise, if the last parameter list consists implicit or using parameters, + * parameters or their future context bound proxies in one of its parameter + * types, add the new parameters in front of the first such parameter list. + * - Otherwise, if the last parameter list consists of implicit or using parameters, * join the new parameters in front of this parameter list, creating one - * parameter list (this is equilavent to Scala 2's scheme). + * parameter list (this is equivalent to Scala 2's scheme). * - Otherwise, add the new parameter list at the end as a separate parameter clause. 
*/ private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = if params.isEmpty then return meth - var boundNames = params.map(_.name).toSet + var boundNames = params.map(_.name).toSet // all evidence parameter + context bound proxy names for mparams <- meth.paramss; mparam <- mparams do mparam match case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => boundNames += tparam.name.toTermName case _ => - //println(i"add ev params ${meth.name}, ${boundNames.toList}") - - def references(vdef: ValDef): Boolean = + def referencesBoundName(vdef: ValDef): Boolean = vdef.tpt.existsSubTree: case Ident(name: TermName) => boundNames.contains(name) case _ => false def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match - case ValDefs(mparams) :: _ if mparams.exists(references) => + case ValDefs(mparams) :: _ if mparams.exists(referencesBoundName) => params :: mparamss case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => (params ++ mparams) :: Nil @@ -468,12 +481,12 @@ object desugar { /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = - meth.paramss.reverse match { - case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.hasAttachment(ContextBoundParam)) - case _ => - Nil - } + for + case ValDefs(vparams @ (vparam :: _)) <- meth.paramss + if vparam.mods.isOneOf(GivenOrImplicit) + param <- vparams.takeWhile(_.hasAttachment(ContextBoundParam)) + yield + param @sharable private val synthetic = Modifiers(Synthetic) @@ -491,11 +504,13 @@ object desugar { case WitnessNamesAnnot(_) => true case _ => false - private def toDefParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = + /** Map type parameter accessor to corresponding method (i.e. 
constructor) parameter */ + private def toMethParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = val mods = filterAnnots(tparam.rawMods, keep) tparam.withMods(mods & EmptyFlags | Param) - private def toDefParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { + /** Map term parameter accessor to corresponding method (i.e. constructor) parameter */ + private def toMethParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { val mods = filterAnnots(vparam.rawMods, keep) val hasDefault = if keepDefault then HasDefault else EmptyFlags // Need to ensure that tree is duplicated since term parameters can be watched @@ -507,22 +522,16 @@ object desugar { .withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } - def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = - paramss.foldLeft(fn) { (fn, params) => params match - case TypeDefs(params) => - TypeApply(fn, params.map(refOfDef)) - case (vparam: ValDef) :: _ if vparam.mods.is(Given) => - Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) - case _ => - Apply(fn, params.map(refOfDef)) - } - + /** Desugar type def (not param): Under x.moduliity this can expand + * context bounds, which are expanded to evidence ValDefs. These will + * ultimately map to deferred givens. + */ def typeDef(tdef: TypeDef)(using Context): Tree = val evidenceBuf = new mutable.ListBuffer[ValDef] val result = desugarContextBounds( tdef, evidenceBuf, (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, - inventGivenOrExtensionName, Nil) + inventGivenName, Nil) if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) /** The expansion of a class definition. See inline comments for what is involved */ @@ -597,7 +606,7 @@ object desugar { // Annotations on class _type_ parameters are set on the derived parameters // but not on the constructor parameters. 
The reverse is true for // annotations on class _value_ parameters. - val constrTparams = impliedTparams.map(toDefParam(_, KeepAnnotations.WitnessOnly)) + val constrTparams = impliedTparams.map(toMethParam(_, KeepAnnotations.WitnessOnly)) val constrVparamss = if (originalVparamss.isEmpty) { // ensure parameter list is non-empty if (isCaseClass) @@ -608,7 +617,7 @@ object desugar { report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) ListOfNil } - else originalVparamss.nestedMap(toDefParam(_, KeepAnnotations.All, keepDefault = true)) + else originalVparamss.nestedMap(toMethParam(_, KeepAnnotations.All, keepDefault = true)) val derivedTparams = constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) @@ -630,7 +639,7 @@ object desugar { defDef( addEvidenceParams( cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, KeepAnnotations.None, keepDefault = false))))) + evidenceParams(constr1).map(toMethParam(_, KeepAnnotations.None, keepDefault = false))))) case stat => stat } @@ -1148,7 +1157,7 @@ object desugar { */ def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { var name = mdef.name - if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) + if (name.isEmpty) name = name.likeSpaced(inventGivenName(impl)) def errPos = mdef.source.atSpan(mdef.nameSpan) if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { val kind = if (name.isTypeName) "class" else "object" @@ -1195,7 +1204,7 @@ object desugar { end makePolyFunctionType /** Invent a name for an anonympus given of type or template `impl`. 
*/ - def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = + def inventGivenName(impl: Tree)(using Context): SimpleName = val str = impl match case impl: Template => if impl.parents.isEmpty then @@ -1207,6 +1216,10 @@ object desugar { "given_" ++ inventTypeName(impl) str.toTermName.asSimpleName + /** Extract a synthesized given name from a type tree. This is used for + * both anonymous givens and (under x.modularity) deferred givens. + * @param followArgs If true include argument types in the name + */ private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { private def extractArgs(args: List[Tree])(using Context): String = args.map(argNameExtractor.apply("", _)).mkString("_") diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 990fb37f4e60..11fb572b66c6 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -382,15 +382,15 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree.tpe.isInstanceOf[ThisType] } - - /** Extractor for annotation.internal.WitnessNames(name_1, ..., name_n)` + + /** Under x.modularity: Extractor for `annotation.internal.WitnessNames(name_1, ..., name_n)` * represented as an untyped or typed tree. 
*/ object WitnessNamesAnnot: - def apply(names0: List[TermName])(using Context): untpd.Tree = + def apply(names: List[TermName])(using Context): untpd.Tree = untpd.TypedSplice(tpd.New( defn.WitnessNamesAnnot.typeRef, - tpd.SeqLiteral(names0.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil + tpd.SeqLiteral(names.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil )) def unapply(tree: Tree)(using Context): Option[List[TermName]] = diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 0486e2e6d3d7..64f9fb4df95e 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -119,6 +119,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree + // `paramName: tycon as ownName`, ownName != EmptyTermName only under x.modularity case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 293044c245ef..ee8ed4b215d7 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -236,7 +236,8 @@ object Config { inline val checkLevelsOnConstraints = false inline val checkLevelsOnInstantiation = true - /** If a type parameter `X` has a 
single context bound `X: C`, should the + /** Under x.modularity: + * If a type parameter `X` has a single context bound `X: C`, should the * witness parameter be named `X`? This would prevent the creation of a * context bound companion. */ diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 5dab5631c62a..14d7827974c0 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -104,8 +104,8 @@ object Mode { val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") /** Use previous Scheme for implicit resolution. Currently significant - * in 3.0-migration where we use Scala-2's scheme instead and in 3.5-migration - * where we use the previous scheme up to 3.4 instead. + * in 3.0-migration where we use Scala-2's scheme instead and in 3.5 and 3.6-migration + * where we use the previous scheme up to 3.4 for comparison with the new scheme. */ val OldImplicitResolution: Mode = newMode(15, "OldImplicitResolution") diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 58b4ad681c6f..5e76b09bbde6 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -24,9 +24,9 @@ object NamerOps: addParamRefinements(ctor.owner.typeRef, paramss) /** Given a method with tracked term-parameters `p1, ..., pn`, and result type `R`, add the - * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the term parameter ref + * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the TermParamRef * of the parameter and pi is its name. This matters only under experimental.modularity, - * since wothout it there are no tracked parameters. Parameter refinements are added for + * since without it there are no tracked parameters. Parameter refinements are added for * constructors and given companion methods. 
*/ def addParamRefinements(resType: Type, paramss: List[List[Symbol]])(using Context): Type = @@ -261,7 +261,7 @@ object NamerOps: /** Create a context-bound companion for type symbol `tsym`, which has a context * bound that defines a set of witnesses with names `witnessNames`. * - * @param parans If `tsym` is a type parameter, a list of parameter symbols + * @param params If `tsym` is a type parameter, a list of parameter symbols * that include all witnesses, otherwise the empty list. * * The context-bound companion has as name the name of `tsym` translated to @@ -299,7 +299,7 @@ object NamerOps: * this class. This assumes that these types already have their * WitnessNames annotation set even before they are completed. This is * the case for unpickling but currently not for Namer. So the method - * is only called during unpickling, and is not part of NamerOps. + * is only called during unpickling. */ def addContextBoundCompanions(cls: ClassSymbol)(using Context): Unit = for sym <- cls.info.decls do diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 49c466f0bfd5..3904228756a0 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1194,7 +1194,7 @@ object SymDenotations { || is(JavaDefinedVal, butNot = Method) || isConstructor || !owner.isExtensibleClass && !is(Deferred) - // Deferred symbols can arise through parent refinements. + // Deferred symbols can arise through parent refinements under x.modularity. // For them, the overriding relationship reverses anyway, so // being in a final class does not mean the symbol cannot be // implemented concretely in a superclass. 
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 3c6d9ecbf204..a92893678a17 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1655,7 +1655,7 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` is `S` + * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` if `S` * is a singleton type. * * Does not perform the reduction if the resulting type would contain @@ -4936,6 +4936,7 @@ object Types extends TypeUtils { * @param origin the parameter that's tracked by the type variable. * @param creatorState the typer state in which the variable was created. * @param initNestingLevel the initial nesting level of the type variable. (c.f. nestingLevel) + * @param precise whether we should use instantiation without widening for this TypeVar. */ final class TypeVar private( initOrigin: TypeParamRef, @@ -5045,6 +5046,9 @@ object Types extends TypeUtils { else instantiateWith(tp) + /** Should we suppress widening? True if this TypeVar is precise + * or if it has as an upper bound a precise TypeVar. + */ def isPrecise(using Context) = precise || { @@ -5055,7 +5059,9 @@ object Types extends TypeUtils { case _ => false } - /** Widen unions when instantiating this variable in the current context? */ + /** The policy used for widening singletons or unions when instantiating + * this variable in the current context. 
+ */ def widenPolicy(using Context): Widen = if isPrecise then Widen.None else if ctx.typerState.constraint.isHard(this) then Widen.Singletons @@ -5107,6 +5113,7 @@ object Types extends TypeUtils { precise: Boolean = false) = new TypeVar(initOrigin, creatorState, nestingLevel, precise) + /** The three possible widening policies */ enum Widen: case None // no widening case Singletons // widen singletons but not unions diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index f3d02dda5c48..fe23d97d58c3 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3542,23 +3542,26 @@ object Parsers { paramMods() if paramOwner.takesOnlyUsingClauses && !impliedMods.is(Given) then syntaxError(em"`using` expected") - val (firstParamMod, isParams) = + val (firstParamMod, paramsAreNamed) = var mods = EmptyModifiers if in.lookahead.isColon then (mods, true) else if isErased then mods = addModifier(mods) - val isParams = + val paramsAreNamed = !impliedMods.is(Given) || startParamTokens.contains(in.token) || isIdent - && (in.name == nme.inline || in.name == nme.tracked || in.lookahead.isColon) - (mods, isParams) - (if isParams then commaSeparated(() => param()) - else contextTypes(paramOwner, numLeadParams, impliedMods)) match { + && (in.name == nme.inline + || in.name == nme.tracked && in.featureEnabled(Feature.modularity) + || in.lookahead.isColon) + (mods, paramsAreNamed) + val params = + if paramsAreNamed then commaSeparated(() => param()) + else contextTypes(paramOwner, numLeadParams, impliedMods) + params match case Nil => Nil case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t - } checkVarArgsRules(clause) clause } @@ -4156,7 +4159,10 @@ object Parsers { else // need to be careful with last `with` withConstrApps() - // TODO Change syntax description + // Adjust parameter modifiers so that they are now parameters of a method + // 
(originally, we created class parameters) + // TODO: syntax.md should be adjusted to reflect the difference that + // parameters of an alias given cannot be vals. def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = paramss.nestedMap: param => if !param.mods.isAllOf(PrivateLocal) then @@ -4173,7 +4179,8 @@ object Parsers { else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty - if !(name.isEmpty && noParams) then + val hasParamsOrId = !name.isEmpty || !noParams + if hasParamsOrId then if in.isColon then newSyntaxAllowed = false in.nextToken() @@ -4184,7 +4191,7 @@ object Parsers { rejectWildcardType(annotType()) :: Nil else constrApp() match case parent: Apply => parent :: moreConstrApps() - case parent if in.isIdent => + case parent if in.isIdent && newSyntaxAllowed => infixTypeRest(parent, _ => annotType1()) :: Nil case parent => parent :: moreConstrApps() if newSyntaxAllowed && in.isIdent(nme.as) then @@ -4193,6 +4200,7 @@ object Parsers { val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then + // given alias accept(EQUALS) mods1 |= Final if noParams && !mods.is(Inline) then @@ -4201,10 +4209,12 @@ object Parsers { else DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) else if (isStatSep || isStatSeqEnd) && parentsIsType && !newSyntaxAllowed then + // old-style abstract given if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else + // structural instance val vparamss1 = vparamss.nestedMap: vparam => if vparam.mods.is(Private) then vparam.withMods(vparam.mods &~ PrivateLocal | Protected) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 22370e923a4b..c6ad1bb860e8 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -476,9 +476,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) sym.addAnnotation(Annotation(defn.SourceFileAnnot, Literal(Constants.Constant(relativePath)), tree.span)) else - if !sym.is(Param) then - if !sym.owner.isOneOf(AbstractOrTrait) then - Checking.checkGoodBounds(tree.symbol) + if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then + Checking.checkGoodBounds(tree.symbol) + // Delete all context bound companions of this TypeDef if sym.owner.isClass && sym.hasAnnotation(defn.WitnessNamesAnnot) then val decls = sym.owner.info.decls for cbCompanion <- decls.lookupAll(sym.name.toTermName) do diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index b69d9f76852a..0588e27ea54f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -296,13 +296,13 @@ class Namer { typer: Typer => createOrRefine[Symbol](tree, name, flags, ctx.owner, _ => info, (fs, _, pwithin) => newSymbol(ctx.owner, name, fs, info, pwithin, tree.nameSpan)) case tree: Import => - recordSym(newImportSym(tree), tree) + recordSym(importSymbol(tree), tree) case _ => NoSymbol } } - private def newImportSym(imp: Import)(using Context): Symbol = + private def importSymbol(imp: Import)(using Context): Symbol = newImportSymbol(ctx.owner, Completer(imp)(ctx), imp.span) /** If `sym` exists, enter it in effective scope. 
Check that @@ -719,7 +719,7 @@ class Namer { typer: Typer => */ def expandTopLevel(stats: List[Tree])(using Context): Unit = stats match case (imp @ Import(qual, _)) :: stats1 if untpd.languageImport(qual).isDefined => - expandTopLevel(stats1)(using ctx.importContext(imp, newImportSym(imp))) + expandTopLevel(stats1)(using ctx.importContext(imp, importSymbol(imp))) case stat :: stats1 => expand(stat) expandTopLevel(stats1) @@ -1624,7 +1624,8 @@ class Namer { typer: Typer => } /** Enter all parent refinements as public class members, unless a definition - * with the same name already exists in the class. + * with the same name already exists in the class. Remember the refining symbols + * as an attachment on the ClassDef tree. */ def enterParentRefinementSyms(refinements: List[(Name, Type)]) = val refinedSyms = mutable.ListBuffer[Symbol]() @@ -1852,19 +1853,20 @@ class Namer { typer: Typer => // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR + // A map from context-bounded type parameters to associated evidence parameter names val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then for params <- ddef.paramss; case tdef: TypeDef <- params do for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do witnessNamesOfParam(tdef) = ws - /** Are all names in `wnames` defined by the longest prefix of all `params` + /** Is each name in `wnames` defined spmewhere in the longest prefix of all `params` * that have been typed ahead (i.e. that carry the TypedAhead attachment)? */ def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty - /** Enter and typecheck parameter list, add context companions as. + /** Enter and typecheck parameter list. * Once all witness parameters for a context bound are seen, create a * context bound companion for it. 
*/ @@ -1909,7 +1911,9 @@ class Namer { typer: Typer => val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) - /** We add `tracked` to context bound witnesses that have abstract type members */ + /** Under x.modularity, we add `tracked` to context bound witnesses + * that have abstract type members + */ def needsTracked(sym: Symbol, param: ValDef)(using Context) = !sym.is(Tracked) && param.hasAttachment(ContextBoundParam) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index bb1d5ac71269..ecf1da30cac1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -776,6 +776,7 @@ object ProtoTypes { TypeComparer.addToConstraint(added, tvars) val singletonRefs = preciseConstrainedRefs(added, singletonOnly = true) for paramRef <- added.paramRefs do + // Constrain all type parameters [T: Singleton] to T <: Singleton if singletonRefs.contains(paramRef) then paramRef <:< defn.SingletonType (added, tvars) end constrained diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 266b69d029c1..cb1aea27c444 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -552,7 +552,11 @@ object RefChecks { overrideError("is an extension method, cannot override a normal method") else if (other.is(ExtensionMethod) && !member.is(ExtensionMethod)) // (1.3) overrideError("is a normal method, cannot override an extension method") - else if (!other.is(Deferred) || other.isAllOf(Given | HasDefault)) + else if (!other.is(Deferred) + || other.isAllOf(Given | HasDefault) + // deferred givens have flags Given, HasDefault and Deferred set. 
These + // need to be checked for overriding as if they were concrete members + ) && !member.is(Deferred) && !other.name.is(DefaultGetterName) && !member.isAnyOverride @@ -626,7 +630,7 @@ object RefChecks { else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then overrideError("has different occurrences of `into` modifiers", compareTypes = true) else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) - && !member.is(Tracked) + && !member.is(Tracked) // see remark on tracked members above then // (1.12) report.errorOrMigrationWarning( em"cannot override val parameter ${other.showLocated}", diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a2291d55bac8..2eeccb6e477d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -183,7 +183,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Overridden in derived typers def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel) - // Overridden to do nothing in derived typers + /** Apply given migration. Overridden to use `disabled` instead in ReTypers. */ protected def migrate[T](migration: => T, disabled: => T = ()): T = migration /** Find the type of an identifier with given `name` in given context `ctx`. @@ -869,7 +869,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer type Alts = List[(/*prev: */Tree, /*prevState: */TyperState, /*prevWitness: */TermRef)] /** Compare two alternative selections `alt1` and `alt2` from witness types - * `wit1`, `wit2` according to the 3 criteria in the enclosing doc comment. I.e. + * `wit1`, `wit2` according to the 3 criteria in Step 3 of the doc comment + * of annotation.internal.WitnessNames. I.e. 
* * alt1 = qual1.m, alt2 = qual2.m, qual1: wit1, qual2: wit2 * @@ -887,13 +888,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case (tp1: TermRef, tp2: TermRef) => if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 else compare(tp1, tp2, preferGeneral = false) - case (tp1: TermRef, _) => 1 + case (tp1: TermRef, _) => 1 // should not happen, but prefer TermRefs over othersver others case (_, tp2: TermRef) => -1 case _ => 0 - /** Find the set of maximally preferred alternative among `prev` and the - * remaining alternatives generated from `witnesses` with is a union type - * of witness references. + /** Find the set of maximally preferred alternatives among `prevs` and + * alternatives referred to by `witnesses`. + * @param prevs a list of (ref tree, typer state, term ref) tripls that + * represents previously identified alternatives + * @param witnesses a type of the form ref_1 | ... | ref_n containing references + * still to be considered. */ def tryAlts(prevs: Alts, witnesses: Type): Alts = witnesses match case OrType(wit1, wit2) => @@ -905,10 +909,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def current = (alt, altCtx.typerState, witness) if altCtx.reporter.hasErrors then prevs else - val cmps = prevs.map: (prevTree, prevState, prevWitness) => + val comparisons = prevs.map: (prevTree, prevState, prevWitness) => compareAlts(prevTree, alt, prevWitness, witness) - if cmps.exists(_ == 1) then prevs - else current :: prevs.zip(cmps).collect{ case (prev, cmp) if cmp != -1 => prev } + if comparisons.exists(_ == 1) then prevs + else current :: prevs.zip(comparisons).collect{ case (prev, cmp) if cmp != -1 => prev } qual.tpe.widen match case AppliedType(_, arg :: Nil) => @@ -2370,9 +2374,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else + def 
selfNote = + if Feature.enabled(modularity) then + " and\ndoes not have an abstract type member named `Self` either" + else "" errorTree(tree, - em"""Illegal context bound: ${tycon.tpe} does not take type parameters and - |does not have an abstract type member named `Self` either.""") + em"Illegal context bound: ${tycon.tpe} does not take type parameters$selfNote.") def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) @@ -2605,7 +2612,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var name = tree.name if (name == nme.WILDCARD && tree.mods.is(Given)) { val Typed(_, tpt) = tree.body: @unchecked - name = desugar.inventGivenOrExtensionName(tpt) + name = desugar.inventGivenName(tpt) } if (name == nme.WILDCARD) body1 else { @@ -2725,6 +2732,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if filters == List(MessageFilter.None) then sup.markUsed() ctx.run.nn.suppressions.addSuppression(sup) + /** Run `typed` on `rhs` except if `rhs` is the right hand side of a deferred given, + * in which case the empty tree is returned. 
+ */ + private inline def excludeDeferredGiven( + rhs: untpd.Tree, sym: Symbol)( + inline typed: untpd.Tree => Tree)(using Context): Tree = + rhs match + case rhs: RefTree + if rhs.name == nme.deferred && sym.isAllOf(DeferredGivenFlags, butNot = Param) => + EmptyTree + case _ => + typed(rhs) + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) @@ -2732,15 +2752,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.is(Implicit) then checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) val tpt1 = checkSimpleKinded(typedType(tpt)) - val rhs1 = vdef.rhs match { + val rhs1 = vdef.rhs match case rhs @ Ident(nme.WILDCARD) => rhs.withType(tpt1.tpe) - case rhs: RefTree - if rhs.name == nme.deferred && sym.isAllOf(DeferredGivenFlags, butNot = Param) => - EmptyTree case rhs => - typedExpr(rhs, tpt1.tpe.widenExpr) - } + excludeDeferredGiven(rhs, sym): + typedExpr(_, tpt1.tpe.widenExpr) val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) postProcessInfo(vdef1, sym) vdef1.setDefTree @@ -2800,13 +2817,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.isInlineMethod then rhsCtx.addMode(Mode.InlineableBody) if sym.is(ExtensionMethod) then rhsCtx.addMode(Mode.InExtensionMethod) - val rhs1 = ddef.rhs match - case Ident(nme.deferred) if sym.isAllOf(DeferredGivenFlags) => - EmptyTree - case rhs => - PrepareInlineable.dropInlineIfError(sym, - if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) - else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) + val rhs1 = excludeDeferredGiven(ddef.rhs, sym): rhs => + PrepareInlineable.dropInlineIfError(sym, + if sym.isScala2Macro then typedScala2MacroBody(rhs)(using rhsCtx) + else typedExpr(rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then if StagingLevel.level > 0 then diff --git 
a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala index f859cda96d06..80b8fea4a84d 100644 --- a/library/src/scala/annotation/internal/WitnessNames.scala +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -36,7 +36,7 @@ package internal * 2. The underlying type (under widen) of ref_i is a true supertype of the * underlying type of ref_j. * 3. ref_i.m is a term, the underlying type of ref_j is not a strict subtype - * of the underlying type of ref_j, and the underlying type ref_i.m is a + * of the underlying type of ref_i, and the underlying type ref_i.m is a * strict subtype of the underlying type of ref_j.m. * * If there is such a selection, map A.m to ref_i.m, otherwise report an error. @@ -48,6 +48,7 @@ package internal * * 4. At PostTyper, issue an error when encountering any reference to a CB companion. */ +@experimental class WitnessNames(names: String*) extends StaticAnnotation diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index be76941a680b..a3896a1eeb06 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -1,7 +1,7 @@ package scala package compiletime -import annotation.compileTimeOnly +import annotation.{compileTimeOnly, experimental} /** Use this method when you have a type, do not have a value for it but want to * pattern match on it. For example, given a type `Tup <: Tuple`, one can @@ -52,6 +52,7 @@ def uninitialized: Nothing = ??? * that implement the enclosing trait and that do not contain an explicit overriding * definition of that given. */ +@experimental @compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait") def deferred: Nothing = ??? 
diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 6c286f322ba7..77b014b80466 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -77,6 +77,7 @@ object Predef: * * which is what is needed for a context bound `[A: TC]`. */ + @experimental infix type is[A <: AnyKind, B <: Any{type Self <: AnyKind}] = B { type Self = A } end Predef diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index eded51f70f31..55806fa5ca1b 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -1,8 +1,4 @@ --- [E040] Syntax Error: tests/neg/i12348.scala:2:16 -------------------------------------------------------------------- -2 | given inline x: Int = 0 // error // error - | ^ - | an identifier expected, but ':' found --- [E067] Syntax Error: tests/neg/i12348.scala:2:8 --------------------------------------------------------------------- -2 | given inline x: Int = 0 // error // error - | ^ - |Declaration of given instance given_x_inline_ not allowed here: only classes can have declared but undefined members +-- [E040] Syntax Error: tests/neg/i12348.scala:2:15 -------------------------------------------------------------------- +2 | given inline x: Int = 0 // error + | ^ + | 'with' expected, but identifier found diff --git a/tests/neg/i12348.scala b/tests/neg/i12348.scala index 43daf9a2801b..bd8bf63994e6 100644 --- a/tests/neg/i12348.scala +++ b/tests/neg/i12348.scala @@ -1,2 +1,2 @@ object A { - given inline x: Int = 0 // error // error + given inline x: Int = 0 // error diff --git a/tests/pos/typeclasses-this.scala b/tests/pos/typeclasses-this.scala index 20ce78678b22..33ccb8d9d653 100644 --- a/tests/pos/typeclasses-this.scala +++ b/tests/pos/typeclasses-this.scala @@ -36,7 +36,7 @@ end Common object Instances extends Common: - given intOrd: Int is Ord with + given intOrd: (Int is Ord) with extension (x: Int) def 
compareTo(y: Int) = if x < y then -1 @@ -44,7 +44,7 @@ object Instances extends Common: else 0 // given [T](using tracked val ev: Ord { type Self = T}): Ord { type Self = List[T] } with - given [T: Ord]: List[T] is Ord with + given [T: Ord]: (List[T] is Ord) with extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -53,7 +53,7 @@ object Instances extends Common: val fst = x.compareTo(y) if (fst != 0) fst else xs1.compareTo(ys1) - given listMonad: List is Monad with + given listMonad: (List is Monad) with extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = @@ -61,7 +61,7 @@ object Instances extends Common: type Reader[Ctx] = [X] =>> Ctx => X - given readerMonad[Ctx]: Reader[Ctx] is Monad with + given readerMonad[Ctx]: (Reader[Ctx] is Monad) with extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -83,7 +83,7 @@ object Instances extends Common: def maximum[T: Ord](xs: List[T]): T = xs.reduce(_ `max` _) - given descending[T: Ord]: T is Ord with + given descending[T: Ord]: (T is Ord) with extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) def minimum[T: Ord](xs: List[T]) = diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index df35bed19360..9a01e711537b 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -80,8 +80,11 @@ val experimentalDefinitionInLibrary = Set( "scala.NamedTupleDecomposition", "scala.NamedTupleDecomposition$", - // New feature: Precise trait + // New feature: modularity "scala.Precise", + "scala.annotation.internal.WitnessNames", + "scala.compiletime.package$package$.deferred", + "scala.Predef$.is", ) From ff98f011d9b6172df58d5b4cc1345b1e8539aedd Mon Sep 17 00:00:00 
2001 From: odersky Date: Mon, 6 May 2024 16:07:04 +0200 Subject: [PATCH 368/465] Fix rebase breakage [Cherry-picked d3e6a952d4e3914d8f7cfc1054f6ddbeab9b33c5] --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index fd4c634801be..c3369ac58e31 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1880,7 +1880,7 @@ trait Applications extends Compatibility { val tp1p = prepare(tp1) val tp2p = prepare(tp2) - if Feature.sourceVersion.isAtMost(SourceVersion.`3.5`) + if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) || oldResolution || !alt1isGiven && !alt2isGiven then diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 9a01e711537b..7079c7320ba0 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -84,7 +84,7 @@ val experimentalDefinitionInLibrary = Set( "scala.Precise", "scala.annotation.internal.WitnessNames", "scala.compiletime.package$package$.deferred", - "scala.Predef$.is", + "scala.runtime.stdLibPatches.Predef$.is", ) From 0bcf69c8188e53b947e4d257d63a5b7c1463ce34 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 16:07:33 +0200 Subject: [PATCH 369/465] Make Singleton an erased class only under modularity import [Cherry-picked b2f0791a0ac337474fdd223085f8da6ee03ac01e] --- compiler/src/dotty/tools/dotc/core/TypeUtils.scala | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index dd881bb1adf6..afc2cc39f9cf 100644 
--- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -8,6 +8,7 @@ import Names.{Name, TermName} import Constants.Constant import Names.Name +import config.Feature class TypeUtils: /** A decorator that provides methods on types @@ -22,7 +23,11 @@ class TypeUtils: self.classSymbol.isPrimitiveValueClass def isErasedClass(using Context): Boolean = - self.underlyingClassRef(refinementOK = true).typeSymbol.is(Flags.Erased) + val cls = self.underlyingClassRef(refinementOK = true).typeSymbol + cls.is(Flags.Erased) + && (cls != defn.SingletonClass || Feature.enabled(Feature.modularity)) + // Singleton counts as an erased class only under x.modularity + /** Is this type a checked exception? This is the case if the type * derives from Exception but not from RuntimeException. According to @@ -179,7 +184,7 @@ class TypeUtils: def isThisTypeOf(cls: Symbol)(using Context) = self match case self: Types.ThisType => self.cls == cls case _ => false - + /** Strip all outer refinements off this type */ def stripRefinement: Type = self match case self: RefinedOrRecType => self.parent.stripRefinement From 42de3703a50043284d9963891dfa9db8bf545f64 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 19:07:52 +0200 Subject: [PATCH 370/465] Address review comments [Cherry-picked 4f28d8a892b835a2598e10a7af48b05ed5a19e32] --- .../src/dotty/tools/dotc/ast/Desugar.scala | 28 ++++++++++--------- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 24 ++++++---------- .../src/dotty/tools/dotc/core/Flags.scala | 2 +- .../src/dotty/tools/dotc/core/NamerOps.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 18 ++++++------ .../dotty/tools/dotc/parsing/Parsers.scala | 7 +++-- .../src/dotty/tools/dotc/typer/Namer.scala | 4 +-- .../src/dotty/tools/dotc/typer/Typer.scala | 4 +-- docs/_docs/internals/syntax.md | 2 +- .../annotation/internal/WitnessNames.scala | 4 +-- 10 files changed, 46 insertions(+), 49 deletions(-) diff --git 
a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 0681492a4ba7..b1b771bc7512 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -234,7 +234,7 @@ object desugar { * of the same method. * @param evidenceFlags The flags to use for evidence definitions * @param freshName A function to generate fresh names for evidence definitions - * @param allParams If `tdef` is a type paramter, all parameters of the owning method, + * @param allParamss If `tdef` is a type paramter, all parameters of the owning method, * otherwise the empty list. */ private def desugarContextBounds( @@ -246,29 +246,31 @@ object desugar { val evidenceNames = mutable.ListBuffer[TermName]() - def desugarRhs(rhs: Tree): Tree = rhs match - case ContextBounds(tbounds, cxbounds) => + def desugarRHS(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, ctxbounds) => val isMember = evidenceFlags.isAllOf(DeferredGivenFlags) - for bound <- cxbounds do + for bound <- ctxbounds do val evidenceName = bound match case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => ownName // if there is an explicitly given name, use it. 
- case _ if Config.nameSingleContextBounds && !isMember - && cxbounds.tail.isEmpty && Feature.enabled(Feature.modularity) => - tdef.name.toTermName case _ => - freshName(bound) + if Config.nameSingleContextBounds + && !isMember + && ctxbounds.tail.isEmpty + && Feature.enabled(Feature.modularity) + then tdef.name.toTermName + else freshName(bound) evidenceNames += evidenceName val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(evidenceFlags) evidenceParam.pushAttachment(ContextBoundParam, ()) evidenceBuf += evidenceParam tbounds case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarRhs(body)) + cpy.LambdaTypeTree(rhs)(tparams, desugarRHS(body)) case _ => rhs - val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRHS(tdef.rhs)) // Under x.modularity, if there was a context bound, and `tdef`s name as a term name is // neither a name of an existing parameter nor a name of generated evidence for // the same method, add a WitnessAnnotation with all generated evidence names to `tdef`. @@ -695,10 +697,10 @@ object desugar { case _ => false } - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + /** Is this a repeated argument x* (using a spread operator)? */ + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match case PostfixOp(_, Ident(tpnme.raw.STAR)) => true case _ => false - } def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { val targs = for (tparam <- tparams) yield { @@ -1218,7 +1220,7 @@ object desugar { /** Extract a synthesized given name from a type tree. This is used for * both anonymous givens and (under x.modularity) deferred givens. 
- * @param followArgs If true include argument types in the name + * @param followArgs if true include argument types in the name */ private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { private def extractArgs(args: List[Tree])(using Context): String = diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 11fb572b66c6..97de434ba9d5 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -394,22 +394,16 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => )) def unapply(tree: Tree)(using Context): Option[List[TermName]] = - def isWitnessNames(tp: Type) = tp match - case tp: TypeRef => - tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot - case _ => - false unsplice(tree) match - case Apply( - Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), - SeqLiteral(elems, _) :: Nil - ) if isWitnessNames(tpt.tpe) => - Some: - elems.map: - case Literal(Constant(str: String)) => - ContextBoundParamName.unmangle(str.toTermName.asSimpleName) - case _ => - None + case Apply(Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), SeqLiteral(elems, _) :: Nil) => + tpt.tpe match + case tp: TypeRef if tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot => + Some: + elems.map: + case Literal(Constant(str: String)) => + ContextBoundParamName.unmangle(str.toTermName.asSimpleName) + case _ => None + case _ => None end WitnessNamesAnnot } diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index e17834d61fdc..b1bf7a266c91 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -573,7 +573,7 @@ object Flags { val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without 
right-hand sides val DeferredOrTypeParam: FlagSet = Deferred | TypeParam // type symbols without right-hand sides - val DeferredGivenFlags = Deferred | Given | HasDefault + val DeferredGivenFlags: FlagSet = Deferred | Given | HasDefault val EnumValue: FlagSet = Enum | StableRealizable // A Scala enum value val FinalOrInline: FlagSet = Final | Inline val FinalOrModuleClass: FlagSet = Final | ModuleClass // A module class or a final class diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 5e76b09bbde6..07cb9292baa4 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -262,7 +262,7 @@ object NamerOps: * bound that defines a set of witnesses with names `witnessNames`. * * @param params If `tsym` is a type parameter, a list of parameter symbols - * that include all witnesses, otherwise the empty list. + * that includes all witnesses, otherwise the empty list. * * The context-bound companion has as name the name of `tsym` translated to * a term name. We create a synthetic val of the form diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index a92893678a17..eeffc41d4159 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1655,7 +1655,7 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` if `S` + * to just U. Analogously, `P { val x: S} # x` is reduced to `S` if `S` * is a singleton type. * * Does not perform the reduction if the resulting type would contain @@ -5050,14 +5050,14 @@ object Types extends TypeUtils { * or if it has as an upper bound a precise TypeVar. 
*/ def isPrecise(using Context) = - precise - || { - val constr = ctx.typerState.constraint - constr.upper(origin).exists: tparam => - constr.typeVarOfParam(tparam) match - case tvar: TypeVar => tvar.precise - case _ => false - } + precise || hasPreciseUpperBound + + private def hasPreciseUpperBound(using Context) = + val constr = ctx.typerState.constraint + constr.upper(origin).exists: tparam => + constr.typeVarOfParam(tparam) match + case tvar: TypeVar => tvar.precise + case _ => false /** The policy used for widening singletons or unions when instantiating * this variable in the current context. diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index fe23d97d58c3..e28ba5fd669e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3552,9 +3552,10 @@ object Parsers { !impliedMods.is(Given) || startParamTokens.contains(in.token) || isIdent - && (in.name == nme.inline - || in.name == nme.tracked && in.featureEnabled(Feature.modularity) - || in.lookahead.isColon) + && (in.name == nme.inline // inline starts a name binding + || in.name == nme.tracked // tracked starts a name binding under x.modularity + && in.featureEnabled(Feature.modularity) + || in.lookahead.isColon) // a following `:` starts a name binding (mods, paramsAreNamed) val params = if paramsAreNamed then commaSeparated(() => param()) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 0588e27ea54f..83964417a6f1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1860,7 +1860,7 @@ class Namer { typer: Typer => for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do witnessNamesOfParam(tdef) = ws - /** Is each name in `wnames` defined spmewhere in the longest prefix of all `params` + /** Is each name in `wnames` defined somewhere in 
the longest prefix of all `params` * that have been typed ahead (i.e. that carry the TypedAhead attachment)? */ def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = @@ -1919,7 +1919,7 @@ class Namer { typer: Typer => && param.hasAttachment(ContextBoundParam) && sym.info.memberNames(abstractTypeNameFilter).nonEmpty - /** Set every context bound evidence parameter of a class to be tracked, + /** Under x.modularity, set every context bound evidence parameter of a class to be tracked, * provided it has a type that has an abstract type member. Reset private and local flags * so that the parameter becomes a `val`. */ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2eeccb6e477d..2a69c948baae 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -888,7 +888,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case (tp1: TermRef, tp2: TermRef) => if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 else compare(tp1, tp2, preferGeneral = false) - case (tp1: TermRef, _) => 1 // should not happen, but prefer TermRefs over othersver others + case (tp1: TermRef, _) => 1 // should not happen, but prefer TermRefs over others case (_, tp2: TermRef) => -1 case _ => 0 @@ -4588,7 +4588,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = true) + val tycon = ctorResultType.underlyingClassRef(refinementOK = Feature.enabled(modularity)) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 05f89a344148..dd4a3af403ab 100644 --- a/docs/_docs/internals/syntax.md +++ 
b/docs/_docs/internals/syntax.md @@ -191,7 +191,7 @@ MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) -AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) +AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) SimpleType ::= SimpleLiteral SingletonTypeTree(l) | ‘?’ TypeBounds diff --git a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala index 80b8fea4a84d..3921c2083617 100644 --- a/library/src/scala/annotation/internal/WitnessNames.scala +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -11,7 +11,7 @@ package internal * * 2. During Namer or Unpickling, when encountering a type declaration A with * a WitnessNames(n_1, ... , n_k) annotation, create a CB companion `val A` with - * rtype ``[ref_1 | ... | ref_k] where ref_i is a TermRef + * type ``[ref_1 | ... | ref_k] where ref_i is a TermRef * with the same prefix as A and name n_i. Except, don't do this if the type in * question is a type parameter and there is already a term parameter with name A * defined for the same method. @@ -20,7 +20,7 @@ package internal * * type ``[-Refs] * - * The context bound companion's variance is negative, so that unons in the + * The context bound companion's variance is negative, so that unions in the * arguments are joined when encountering multiple definfitions and forming a glb. * * 3. 
Add a special case for typing a selection A.m on a value A of type From 65218227371ea4e5eae43ae67ba4fb308bc46d1a Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 20:33:48 +0200 Subject: [PATCH 371/465] Address review comments with changed docs and new tests [Cherry-picked 0dddcb7fb9511acf8e8ca676c95768d8b445d7bd] --- .../reference/experimental/typeclasses.md | 5 +--- tests/neg/deferred-givens-2.check | 12 ++++++++++ tests/neg/deferred-givens-2.scala | 23 +++++++++++++++++++ tests/pending/pos/cbproxy-default.scala | 4 ++++ tests/pending/pos/singleton-infer.scala | 8 +++++++ tests/pos/cbproxy-expansion.scala | 16 +++++++++++++ 6 files changed, 64 insertions(+), 4 deletions(-) create mode 100644 tests/neg/deferred-givens-2.check create mode 100644 tests/neg/deferred-givens-2.scala create mode 100644 tests/pending/pos/cbproxy-default.scala create mode 100644 tests/pending/pos/singleton-infer.scala create mode 100644 tests/pos/cbproxy-expansion.scala diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index cf5f3220faa6..a78e764bbe7d 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -1,12 +1,9 @@ - --- layout: doc-page -title: "Type Classes" +title: "Better Support for Type Classes" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html --- -# Some Proposed Changes for Better Support of Type Classes - Martin Odersky, 8.1.2024, edited 5.4.2024 A type class in Scala is a pattern where we define diff --git a/tests/neg/deferred-givens-2.check b/tests/neg/deferred-givens-2.check new file mode 100644 index 000000000000..4a29141cc48b --- /dev/null +++ b/tests/neg/deferred-givens-2.check @@ -0,0 +1,12 @@ +-- [E172] Type Error: tests/neg/deferred-givens-2.scala:17:6 ----------------------------------------------------------- +17 |class SortedIntWrong1 extends Sorted: // error + |^ + |No given instance of type 
Ord{type Self = SortedIntWrong1.this.Element} was found for inferring the implementation of the deferred given instance given_Ord_Element in trait Sorted +18 | type Element = Int +19 | override given (Element is Ord)() +-- [E172] Type Error: tests/neg/deferred-givens-2.scala:21:6 ----------------------------------------------------------- +21 |class SortedIntWrong2 extends Sorted: // error + |^ + |No given instance of type Ord{type Self = SortedIntWrong2.this.Element} was found for inferring the implementation of the deferred given instance given_Ord_Element in trait Sorted +22 | type Element = Int +23 | override given (Int is Ord)() diff --git a/tests/neg/deferred-givens-2.scala b/tests/neg/deferred-givens-2.scala new file mode 100644 index 000000000000..4e75ceb08728 --- /dev/null +++ b/tests/neg/deferred-givens-2.scala @@ -0,0 +1,23 @@ +//> using options -language:experimental.modularity -source future +trait Ord: + type Self + +trait Sorted: + type Element: Ord + +object Scoped: + given (Int is Ord)() + class SortedIntCorrect extends Sorted: + type Element = Int + +class SortedIntCorrect2 extends Sorted: + type Element = Int + override given (Int is Ord)() as given_Ord_Element + +class SortedIntWrong1 extends Sorted: // error + type Element = Int + override given (Element is Ord)() + +class SortedIntWrong2 extends Sorted: // error + type Element = Int + override given (Int is Ord)() \ No newline at end of file diff --git a/tests/pending/pos/cbproxy-default.scala b/tests/pending/pos/cbproxy-default.scala new file mode 100644 index 000000000000..e8f12ceeae75 --- /dev/null +++ b/tests/pending/pos/cbproxy-default.scala @@ -0,0 +1,4 @@ +def f[S: Monad]( + initial: S.Self = S.unit // error +) = + S.unit // works \ No newline at end of file diff --git a/tests/pending/pos/singleton-infer.scala b/tests/pending/pos/singleton-infer.scala new file mode 100644 index 000000000000..72e00baf3aab --- /dev/null +++ b/tests/pending/pos/singleton-infer.scala @@ -0,0 +1,8 @@ +//> 
using options -Xprint:typer -language:experimental.modularity -source future + +def f1[S, T <: S : Singleton](x: S) = () +def f2[S, T >: S : Singleton](x: S) = () + +def Test = + f1(42) // f1[Int, Singleton & Int] // should infer (42 : Int) or throw an error? + f2(42) // f2[(42 : Int), (42 : Int)] \ No newline at end of file diff --git a/tests/pos/cbproxy-expansion.scala b/tests/pos/cbproxy-expansion.scala new file mode 100644 index 000000000000..ee145b62d4ed --- /dev/null +++ b/tests/pos/cbproxy-expansion.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +trait TC[T]: + type Self + +def f1[S, T: TC[S] as tc](x: S, y: tc.Self) = () +def f2[S, T: TC[S]](x: S, y: T.Self) = () +def f3[S, T: TC[S]](x: S, y: Int) = () + +given TC[String] with + type Self = Int + def unit = 42 + +def main = + f1("hello", 23) + f2("hello", 23) + f3("hello", 23) From 46c3eca537cafdcdc9c6e0eb3ccb0f03e2c4485d Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 7 May 2024 12:43:03 +0200 Subject: [PATCH 372/465] Update warn check files Error number changed [Cherry-picked 62e0244d0f77b4b9158da20d5a252e24e51e5db2] --- tests/warn/i16723.check | 2 +- tests/warn/i16723a.check | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/warn/i16723.check b/tests/warn/i16723.check index ed8e55502a80..6d55fa0a89d2 100644 --- a/tests/warn/i16723.check +++ b/tests/warn/i16723.check @@ -1,4 +1,4 @@ --- [E195] Potential Issue Warning: tests/warn/i16723.scala:3:2 --------------------------------------------------------- +-- [E197] Potential Issue Warning: tests/warn/i16723.scala:3:2 --------------------------------------------------------- 3 | new Object {} // warn | ^ | New anonymous class definition will be duplicated at each inline site diff --git a/tests/warn/i16723a.check b/tests/warn/i16723a.check index ba4794fac23e..ace11c5af1f9 100644 --- a/tests/warn/i16723a.check +++ b/tests/warn/i16723a.check @@ -1,4 +1,4 @@ --- [E195] Potential Issue Warning: 
tests/warn/i16723a.scala:5:38 ------------------------------------------------------- +-- [E197] Potential Issue Warning: tests/warn/i16723a.scala:5:38 ------------------------------------------------------- 5 |inline given Converter[Int, String] = new Converter { // warn | ^ | New anonymous class definition will be duplicated at each inline site From 97afac079b522d7158eaa28505bff19c52d43fe3 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 7 May 2024 13:05:53 +0200 Subject: [PATCH 373/465] Update InlayHints [Cherry-picked 9959f28ab5008d4a8deeb78f3764cec641f439db] --- .../test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala index e470f492657c..8ce7cdce4382 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala @@ -898,7 +898,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { | import quotes.reflect.* | Type.of[T] match | case '[f] => - | val fr/*: TypeRepr<>*/ = TypeRepr.of[T]/*(using evidence$1<<(3:21)>>)*/ + | val fr/*: TypeRepr<>*/ = TypeRepr.of[T]/*(using evidence$1<<(3:23)>>)*/ |""".stripMargin ) From 8482eb15b9c8faa6f15c2dedb9a54a5f8b731b2d Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 7 May 2024 15:15:39 +0200 Subject: [PATCH 374/465] Fix typo [Cherry-picked 3c78ada957b8f77f6055ea280e09693f40d0e845] --- library/src/scala/runtime/stdLibPatches/language.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index a5cd683775f0..1171c62602fb 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ 
-96,7 +96,7 @@ object language: * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] */ @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") - object namedTupleas + object namedTuples /** Experimental support for new features for better modularity, including * - better tracking of dependencies through classes From cb37a1fd1b77e01159b777fa3180c148d055eae8 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 22 Apr 2024 10:23:54 +0200 Subject: [PATCH 375/465] step 1: basic script that forwards to prebuilt launcher and overrides default scala-version [Cherry-picked 4d291fb3b68867d5a970faad52d1ab5a861b395a] --- dist/bin/cli-common | 26 ++++++++++++++++ dist/bin/scala | 56 +++++++++++---------------------- dist/bin/scala_legacy | 72 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 116 insertions(+), 38 deletions(-) create mode 100644 dist/bin/cli-common create mode 100755 dist/bin/scala_legacy diff --git a/dist/bin/cli-common b/dist/bin/cli-common new file mode 100644 index 000000000000..780094f2e3d8 --- /dev/null +++ b/dist/bin/cli-common @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +SCALA_CLI_LAUNCHER="/Users/jamie/workspace/scala-cli/out/cli/3.3.3/launcher.dest/launcher" + +#/*-------------------------------------------------------------------------- +# * Credits: This script is based on the script generated by sbt-pack. +# *--------------------------------------------------------------------------*/ + +# save terminal settings +saved_stty=$(stty -g 2>/dev/null) +# clear on error so we don't later try to restore them +if [[ ! $? 
]]; then + saved_stty="" +fi + +# restore stty settings (echo in particular) +function restoreSttySettings() { + stty $saved_stty + saved_stty="" +} + +scala_exit_status=127 +function onExit() { + [[ "$saved_stty" != "" ]] && restoreSttySettings + exit $scala_exit_status +} diff --git a/dist/bin/scala b/dist/bin/scala index bd69d40c2b97..04215fcfaa0b 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -26,47 +26,27 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" - -while [[ $# -gt 0 ]]; do - case "$1" in - -D*) - # pass to scala as well: otherwise we lose it sometimes when we - # need it, e.g. communicating with a server compiler. - # respect user-supplied -Dscala.usejavacp - addJava "$1" - addScala "$1" - shift - ;; - -J*) - # as with -D, pass to scala even though it will almost - # never be used. - addJava "${1:2}" - addScala "$1" - shift - ;; - -classpath*) - if [ "$1" != "${1##* }" ]; then - # -classpath and its value have been supplied in a single string e.g. 
"-classpath 'lib/*'" - A=$1 ; shift # consume $1 before adding its substrings back - set -- $A "$@" # split $1 on whitespace and put it back - else - addScala "$1" - shift - fi - ;; - *) - addScala "$1" - shift - ;; - esac -done +source "$PROG_HOME/bin/cli-common" + +SCALA_VERSION="" +# iterate through lines in VERSION_SRC +while IFS= read -r line; do + # if line starts with "version:=" then extract the version + if [[ "$line" == version:=* ]]; then + SCALA_VERSION="${line#version:=}" + break + fi +done < "$PROG_HOME/VERSION" + +# assert that SCALA_VERSION is not empty +if [ -z "$SCALA_VERSION" ]; then + echo "Failed to extract Scala version from $PROG_HOME/VERSION" + exit 1 +fi # exec here would prevent onExit from being called, leaving terminal in unusable state -compilerJavaClasspathArgs [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 -eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" +eval "\"$SCALA_CLI_LAUNCHER\"" "--cli-default-scala-version \"$SCALA_VERSION\"" "$@" scala_exit_status=$? - onExit diff --git a/dist/bin/scala_legacy b/dist/bin/scala_legacy new file mode 100755 index 000000000000..bd69d40c2b97 --- /dev/null +++ b/dist/bin/scala_legacy @@ -0,0 +1,72 @@ +#!/usr/bin/env bash + +# Try to autodetect real location of the script +if [ -z "${PROG_HOME-}" ] ; then + ## resolve links - $0 may be a link to PROG_HOME + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + PROG_HOME=`dirname "$PRG"`/.. 
+ + # make it fully qualified + PROG_HOME=`cd "$PROG_HOME" && pwd` + + cd "$saveddir" +fi + +source "$PROG_HOME/bin/common" + +while [[ $# -gt 0 ]]; do + case "$1" in + -D*) + # pass to scala as well: otherwise we lose it sometimes when we + # need it, e.g. communicating with a server compiler. + # respect user-supplied -Dscala.usejavacp + addJava "$1" + addScala "$1" + shift + ;; + -J*) + # as with -D, pass to scala even though it will almost + # never be used. + addJava "${1:2}" + addScala "$1" + shift + ;; + -classpath*) + if [ "$1" != "${1##* }" ]; then + # -classpath and its value have been supplied in a single string e.g. "-classpath 'lib/*'" + A=$1 ; shift # consume $1 before adding its substrings back + set -- $A "$@" # split $1 on whitespace and put it back + else + addScala "$1" + shift + fi + ;; + *) + addScala "$1" + shift + ;; + esac +done + +# exec here would prevent onExit from being called, leaving terminal in unusable state +compilerJavaClasspathArgs +[ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 +eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" +scala_exit_status=$? + + +onExit From 1784e67234ff5e9f611a82b8574c804d6f02a57d Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 25 Apr 2024 09:33:31 +0200 Subject: [PATCH 376/465] Resolve artefacts to a local repo: assemble a map of artefacts in maven format to both the local artefacts and library dependencies. Write them to dist/target/local-repo/maven2. Copy the local-repo to dist/target/pack/local. 
TODO: - evaluate how to remove lib dir in pack, only resolve from repo [Cherry-picked 420e2450b5f64d96e02d57823d59dc9739c3c60c] --- dist/bin/cli-common | 6 ++ dist/bin/scala | 13 ++++- project/Build.scala | 135 ++++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 148 insertions(+), 6 deletions(-) diff --git a/dist/bin/cli-common b/dist/bin/cli-common index 780094f2e3d8..67b8893223d3 100644 --- a/dist/bin/cli-common +++ b/dist/bin/cli-common @@ -24,3 +24,9 @@ function onExit() { [[ "$saved_stty" != "" ]] && restoreSttySettings exit $scala_exit_status } + +declare -a scala_args + +addScala () { + scala_args+=("'$1'") +} diff --git a/dist/bin/scala b/dist/bin/scala index 04215fcfaa0b..7d813d265e73 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -44,9 +44,20 @@ if [ -z "$SCALA_VERSION" ]; then exit 1 fi +MVN_REPOSITORY="file://$PROG_HOME/local/maven2" + +# escape all script arguments +while [[ $# -gt 0 ]]; do + addScala "$1" + shift +done + # exec here would prevent onExit from being called, leaving terminal in unusable state [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 -eval "\"$SCALA_CLI_LAUNCHER\"" "--cli-default-scala-version \"$SCALA_VERSION\"" "$@" +eval "\"$SCALA_CLI_LAUNCHER\"" \ + "--cli-default-scala-version \"$SCALA_VERSION\"" \ + "-r \"$MVN_REPOSITORY\"" \ + "${scala_args[@]}" scala_exit_status=$? 
onExit diff --git a/project/Build.scala b/project/Build.scala index 350471cc3e12..6ebfa04f974f 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -26,6 +26,8 @@ import sbttastymima.TastyMiMaPlugin import sbttastymima.TastyMiMaPlugin.autoImport._ import scala.util.Properties.isJavaAtLeast +import scala.collection.mutable + import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ import org.scalajs.linker.interface.{ModuleInitializer, StandardConfig} @@ -2110,17 +2112,128 @@ object Build { ) ) - lazy val commonDistSettings = Seq( + lazy val DistCacheConfig = config("DistCacheConfig") extend Compile + + val distModules = taskKey[Seq[(ModuleID, Map[Artifact, File])]]("fetch local artifacts for distribution.") + val distResolvedArtifacts = taskKey[Seq[ResolvedArtifacts]]("Resolve the dependencies for the distribution") + val distCaching = taskKey[File]("cache the dependencies for the distribution") + + def evalPublishSteps(dependencies: Seq[ProjectReference]): Def.Initialize[Task[Seq[(ModuleID, Map[Artifact, File])]]] = { + val publishAllLocalBin = dependencies.map({ d => ((d / publishLocalBin / packagedArtifacts)) }).join + val resolveId = dependencies.map({ d => ((d / projectID)) }).join + Def.task { + val s = streams.value + val log = s.log + val published = publishAllLocalBin.value + val ids = resolveId.value + + ids.zip(published) + } + } + + case class SimpleModuleId(org: String, name: String, revision: String) { + override def toString = s"$org:$name:$revision" + } + case class ResolvedArtifacts(id: SimpleModuleId, jar: File, pom: File) + + def commonDistSettings(dependencies: Seq[ClasspathDep[ProjectReference]]) = Seq( packMain := Map(), publishArtifact := false, packGenerateMakefile := false, - packExpandedClasspath := true, - packArchiveName := "scala3-" + dottyVersion + packArchiveName := "scala3-" + dottyVersion, + DistCacheConfig / distModules := { + evalPublishSteps(dependencies.map(_.project)).value + }, + DistCacheConfig / 
distResolvedArtifacts := { + val localArtifactIds = (DistCacheConfig / distModules).value + val report = (thisProjectRef / updateFull).value + + val found = mutable.Map.empty[SimpleModuleId, ResolvedArtifacts] + val evicted = mutable.Set.empty[SimpleModuleId] + + localArtifactIds.foreach({ case (id, as) => + val simpleId = { + val name0 = id.crossVersion match { + case _: CrossVersion.Binary => + // projectID does not add binary suffix + (id.name + "_3").ensuring(!id.name.endsWith("_3") && id.revision.startsWith("3.")) + case _ => id.name + } + SimpleModuleId(id.organization, name0, id.revision) + } + var jarOrNull: File = null + var pomOrNull: File = null + as.foreach({ case (a, f) => + if (a.`type` == "jar") { + jarOrNull = f + } else if (a.`type` == "pom") { + pomOrNull = f + } + }) + assert(jarOrNull != null, s"Could not find jar for ${id}") + assert(pomOrNull != null, s"Could not find pom for ${id}") + evicted += simpleId.copy(revision = simpleId.revision + "-nonbootstrapped") + found(simpleId) = ResolvedArtifacts(simpleId, jarOrNull, pomOrNull) + }) + + report.allModuleReports.foreach { mr => + val simpleId = { + val id = mr.module + SimpleModuleId(id.organization, id.name, id.revision) + } + + if (!found.contains(simpleId) && !evicted(simpleId)) { + var jarOrNull: File = null + var pomOrNull: File = null + mr.artifacts.foreach({ case (a, f) => + if (a.`type` == "jar" || a.`type` == "bundle") { + jarOrNull = f + } else if (a.`type` == "pom") { + pomOrNull = f + } + }) + assert(jarOrNull != null, s"Could not find jar for ${simpleId}") + if (pomOrNull == null) { + val jarPath = jarOrNull.toPath + // we found the jar, so assume we can resolve a sibling pom file + val pomPath = jarPath.resolveSibling(jarPath.getFileName.toString.stripSuffix(".jar") + ".pom") + assert(Files.exists(pomPath), s"Could not find pom for ${simpleId}") + pomOrNull = pomPath.toFile + } + found(simpleId) = ResolvedArtifacts(simpleId, jarOrNull, pomOrNull) + } + + } + found.values.toSeq + 
}, + DistCacheConfig / distCaching := { + val resolved = (DistCacheConfig / distResolvedArtifacts).value + val targetDir = target.value + val cacheDir = targetDir / "local-repo" + val mavenRepo = cacheDir / "maven2" + IO.createDirectory(mavenRepo) + resolved.foreach { ra => + val jar = ra.jar + val pom = ra.pom + + val pathElems = ra.id.org.split('.').toVector :+ ra.id.name :+ ra.id.revision + val artifactDir = pathElems.foldLeft(mavenRepo)(_ / _) + IO.createDirectory(artifactDir) + IO.copyFile(jar, artifactDir / jar.getName) + IO.copyFile(pom, artifactDir / pom.getName) + } + cacheDir + }, + Compile / pack := { + val localRepo = (DistCacheConfig / distCaching).value + (Compile / pack).value + } ) lazy val dist = project.asDist(Bootstrapped) .settings( packResourceDir += (baseDirectory.value / "bin" -> "bin"), + packResourceDir += (target.value / "local-repo" -> "local"), ) private def customMimaReportBinaryIssues(issueFilterLocation: String) = mimaReportBinaryIssues := { @@ -2251,12 +2364,24 @@ object Build { def asDist(implicit mode: Mode): Project = project. enablePlugins(PackPlugin). withCommonSettings. - dependsOn(`scala3-interfaces`, dottyCompiler, dottyLibrary, tastyCore, `scala3-staging`, `scala3-tasty-inspector`, scaladoc). - settings(commonDistSettings). + dependsOn( + `scala3-interfaces`, + dottyCompiler, + dottyLibrary, + tastyCore, + `scala3-staging`, + `scala3-tasty-inspector`, + scaladoc, + `scala3-sbt-bridge`, // for scala-cli + ). + withDepSettings(commonDistSettings). 
bootstrappedSettings( target := baseDirectory.value / "target" // override setting in commonBootstrappedSettings ) + def withDepSettings(f: Seq[ClasspathDep[ProjectReference]] => Seq[Setting[?]]): Project = + project.settings(f(project.dependencies)) + def withCommonSettings(implicit mode: Mode): Project = project.settings(mode match { case NonBootstrapped => commonNonBootstrappedSettings case Bootstrapped => commonBootstrappedSettings From 10bd87f5eb8dde7b853c58231dfd63e405e94697 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 26 Apr 2024 16:58:21 +0200 Subject: [PATCH 377/465] use scala-cli jar launcher (TODO: download automatically) [Cherry-picked 465da00d196c6ae4e27ab93fd22a07166bd49758] --- dist/bin/cli-common | 132 +++++++++++++++++++++++++++++++++++++++++++- dist/bin/scala | 3 +- 2 files changed, 132 insertions(+), 3 deletions(-) diff --git a/dist/bin/cli-common b/dist/bin/cli-common index 67b8893223d3..975df5abfee3 100644 --- a/dist/bin/cli-common +++ b/dist/bin/cli-common @@ -1,7 +1,5 @@ #!/usr/bin/env bash -SCALA_CLI_LAUNCHER="/Users/jamie/workspace/scala-cli/out/cli/3.3.3/launcher.dest/launcher" - #/*-------------------------------------------------------------------------- # * Credits: This script is based on the script generated by sbt-pack. # *--------------------------------------------------------------------------*/ @@ -25,6 +23,136 @@ function onExit() { exit $scala_exit_status } +#/*-------------------------------------------------------------------------- +# * SECTION FOR JAVA COMMAND +# *--------------------------------------------------------------------------*/ + +# to reenable echo if we are interrupted before completing. +trap onExit INT TERM EXIT + +unset cygwin mingw msys darwin conemu + +# COLUMNS is used together with command line option '-pageWidth'. 
+if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + if [ -z "$JAVA_VERSION" ] ; then + JAVA_VERSION="CurrentJDK" + else + echo "Using Java version: $JAVA_VERSION" 1>&2 + fi + if [ -z "$JAVA_HOME" ] ; then + JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home + fi + JAVACMD="`which java`" + ;; +esac + +unset CYGPATHCMD +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + # ConEmu terminal is incompatible with jna-5.*.jar + [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true + # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. + CYGPATHCMD=`which cygpath 2>/dev/null` + case "$TERM" in + rxvt* | xterm* | cygwin*) + stty -icanon min 1 -echo + JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" + ;; + esac +fi + +# Resolve JAVA_HOME from javac command path +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + javaExecutable="`readlink -f \"$javaExecutable\"`" + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "${JAVACMD-}" ] ; then + if [ -n "${JAVA_HOME-}" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." 
+ echo " We cannot execute $JAVACMD" + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSPATH_SUFFIX="" +# Path separator used in EXTRA_CLASSPATH +PSEP=":" + +# translate paths to Windows-mixed format before running java +if [ -n "${CYGPATHCMD-}" ]; then + [ -n "${PROG_HOME-}" ] && + PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` + CLASSPATH_SUFFIX=";" + PSEP=";" +elif [[ ${mingw-} || ${msys-} ]]; then + # For Mingw / Msys, convert paths from UNIX format before anything is touched + [ -n "$PROG_HOME" ] && + PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" + CLASSPATH_SUFFIX=";" + PSEP=";" +fi + +#/*-------------------------------------------------- +# * The code below is for Dotty +# *-------------------------------------------------*/ + +find_lib () { + for lib in "$PROG_HOME"/lib/$1 ; do + if [[ -f "$lib" ]]; then + if [ -n "$CYGPATHCMD" ]; then + "$CYGPATHCMD" -am "$lib" + elif [[ $mingw || $msys ]]; then + echo "$lib" | sed 's|/|\\\\|g' + else + echo "$lib" + fi + return + fi + done +} + +SCALA_CLI_JAR=$(find_lib "*scala-cli*") + declare -a scala_args addScala () { diff --git a/dist/bin/scala b/dist/bin/scala index 7d813d265e73..59c5508f6ba4 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -54,7 +54,8 @@ done # exec here would prevent onExit from being called, leaving terminal in unusable state [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 -eval "\"$SCALA_CLI_LAUNCHER\"" \ +eval "\"$JAVACMD\"" \ + "-jar \"$SCALA_CLI_JAR\"" \ "--cli-default-scala-version \"$SCALA_VERSION\"" \ "-r \"$MVN_REPOSITORY\"" \ "${scala_args[@]}" From 237a592b410bab7363f9f24485fe823d3dea2ef5 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 26 Apr 2024 17:26:38 +0200 Subject: [PATCH 378/465] refactor 
republishing to a plugin [Cherry-picked b1cd484d7f977d110f647ed162a389a473774a48] --- project/Build.scala | 120 ++--------------------------- project/RepublishPlugin.scala | 137 ++++++++++++++++++++++++++++++++++ 2 files changed, 144 insertions(+), 113 deletions(-) create mode 100644 project/RepublishPlugin.scala diff --git a/project/Build.scala b/project/Build.scala index 6ebfa04f974f..c3ee90fe722d 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -12,6 +12,8 @@ import pl.project13.scala.sbt.JmhPlugin import pl.project13.scala.sbt.JmhPlugin.JmhKeys.Jmh import sbt.Package.ManifestAttributes import sbt.PublishBinPlugin.autoImport._ +import dotty.tools.sbtplugin.RepublishPlugin +import dotty.tools.sbtplugin.RepublishPlugin.autoImport._ import sbt.plugins.SbtPlugin import sbt.ScriptedPlugin.autoImport._ import xerial.sbt.pack.PackPlugin @@ -2112,120 +2114,14 @@ object Build { ) ) - lazy val DistCacheConfig = config("DistCacheConfig") extend Compile - - val distModules = taskKey[Seq[(ModuleID, Map[Artifact, File])]]("fetch local artifacts for distribution.") - val distResolvedArtifacts = taskKey[Seq[ResolvedArtifacts]]("Resolve the dependencies for the distribution") - val distCaching = taskKey[File]("cache the dependencies for the distribution") - - def evalPublishSteps(dependencies: Seq[ProjectReference]): Def.Initialize[Task[Seq[(ModuleID, Map[Artifact, File])]]] = { - val publishAllLocalBin = dependencies.map({ d => ((d / publishLocalBin / packagedArtifacts)) }).join - val resolveId = dependencies.map({ d => ((d / projectID)) }).join - Def.task { - val s = streams.value - val log = s.log - val published = publishAllLocalBin.value - val ids = resolveId.value - - ids.zip(published) - } - } - - case class SimpleModuleId(org: String, name: String, revision: String) { - override def toString = s"$org:$name:$revision" - } - case class ResolvedArtifacts(id: SimpleModuleId, jar: File, pom: File) - - def commonDistSettings(dependencies: 
Seq[ClasspathDep[ProjectReference]]) = Seq( + lazy val commonDistSettings = Seq( packMain := Map(), publishArtifact := false, packGenerateMakefile := false, packArchiveName := "scala3-" + dottyVersion, - DistCacheConfig / distModules := { - evalPublishSteps(dependencies.map(_.project)).value - }, - DistCacheConfig / distResolvedArtifacts := { - val localArtifactIds = (DistCacheConfig / distModules).value - val report = (thisProjectRef / updateFull).value - - val found = mutable.Map.empty[SimpleModuleId, ResolvedArtifacts] - val evicted = mutable.Set.empty[SimpleModuleId] - - localArtifactIds.foreach({ case (id, as) => - val simpleId = { - val name0 = id.crossVersion match { - case _: CrossVersion.Binary => - // projectID does not add binary suffix - (id.name + "_3").ensuring(!id.name.endsWith("_3") && id.revision.startsWith("3.")) - case _ => id.name - } - SimpleModuleId(id.organization, name0, id.revision) - } - var jarOrNull: File = null - var pomOrNull: File = null - as.foreach({ case (a, f) => - if (a.`type` == "jar") { - jarOrNull = f - } else if (a.`type` == "pom") { - pomOrNull = f - } - }) - assert(jarOrNull != null, s"Could not find jar for ${id}") - assert(pomOrNull != null, s"Could not find pom for ${id}") - evicted += simpleId.copy(revision = simpleId.revision + "-nonbootstrapped") - found(simpleId) = ResolvedArtifacts(simpleId, jarOrNull, pomOrNull) - }) - - report.allModuleReports.foreach { mr => - val simpleId = { - val id = mr.module - SimpleModuleId(id.organization, id.name, id.revision) - } - - if (!found.contains(simpleId) && !evicted(simpleId)) { - var jarOrNull: File = null - var pomOrNull: File = null - mr.artifacts.foreach({ case (a, f) => - if (a.`type` == "jar" || a.`type` == "bundle") { - jarOrNull = f - } else if (a.`type` == "pom") { - pomOrNull = f - } - }) - assert(jarOrNull != null, s"Could not find jar for ${simpleId}") - if (pomOrNull == null) { - val jarPath = jarOrNull.toPath - // we found the jar, so assume we can resolve a 
sibling pom file - val pomPath = jarPath.resolveSibling(jarPath.getFileName.toString.stripSuffix(".jar") + ".pom") - assert(Files.exists(pomPath), s"Could not find pom for ${simpleId}") - pomOrNull = pomPath.toFile - } - found(simpleId) = ResolvedArtifacts(simpleId, jarOrNull, pomOrNull) - } - - } - found.values.toSeq - }, - DistCacheConfig / distCaching := { - val resolved = (DistCacheConfig / distResolvedArtifacts).value - val targetDir = target.value - val cacheDir = targetDir / "local-repo" - val mavenRepo = cacheDir / "maven2" - IO.createDirectory(mavenRepo) - resolved.foreach { ra => - val jar = ra.jar - val pom = ra.pom - - val pathElems = ra.id.org.split('.').toVector :+ ra.id.name :+ ra.id.revision - val artifactDir = pathElems.foldLeft(mavenRepo)(_ / _) - IO.createDirectory(artifactDir) - IO.copyFile(jar, artifactDir / jar.getName) - IO.copyFile(pom, artifactDir / pom.getName) - } - cacheDir - }, + republishRepo := target.value / "local-repo", Compile / pack := { - val localRepo = (DistCacheConfig / distCaching).value + val localRepo = republishClasspath.value // republish all artifacts to local repo (Compile / pack).value } ) @@ -2363,7 +2259,9 @@ object Build { def asDist(implicit mode: Mode): Project = project. enablePlugins(PackPlugin). + enablePlugins(RepublishPlugin). withCommonSettings. + settings(commonDistSettings). dependsOn( `scala3-interfaces`, dottyCompiler, @@ -2374,14 +2272,10 @@ object Build { scaladoc, `scala3-sbt-bridge`, // for scala-cli ). - withDepSettings(commonDistSettings). 
bootstrappedSettings( target := baseDirectory.value / "target" // override setting in commonBootstrappedSettings ) - def withDepSettings(f: Seq[ClasspathDep[ProjectReference]] => Seq[Setting[?]]): Project = - project.settings(f(project.dependencies)) - def withCommonSettings(implicit mode: Mode): Project = project.settings(mode match { case NonBootstrapped => commonNonBootstrappedSettings case Bootstrapped => commonBootstrappedSettings diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala new file mode 100644 index 000000000000..314c39be7e8a --- /dev/null +++ b/project/RepublishPlugin.scala @@ -0,0 +1,137 @@ +package dotty.tools.sbtplugin + +import sbt._ +import xerial.sbt.pack.PackPlugin +import sbt.Keys._ +import sbt.AutoPlugin +import sbt.PublishBinPlugin +import sbt.PublishBinPlugin.autoImport._ + +import scala.collection.mutable +import java.nio.file.Files + +/** This local plugin provides ways of publishing a project classpath and library dependencies to + * .a local repository */ +object RepublishPlugin extends AutoPlugin { + override def trigger = allRequirements + override def requires = super.requires && PublishBinPlugin && PackPlugin + + object autoImport { + val republishProjectRefs = taskKey[Seq[ProjectRef]]("fetch the classpath deps from the project.") + val republishLocalResolved = taskKey[Seq[ResolvedArtifacts]]("resolve local artifacts for distribution.") + val republishAllResolved = taskKey[Seq[ResolvedArtifacts]]("Resolve the dependencies for the distribution") + val republishClasspath = taskKey[File]("cache the dependencies for the distribution") + val republishRepo = settingKey[File]("the location to store the republished artifacts.") + } + + import autoImport._ + + case class SimpleModuleId(org: String, name: String, revision: String) { + override def toString = s"$org:$name:$revision" + } + case class ResolvedArtifacts(id: SimpleModuleId, jar: File, pom: File) + + override val projectSettings: Seq[Def.Setting[_]] = 
Def.settings( + republishLocalResolved / republishProjectRefs := { + val proj = thisProjectRef.value + val deps = buildDependencies.value + + deps.classpathRefs(proj) + }, + republishLocalResolved := Def.taskDyn { + val deps = (republishLocalResolved / republishProjectRefs).value + val publishAllLocalBin = deps.map({ d => ((d / publishLocalBin / packagedArtifacts)) }).join + val resolveId = deps.map({ d => ((d / projectID)) }).join + Def.task { + val s = streams.value + val log = s.log + val published = publishAllLocalBin.value + val ids = resolveId.value + + ids.zip(published).map({ case (id, as) => + val simpleId = { + val name0 = id.crossVersion match { + case _: CrossVersion.Binary => + // projectID does not add binary suffix + (id.name + "_3").ensuring(!id.name.endsWith("_3") && id.revision.startsWith("3.")) + case _ => id.name + } + SimpleModuleId(id.organization, name0, id.revision) + } + var jarOrNull: File = null + var pomOrNull: File = null + as.foreach({ case (a, f) => + if (a.`type` == "jar") { + jarOrNull = f + } else if (a.`type` == "pom") { + pomOrNull = f + } + }) + assert(jarOrNull != null, s"Could not find jar for ${id}") + assert(pomOrNull != null, s"Could not find pom for ${id}") + ResolvedArtifacts(simpleId, jarOrNull, pomOrNull) + }) + } + }.value, + republishAllResolved := { + val localResolved = republishLocalResolved.value + val report = (thisProjectRef / updateFull).value + + val found = mutable.Map.empty[SimpleModuleId, ResolvedArtifacts] + val evicted = mutable.Set.empty[SimpleModuleId] + + localResolved.foreach({ resolved => + val simpleId = resolved.id + evicted += simpleId.copy(revision = simpleId.revision + "-nonbootstrapped") + found(simpleId) = resolved + }) + + report.allModuleReports.foreach { mr => + val simpleId = { + val id = mr.module + SimpleModuleId(id.organization, id.name, id.revision) + } + + if (!found.contains(simpleId) && !evicted(simpleId)) { + var jarOrNull: File = null + var pomOrNull: File = null + 
mr.artifacts.foreach({ case (a, f) => + if (a.`type` == "jar" || a.`type` == "bundle") { + jarOrNull = f + } else if (a.`type` == "pom") { + pomOrNull = f + } + }) + assert(jarOrNull != null, s"Could not find jar for ${simpleId}") + if (pomOrNull == null) { + val jarPath = jarOrNull.toPath + // we found the jar, so assume we can resolve a sibling pom file + val pomPath = jarPath.resolveSibling(jarPath.getFileName.toString.stripSuffix(".jar") + ".pom") + assert(Files.exists(pomPath), s"Could not find pom for ${simpleId}") + pomOrNull = pomPath.toFile + } + found(simpleId) = ResolvedArtifacts(simpleId, jarOrNull, pomOrNull) + } + + } + found.values.toSeq + }, + republishClasspath := { + val resolved = republishAllResolved.value + val cacheDir = republishRepo.value + val mavenRepo = cacheDir / "maven2" + IO.createDirectory(mavenRepo) + resolved.foreach { ra => + val jar = ra.jar + val pom = ra.pom + + val pathElems = ra.id.org.split('.').toVector :+ ra.id.name :+ ra.id.revision + val artifactDir = pathElems.foldLeft(mavenRepo)(_ / _) + IO.createDirectory(artifactDir) + IO.copyFile(jar, artifactDir / jar.getName) + IO.copyFile(pom, artifactDir / pom.getName) + } + cacheDir + } + ) +} From 34e8fc5fee8347477443dbef87f999a20798c5e8 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 29 Apr 2024 19:00:32 +0200 Subject: [PATCH 379/465] download and cache launcher, add scalajs library [Cherry-picked 13f5f04720f863a0fbc19c61e83e9d88ffd0de97] --- dist/bin/cli-common | 2 +- dist/bin/scala | 2 +- project/Build.scala | 23 ++++++++---- project/Modes.scala | 6 +++- project/RepublishPlugin.scala | 68 +++++++++++++++++++++++++++++++---- 5 files changed, 86 insertions(+), 15 deletions(-) diff --git a/dist/bin/cli-common b/dist/bin/cli-common index 975df5abfee3..d295d58916da 100644 --- a/dist/bin/cli-common +++ b/dist/bin/cli-common @@ -151,7 +151,7 @@ find_lib () { done } -SCALA_CLI_JAR=$(find_lib "*scala-cli*") +SCALA_CLI_JAR="$PROG_HOME/etc/scala-cli.jar" declare -a scala_args 
diff --git a/dist/bin/scala b/dist/bin/scala index 59c5508f6ba4..4d357918ae07 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -44,7 +44,7 @@ if [ -z "$SCALA_VERSION" ]; then exit 1 fi -MVN_REPOSITORY="file://$PROG_HOME/local/maven2" +MVN_REPOSITORY="file://$PROG_HOME/maven2" # escape all script arguments while [[ $# -gt 0 ]]; do diff --git a/project/Build.scala b/project/Build.scala index c3ee90fe722d..cbc35c3f2f92 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -118,6 +118,9 @@ object Build { */ val mimaPreviousLTSDottyVersion = "3.3.0" + /** Version of Scala CLI to download */ + val scalaCliLauncherVersion = "1.3.0" + object CompatMode { final val BinaryCompatible = 0 final val SourceAndBinaryCompatible = 1 @@ -2119,17 +2122,21 @@ object Build { publishArtifact := false, packGenerateMakefile := false, packArchiveName := "scala3-" + dottyVersion, - republishRepo := target.value / "local-repo", - Compile / pack := { - val localRepo = republishClasspath.value // republish all artifacts to local repo - (Compile / pack).value - } + republishRepo := target.value / "republish", + republishLaunchers := { + val cliV = scalaCliLauncherVersion + Seq( + ("scala-cli.jar", cliV, url(s"https://github.com/VirtusLab/scala-cli/releases/download/v$cliV/scala-cli.jar")) + ) + }, + Compile / pack := (Compile / pack).dependsOn(republish).value, ) lazy val dist = project.asDist(Bootstrapped) .settings( packResourceDir += (baseDirectory.value / "bin" -> "bin"), - packResourceDir += (target.value / "local-repo" -> "local"), + packResourceDir += (republishRepo.value / "maven2" -> "maven2"), + packResourceDir += (republishRepo.value / "etc" -> "etc"), ) private def customMimaReportBinaryIssues(issueFilterLocation: String) = mimaReportBinaryIssues := { @@ -2260,6 +2267,7 @@ object Build { def asDist(implicit mode: Mode): Project = project. enablePlugins(PackPlugin). enablePlugins(RepublishPlugin). + bootstrappedEnablePlugins(DottyJSPlugin). withCommonSettings. 
settings(commonDistSettings). dependsOn( @@ -2272,6 +2280,9 @@ object Build { scaladoc, `scala3-sbt-bridge`, // for scala-cli ). + bootstrappedDependsOn( + `scala3-library-bootstrappedJS` // for scala-cli + ). bootstrappedSettings( target := baseDirectory.value / "target" // override setting in commonBootstrappedSettings ) diff --git a/project/Modes.scala b/project/Modes.scala index eddb5a3f1a7b..fcc13dea8a89 100644 --- a/project/Modes.scala +++ b/project/Modes.scala @@ -1,4 +1,4 @@ -import sbt.{Project, ProjectReference, SettingsDefinition} +import sbt.{Project, ProjectReference, SettingsDefinition, Plugins} object Modes { @@ -25,5 +25,9 @@ object Modes { def bootstrappedDependsOn(s: sbt.ClasspathDep[ProjectReference]*)(implicit mode: Mode): Project = if (mode == NonBootstrapped) project else project.dependsOn(s: _*) + /** Plugins only if the mode is bootstrapped */ + def bootstrappedEnablePlugins(ns: Plugins*)(implicit mode: Mode): Project = + if (mode == NonBootstrapped) project else project.enablePlugins(ns: _*) + } } diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 314c39be7e8a..0b71c9ecb6df 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -6,6 +6,8 @@ import sbt.Keys._ import sbt.AutoPlugin import sbt.PublishBinPlugin import sbt.PublishBinPlugin.autoImport._ +import sbt.io.Using +import sbt.util.CacheImplicits._ import scala.collection.mutable import java.nio.file.Files @@ -20,8 +22,11 @@ object RepublishPlugin extends AutoPlugin { val republishProjectRefs = taskKey[Seq[ProjectRef]]("fetch the classpath deps from the project.") val republishLocalResolved = taskKey[Seq[ResolvedArtifacts]]("resolve local artifacts for distribution.") val republishAllResolved = taskKey[Seq[ResolvedArtifacts]]("Resolve the dependencies for the distribution") - val republishClasspath = taskKey[File]("cache the dependencies for the distribution") + val republishClasspath = taskKey[Set[File]]("cache the dependencies 
for the distribution") + val republishFetchLaunchers = taskKey[Set[File]]("cache the launcher deps for the distribution") + val republish = taskKey[File]("cache the dependencies and download launchers for the distribution") val republishRepo = settingKey[File]("the location to store the republished artifacts.") + val republishLaunchers = settingKey[Seq[(String, String, URL)]]("launchers to download. Sequence of (name, version, URL).") } import autoImport._ @@ -43,17 +48,17 @@ object RepublishPlugin extends AutoPlugin { val publishAllLocalBin = deps.map({ d => ((d / publishLocalBin / packagedArtifacts)) }).join val resolveId = deps.map({ d => ((d / projectID)) }).join Def.task { - val s = streams.value - val log = s.log val published = publishAllLocalBin.value val ids = resolveId.value ids.zip(published).map({ case (id, as) => val simpleId = { + val disabled = CrossVersion.disabled val name0 = id.crossVersion match { - case _: CrossVersion.Binary => + case cv: CrossVersion.Binary => // projectID does not add binary suffix - (id.name + "_3").ensuring(!id.name.endsWith("_3") && id.revision.startsWith("3.")) + (s"${id.name}_${cv.prefix}${cv.suffix}3") + .ensuring(!id.name.endsWith("_3") && id.revision.startsWith("3.")) case _ => id.name } SimpleModuleId(id.organization, name0, id.revision) @@ -117,11 +122,15 @@ object RepublishPlugin extends AutoPlugin { found.values.toSeq }, republishClasspath := { + val s = streams.value val resolved = republishAllResolved.value val cacheDir = republishRepo.value + + val log = s.log val mavenRepo = cacheDir / "maven2" IO.createDirectory(mavenRepo) - resolved.foreach { ra => + resolved.map { ra => + log.info(s"[republish] publishing ${ra.id} to $mavenRepo...") val jar = ra.jar val pom = ra.pom @@ -130,7 +139,54 @@ object RepublishPlugin extends AutoPlugin { IO.createDirectory(artifactDir) IO.copyFile(jar, artifactDir / jar.getName) IO.copyFile(pom, artifactDir / pom.getName) + artifactDir + }.toSet + }, + republishFetchLaunchers := { 
+ val s = streams.value + val log = s.log + val repoDir = republishRepo.value + val launcherVersions = republishLaunchers.value + + val etc = repoDir / "etc" + + val store = s.cacheStoreFactory / "versions" + + def work(dest: File, launcher: URL) = { + IO.delete(dest) + Using.urlInputStream(launcher) { in => + IO.createDirectory(etc) + log.info(s"[republish] Downloading $launcher to $dest...") + IO.transfer(in, dest) + log.info(s"[republish] Downloaded $launcher to $dest...") + } + dest + } + + val allLaunchers = { + for ((name, version, launcher) <- launcherVersions) yield { + val dest = etc / name + + val id = name.replaceAll("[^a-zA-Z0-9]", "_") + + val fetchAction = Tracked.inputChanged[String, File](store.make(id)) { (inChanged, version) => + if (inChanged || !Files.exists(dest.toPath)) { + work(dest, launcher) + } else { + log.info(s"[republish] Using cached $launcher at $dest...") + dest + } + } + + fetchAction(version) + } } + allLaunchers.toSet + }, + republish := { + val cacheDir = republishRepo.value + val artifacts = republishClasspath.value + val launchers = republishFetchLaunchers.value cacheDir } ) From 4682b52803313474ca1514336bb8a70043b9916b Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 30 Apr 2024 17:42:00 +0200 Subject: [PATCH 380/465] fix project/scripts/bootstrappedOnlyCmdTests for new scala launcher [Cherry-picked 008b5eedfecdbc0dd90761e49597d4d70172a528] --- project/scripts/bootstrappedOnlyCmdTests | 31 +++++++++++++----------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/project/scripts/bootstrappedOnlyCmdTests b/project/scripts/bootstrappedOnlyCmdTests index 4e18e3a1d4a4..74a0f5b59a8f 100755 --- a/project/scripts/bootstrappedOnlyCmdTests +++ b/project/scripts/bootstrappedOnlyCmdTests @@ -18,28 +18,37 @@ grep -qe "val a: scala.Int = 3" "$tmp" # setup for `scalac`/`scala` script tests "$SBT" dist/pack +echo "capturing scala version from dist/target/pack/VERSION" +IFS=':=' read -ra versionProps < 
"$ROOT/dist/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps +[ ${#versionProps[@]} -eq 3 ] && \ + [ ${versionProps[0]} = "version" ] && \ + [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/dist/target/pack/VERSION" +scala_version=${versionProps[2]} + # check that `scalac` compiles and `scala` runs it echo "testing ./bin/scalac and ./bin/scala" clear_out "$OUT" ./bin/scalac "$SOURCE" -d "$OUT" -./bin/scala -classpath "$OUT" "$MAIN" > "$tmp" +./bin/scala -classpath "$OUT" -M "$MAIN" > "$tmp" test "$EXPECTED_OUTPUT" = "$(cat "$tmp")" # Test scaladoc based on compiled classes ./bin/scaladoc -project Staging -d "$OUT1" "$OUT" clear_out "$OUT1" -# check that `scalac` and `scala` works for staging +# check that `scalac` and `scala` works for staging. +# TODO: scala3-staging should be automatically added by Scala CLI +# - see: https://github.com/VirtusLab/scala-cli/issues/2879 clear_out "$OUT" ./bin/scalac tests/run-staging/i4044f.scala -d "$OUT" -./bin/scala -with-compiler -classpath "$OUT" Test > "$tmp" +./bin/scala -with-compiler -classpath "$OUT" --dep "org.scala-lang::scala3-staging:$scala_version" -M Test > "$tmp" # check that `scalac -from-tasty` compiles and `scala` runs it echo "testing ./bin/scalac -from-tasty and scala -classpath" clear_out "$OUT1" ./bin/scalac "$SOURCE" -d "$OUT" ./bin/scalac -from-tasty -d "$OUT1" "$OUT/$TASTY" -./bin/scala -classpath "$OUT1" "$MAIN" > "$tmp" +./bin/scala -classpath "$OUT1" -M "$MAIN" > "$tmp" test "$EXPECTED_OUTPUT" = "$(cat "$tmp")" # check that `sbt scalac -decompile` runs @@ -90,10 +99,12 @@ clear_out "$OUT" ./bin/scalac -help > "$tmp" 2>&1 grep -qe "Usage: scalac " "$tmp" +# TODO: JAVA launcher should be able to override "scala-cli" program name +# - see: https://github.com/VirtusLab/scala-cli/issues/2838#issuecomment-2085130815 ./bin/scala -help > "$tmp" 2>&1 -grep -qe "Usage: scala " "$tmp" +grep -qe "See 'scala-cli --help' to read about a specific subcommand." 
"$tmp" -./bin/scala -d hello.jar tests/run/hello.scala +./bin/scala -d hello.jar tests/run/hello.scala --server=false ls hello.jar echo "testing i12973" @@ -102,14 +113,6 @@ clear_out "$OUT" echo "Bug12973().check" | TERM=dumb ./bin/scala -cp "$OUT/out.jar" > "$tmp" 2>&1 grep -qe "Bug12973 is fixed" "$tmp" -echo "capturing scala version from dist/target/pack/VERSION" -cwd=$(pwd) -IFS=':=' read -ra versionProps < "$cwd/dist/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps -[ ${#versionProps[@]} -eq 3 ] && \ - [ ${versionProps[0]} = "version" ] && \ - [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $cwd/dist/target/pack/VERSION" -scala_version=${versionProps[2]} - echo "testing -sourcepath with incremental compile: inlining changed inline def into a def" # Here we will test that a changed inline method symbol loaded from the sourcepath (-sourcepath compiler option) # will have its `defTree` correctly set when its method body is required for inlining. 
From c6cc0a3d314007f54daa5b71c9f449c7af8d38b1 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 3 May 2024 20:10:00 +0200 Subject: [PATCH 381/465] fix bash script tests [Cherry-picked fbe8323fae1445429f15a3a5ea64b8c809fc28b5] --- bin/scala | 2 +- .../scripting/classpathReport.sc | 4 +- .../test-resources/scripting/envtestNu.sc | 2 + .../test-resources/scripting/scriptPathNu.sc | 13 ++++++ compiler/test-resources/scripting/showArgs.sc | 2 +- .../test-resources/scripting/showArgsNu.sc | 6 +++ .../test-resources/scripting/sqlDateError.sc | 2 +- .../scripting/sqlDateErrorNu.sc | 6 +++ .../scripting/unglobClasspath.sc | 8 ++-- .../tools/scripting/BashExitCodeTests.scala | 16 ++++--- .../tools/scripting/BashScriptsTests.scala | 42 ++++++++++++------- .../tools/scripting/ClasspathTests.scala | 16 ++++++- .../tools/scripting/ExpressionTest.scala | 2 +- .../dotty/tools/scripting/ScriptTestEnv.scala | 15 ++++++- .../tools/scripting/ScriptingTests.scala | 10 ++++- 15 files changed, 108 insertions(+), 38 deletions(-) create mode 100755 compiler/test-resources/scripting/envtestNu.sc create mode 100755 compiler/test-resources/scripting/scriptPathNu.sc create mode 100755 compiler/test-resources/scripting/showArgsNu.sc create mode 100755 compiler/test-resources/scripting/sqlDateErrorNu.sc diff --git a/bin/scala b/bin/scala index 66ec9a5774c7..6506e3b38ab1 100755 --- a/bin/scala +++ b/bin/scala @@ -2,4 +2,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." 
-"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scala" "$@" +"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scala" "$@" "--offline" "--server=false" diff --git a/compiler/test-resources/scripting/classpathReport.sc b/compiler/test-resources/scripting/classpathReport.sc index a9eacbbba1f7..cc68c4b1d52e 100755 --- a/compiler/test-resources/scripting/classpathReport.sc +++ b/compiler/test-resources/scripting/classpathReport.sc @@ -1,8 +1,8 @@ -#!bin/scala -classpath 'dist/target/pack/lib/*' +#!/usr/bin/env bin/scala import java.nio.file.Paths -def main(args: Array[String]): Unit = +// def main(args: Array[String]): Unit = // MIGRATION: Scala CLI expects `*.sc` files to be straight-line code val cwd = Paths.get(".").toAbsolutePath.normalize.toString.norm printf("cwd: %s\n", cwd) printf("classpath: %s\n", sys.props("java.class.path").norm) diff --git a/compiler/test-resources/scripting/envtestNu.sc b/compiler/test-resources/scripting/envtestNu.sc new file mode 100755 index 000000000000..fe4cd7851b0a --- /dev/null +++ b/compiler/test-resources/scripting/envtestNu.sc @@ -0,0 +1,2 @@ +// MIGRATION: Scala CLI expects `*.sc` files to be straight-line code + println("Hello " + util.Properties.propOrNull("key")) diff --git a/compiler/test-resources/scripting/scriptPathNu.sc b/compiler/test-resources/scripting/scriptPathNu.sc new file mode 100755 index 000000000000..bb3e459654b9 --- /dev/null +++ b/compiler/test-resources/scripting/scriptPathNu.sc @@ -0,0 +1,13 @@ +#!/usr/bin/env bin/scala + +// THIS FILE IS RAN WITH SCALA CLI, which wraps scripts exposing scriptPath and args variables + +args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) } + +if !scriptPath.endsWith("scriptPathNu.sc") then + printf( s"incorrect script.path defined as [$scriptPath]") +else + printf("scriptPath: %s\n", scriptPath) // report the value + +extension(s: String) + def norm: String = s.replace('\\', '/') diff --git a/compiler/test-resources/scripting/showArgs.sc 
b/compiler/test-resources/scripting/showArgs.sc index 28f16a9022b3..8ef08f8962b0 100755 --- a/compiler/test-resources/scripting/showArgs.sc +++ b/compiler/test-resources/scripting/showArgs.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +#!/usr/bin/env bin/scala // precise output format expected by BashScriptsTests.scala def main(args: Array[String]): Unit = diff --git a/compiler/test-resources/scripting/showArgsNu.sc b/compiler/test-resources/scripting/showArgsNu.sc new file mode 100755 index 000000000000..f4c1aa6af257 --- /dev/null +++ b/compiler/test-resources/scripting/showArgsNu.sc @@ -0,0 +1,6 @@ +#!/usr/bin/env bin/scala + +// precise output format expected by BashScriptsTests.scala +// MIGRATION: Scala CLI expects `*.sc` files to be straight-line code +for (a,i) <- args.zipWithIndex do + printf(s"arg %2d:[%s]\n",i,a) diff --git a/compiler/test-resources/scripting/sqlDateError.sc b/compiler/test-resources/scripting/sqlDateError.sc index ceff98f40cad..35160fd6fcd5 100755 --- a/compiler/test-resources/scripting/sqlDateError.sc +++ b/compiler/test-resources/scripting/sqlDateError.sc @@ -1,4 +1,4 @@ -#!bin/scala +#!/usr/bin/env bin/scala def main(args: Array[String]): Unit = { println(new java.sql.Date(100L)) diff --git a/compiler/test-resources/scripting/sqlDateErrorNu.sc b/compiler/test-resources/scripting/sqlDateErrorNu.sc new file mode 100755 index 000000000000..a6f1bd50297d --- /dev/null +++ b/compiler/test-resources/scripting/sqlDateErrorNu.sc @@ -0,0 +1,6 @@ +#!/usr/bin/env bin/scala + +// def main(args: Array[String]): Unit = { MIGRATION: Scala CLI expects `*.sc` files to be straight-line code + println(new java.sql.Date(100L)) + System.err.println("SCALA_OPTS="+Option(System.getenv("SCALA_OPTS")).getOrElse("")) +// } diff --git a/compiler/test-resources/scripting/unglobClasspath.sc b/compiler/test-resources/scripting/unglobClasspath.sc index 796697cdedf2..deab2b8982ac 100755 --- a/compiler/test-resources/scripting/unglobClasspath.sc +++ 
b/compiler/test-resources/scripting/unglobClasspath.sc @@ -1,8 +1,6 @@ -#!bin/scala -classpath 'dist/target/pack/lib/*' +// won't compile unless classpath is set correctly +import dotty.tools.tasty.TastyFormat -// won't compile unless the hashbang line sets classpath -import org.jline.terminal.Terminal - -def main(args: Array[String]) = +// def main(args: Array[String]) = // MIGRATION: Scala CLI expects `*.sc` files to be straight-line code val cp = sys.props("java.class.path") printf("unglobbed classpath: %s\n", cp) diff --git a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala index cc53447cd64b..2fdc1eccaeb7 100644 --- a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala +++ b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala @@ -29,7 +29,7 @@ class BashExitCodeTests: }, expectedExitCode, exitCode) // Helpers for running scala, scalac, and scalac without the the output directory ("raw") - def scala(args: String*) = verifyExit(scalaPath, args*) + def scala(args: String*) = verifyExit(scalaPath, ("--offline" +: "--server=false" +: args)*) def scalacRaw(args: String*) = verifyExit(scalacPath, args*) def scalac(args: String*) = scalacRaw(("-d" +: tmpDir +: args)*) @@ -38,12 +38,16 @@ class BashExitCodeTests: Files.write(Files.createTempFile(tmpDir.toPath, getClass.getSimpleName, suffix), body.getBytes(UTF_8)).absPath @Test def neg = scalac(f("@main def Test = prin"))(1) - @Test def run = scalac(f("@main def Test = ???"))(0) & scala("-classpath", tmpDir, "Test")(1) - @Test def pos = scalac(f("@main def Test = ()"))(0) & scala("-classpath", tmpDir, "Test")(0) + @Test def run = scalac(f("@main def Test = ???"))(0) & scala("-classpath", tmpDir, "-M", "Test")(1) + @Test def pos = scalac(f("@main def Test = ()"))(0) & scala("-classpath", tmpDir, "-M", "Test")(0) - @Test def runNeg = scala(f("@main def Test = prin", ".sc"))(1) - @Test def runRun = scala(f("@main def Test = ???", 
".sc"))(1) - @Test def runPos = scala(f("@main def Test = ()", ".sc"))(0) + @Test def runNeg_script = scala(f("prin", ".sc"))(1) + @Test def runRun_script = scala(f("???", ".sc"))(1) + @Test def runPos_script = scala(f("()", ".sc"))(0) + + @Test def runNeg = scala(f("@main def Test = prin", ".scala"))(1) + @Test def runRun = scala(f("@main def Test = ???", ".scala"))(1) + @Test def runPos = scala(f("@main def Test = ()", ".scala"))(0) @Test def scNeg = scalac("-script", f("@main def Test = prin", ".sc"))(1) @Test def scRun = scalac("-script", f("@main def Test = ???", ".sc"))(1) diff --git a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala index f3f364754e20..69fb861a0516 100644 --- a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala +++ b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala @@ -5,7 +5,7 @@ package scripting import scala.language.unsafeNulls import java.nio.file.Paths -import org.junit.{Test, AfterClass} +import org.junit.{Test, Ignore, AfterClass} import org.junit.Assert.assertEquals import org.junit.Assume.assumeFalse import org.junit.experimental.categories.Category @@ -50,7 +50,9 @@ object BashScriptsTests: val testScriptArgs = Seq( "a", "b", "c", "-repl", "-run", "-script", "-debug" ) - val showArgsScript = testFiles.find(_.getName == "showArgs.sc").get.absPath + val Seq(showArgsScript, showArgsScalaCli) = Seq("showArgs.sc", "showArgsNu.sc").map { name => + testFiles.find(_.getName == name).get.absPath + } def testFile(name: String): String = val file = testFiles.find(_.getName == name) match { @@ -64,13 +66,13 @@ object BashScriptsTests: } file - val Seq(envtestSc, envtestScala) = Seq("envtest.sc", "envtest.scala").map { testFile(_) } + val Seq(envtestNuSc, envtestScala) = Seq("envtestNu.sc", "envtest.scala").map { testFile(_) } // create command line with given options, execute specified script, return stdout def callScript(tag: String, script: String, 
keyPre: String): String = val keyArg = s"$keyPre=$tag" printf("pass tag [%s] via [%s] to script [%s]\n", tag, keyArg, script) - val cmd: String = Seq("SCALA_OPTS= ", scalaPath, keyArg, script).mkString(" ") + val cmd: String = Seq("SCALA_OPTS= ", scalaPath, "run", keyArg, "--offline", "--server=false", script).mkString(" ") printf("cmd: [%s]\n", cmd) val (validTest, exitCode, stdout, stderr) = bashCommand(cmd) stderr.filter { !_.contains("Inappropriate ioctl") }.foreach { System.err.printf("stderr [%s]\n", _) } @@ -84,13 +86,15 @@ class BashScriptsTests: ////////////////////////// begin tests ////////////////////// /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.sc */ + @Ignore // SCALA CLI does not support `-J` to pass java properties, only things like -Xmx5g @Test def verifyScJProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World1" - val stdout = callScript(tag, envtestSc, s"-J-Dkey") + val stdout = callScript(tag, envtestNuSc, s"-J-Dkey") assertEquals( s"Hello $tag", stdout) /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.scala */ + @Ignore // SCALA CLI does not support `-J` to pass java properties, only things like -Xmx5g @Test def verifyScalaJProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World2" @@ -101,7 +105,7 @@ class BashScriptsTests: @Test def verifyScDProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World3" - val stdout = callScript(tag, envtestSc, s"-Dkey") + val stdout = callScript(tag, envtestNuSc, s"-Dkey") assertEquals(s"Hello $tag", stdout) /* verify that `dist/bin/scala` can set system properties via -D for envtest.scala */ @@ -114,7 +118,9 @@ class BashScriptsTests: /* verify that `dist/bin/scala` can set system properties via -D when 
executing compiled script via -jar envtest.jar */ @Test def saveAndRunWithDProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val commandline = Seq("SCALA_OPTS= ", scalaPath.relpath, "-save", envtestScala.relpath).mkString(" ") + val libOut = envtestScala.relpath.stripSuffix(".scala") + ".jar" + val commandline = Seq( + "SCALA_OPTS= ", scalaPath.relpath, "--power", "package", envtestScala.relpath, "-o", libOut, "--library", "--offline", "--server=false").mkString(" ") val (_, _, _, _) = bashCommand(commandline) // compile jar, discard output val testJar = testFile("envtest.jar") // jar is created by the previous bashCommand() if (testJar.isFile){ @@ -124,7 +130,8 @@ class BashScriptsTests: } val tag = "World5" - val commandline2 = Seq("SCALA_OPTS= ", scalaPath.relpath, s"-Dkey=$tag", testJar.relpath) + val commandline2 = Seq( + "SCALA_OPTS= ", scalaPath.relpath, "run", s"-Dkey=$tag", "-classpath", testJar.relpath, "--offline", "--server=false") printf("cmd[%s]\n", commandline2.mkString(" ")) val (validTest, exitCode, stdout, stderr) = bashCommand(commandline2.mkString(" ")) assertEquals(s"Hello $tag", stdout.mkString("/n")) @@ -148,7 +155,11 @@ class BashScriptsTests: /* verify `dist/bin/scala` non-interference with command line args following script name */ @Test def verifyScalaArgs = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val commandline = (Seq("SCALA_OPTS= ", scalaPath, showArgsScript) ++ testScriptArgs).mkString(" ") + val commandline = ( + Seq("SCALA_OPTS= ", scalaPath, showArgsScalaCli) + ++ Seq("--offline", "--server=false") + ++ ("--" +: testScriptArgs) + ).mkString(" ") val (validTest, exitCode, stdout, stderr) = bashCommand(commandline) if verifyValid(validTest) then var fail = false @@ -162,13 +173,13 @@ class BashScriptsTests: assert(stdout == expectedOutput) /* - * verify that scriptPath.sc sees a valid script.path property, - * 
and that it's value is the path to "scriptPath.sc". + * verify that scriptPathNu.sc sees a valid script.path property, + * and that it's value is the path to "scriptPathNu.sc". */ @Category(Array(classOf[BootstrappedOnlyTests])) @Test def verifyScriptPathProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptFile = testFiles.find(_.getName == "scriptPath.sc").get + val scriptFile = testFiles.find(_.getName == "scriptPathNu.sc").get val expected = s"${scriptFile.getName}" printf("===> verify valid system property script.path is reported by script [%s]\n", scriptFile.getName) printf("calling scriptFile: %s\n", scriptFile) @@ -177,8 +188,8 @@ class BashScriptsTests: stdout.foreach { printf("stdout: [%s]\n", _) } stderr.foreach { printf("stderr: [%s]\n", _) } val valid = stdout.exists { _.endsWith(expected) } - if valid then printf("# valid script.path reported by [%s]\n", scriptFile.getName) - assert(valid, s"script ${scriptFile.absPath} did not report valid script.path value") + if valid then printf("# valid scriptPath reported by [%s]\n", scriptFile.getName) + assert(valid, s"script ${scriptFile.absPath} did not report valid scriptPath value") /* * verify SCALA_OPTS can specify an @argsfile when launching a scala script in `dist/bin/scala`. 
@@ -208,7 +219,7 @@ class BashScriptsTests: */ @Test def sqlDateTest = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptBase = "sqlDateError" + val scriptBase = "sqlDateErrorNu" val scriptFile = testFiles.find(_.getName == s"$scriptBase.sc").get val testJar = testFile(s"$scriptBase.jar") // jar should not be created when scriptFile runs val tj = Paths.get(testJar).toFile @@ -236,7 +247,6 @@ class BashScriptsTests: printf("===> verify -e is properly handled by `dist/bin/scala`\n") val expected = "9" val expression = s"println(3*3)" - val cmd = s"bin/scala -e $expression" val (validTest, exitCode, stdout, stderr) = bashCommand(s"""bin/scala -e '$expression'""") val result = stdout.filter(_.nonEmpty).mkString("") printf("stdout: %s\n", result) diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index 4fd1211698f6..40c16b7e962d 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -51,7 +51,7 @@ class ClasspathTests: // convert scriptCp to a list of files val hashbangJars: List[File] = scriptCp.split(psep).map { _.toFile }.toList val hashbangClasspathJars = hashbangJars.map { _.name }.sorted.distinct // get jar basenames, remove duplicates - val packlibDir = s"$scriptCwd/$packLibDir" // classpathReport.sc specifies a wildcard classpath in this directory + val packlibDir: String = ??? /* ??? 
was s"$scriptCwd/$packLibDir" */ // classpathReport.sc specifies a wildcard classpath in this directory val packlibJars: List[File] = listJars(packlibDir) // classpath entries expected to have been reported by the script printf("%d jar files in dist/target/pack/lib\n", packlibJars.size) @@ -84,11 +84,23 @@ class ClasspathTests: case Some(file) => file val relpath = testScript.toPath.relpath.norm + val scalaCommand = scalaPath.relpath.norm printf("===> unglobClasspathVerifyTest for script [%s]\n", relpath) printf("bash is [%s]\n", bashExe) if packBinScalaExists then - val bashCmdline = s"set +x ; SCALA_OPTS= $relpath" + val sv = packScalaVersion + val tastyDirGlob = s"$packMavenDir/org/scala-lang/tasty-core_3/$sv/*" + // ^^^^^^^^^^^^^ + // the classpath is a glob pattern that should be unglobbed by scala command, + // otherwise the script could not compile because it references a class + // from tasty-core + + val bashCmdline = Seq( + "set +x ;", + "SCALA_OPTS=", + scalaCommand, "run", "--classpath", s"'$tastyDirGlob'", "--offline", "--server=false", relpath + ).mkString(" ") val cmd = Array(bashExe, "-c", bashCmdline) cmd.foreach { printf("[%s]\n", _) } diff --git a/compiler/test/dotty/tools/scripting/ExpressionTest.scala b/compiler/test/dotty/tools/scripting/ExpressionTest.scala index 6b5248e67f08..1430ab38ebec 100755 --- a/compiler/test/dotty/tools/scripting/ExpressionTest.scala +++ b/compiler/test/dotty/tools/scripting/ExpressionTest.scala @@ -44,7 +44,7 @@ class ExpressionTest: assert(success) def getResult(expression: String): String = - val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression'") + val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression' --offline --server=false") printf("stdout: %s\n", stdout.mkString("|")) printf("stderr: %s\n", stderr.mkString("\n", "\n", "")) stdout.filter(_.nonEmpty).mkString("") diff --git a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala 
b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala index 1db92d5415b4..a52014f14704 100644 --- a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala +++ b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala @@ -125,9 +125,22 @@ object ScriptTestEnv { def packBinDir = "dist/target/pack/bin" - def packLibDir = "dist/target/pack/lib" + // def packLibDir = "dist/target/pack/lib" // replaced by packMavenDir + def packMavenDir = "dist/target/pack/maven2" + def packVersionFile = "dist/target/pack/VERSION" def packBinScalaExists: Boolean = Files.exists(Paths.get(s"$packBinDir/scala")) + def packScalaVersion: String = { + val versionFile = Paths.get(packVersionFile) + if Files.exists(versionFile) then + val lines = Files.readAllLines(versionFile).asScala + lines.find { _.startsWith("version:=") } match + case Some(line) => line.drop(9) + case None => sys.error(s"no version:= found in $packVersionFile") + else + sys.error(s"no $packVersionFile found") + } + def listJars(dir: String): List[File] = val packlibDir = Paths.get(dir).toFile if packlibDir.isDirectory then diff --git a/compiler/test/dotty/tools/scripting/ScriptingTests.scala b/compiler/test/dotty/tools/scripting/ScriptingTests.scala index 5ec417090504..713695b62f4a 100644 --- a/compiler/test/dotty/tools/scripting/ScriptingTests.scala +++ b/compiler/test/dotty/tools/scripting/ScriptingTests.scala @@ -47,7 +47,10 @@ class ScriptingTests: */ @Test def scriptingMainTests = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") do + for + (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") + if !scriptFile.getName().endsWith("Nu.sc") + do showScriptUnderTest(scriptFile) val unexpectedJar = script2jar(scriptFile) unexpectedJar.delete @@ -66,7 +69,10 @@ class ScriptingTests: */ @Test def scriptingJarTest = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) 
- for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") do + for + (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") + if !scriptFile.getName().endsWith("Nu.sc") + do showScriptUnderTest(scriptFile) val expectedJar = script2jar(scriptFile) expectedJar.delete From eee90b41180882d0b49c66436935cc224f20fed5 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 6 May 2024 15:36:35 +0200 Subject: [PATCH 382/465] escape % in java executable path in batch commands. [Cherry-picked 26f918d4cdcb8e402983e7f1865c0fd0757ab7db] --- dist/bin/scala.bat | 3 +++ dist/bin/scalac.bat | 3 +++ dist/bin/scaladoc.bat | 4 ++++ 3 files changed, 10 insertions(+) diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index ca908fd340be..6c48794ddd40 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -21,6 +21,9 @@ call :args %* call :compilerJavaClasspathArgs +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" + call "%_JAVACMD%" %_JAVA_ARGS% "-Dscala.home=%_PROG_HOME%" -classpath "%_JVM_CP_ARGS%" dotty.tools.MainGenericRunner -classpath "%_JVM_CP_ARGS%" %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index cb1a76471f70..c8cd0babe60b 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -21,6 +21,9 @@ call :args %* call :compilerJavaClasspathArgs +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
+ call "%_JAVACMD%" %_JAVA_ARGS% -classpath "%_JVM_CP_ARGS%" "-Dscala.usejavacp=true" "-Dscala.home=%_PROG_HOME%" dotty.tools.MainGenericCompiler %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1 diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index bcc0d71788a3..c30a4689244c 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -26,6 +26,10 @@ call :classpathArgs if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS% ) else ( set _JAVA_OPTS=%_DEFAULT_JAVA_OPTS% ) + +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" + call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% ^ -classpath "%_CLASS_PATH%" ^ -Dscala.usejavacp=true ^ From eb3083b3dcc0ee6fc4bbb4bc3f6ce1fd5e104b8d Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 6 May 2024 18:30:28 +0200 Subject: [PATCH 383/465] Add a warning message when launching from scala. [Cherry-picked 035c1d551c1be93bcba09103b32c5f76f413208f] --- compiler/src/dotty/tools/MainGenericRunner.scala | 16 ++++++++++++++++ .../tools/coursier/CoursierScalaTests.scala | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala index 1540cc86d7a6..5b238693a135 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -266,6 +266,22 @@ object MainGenericRunner { run(settings.withExecuteMode(ExecuteMode.Run)) else run(settings.withExecuteMode(ExecuteMode.Repl)) + end run + + val ranByCoursierBootstrap = + sys.props.isDefinedAt("coursier.mainJar") + || sys.props.get("bootstrap.mainClass").filter(_ == "dotty.tools.MainGenericRunner").isDefined + + val silenced = sys.props.get("scala.use_legacy_launcher") == Some("true") + + if !silenced then + Console.err.println(s"[warning] MainGenericRunner class is deprecated since Scala 3.5.0, and Scala CLI features will not 
work.") + Console.err.println(s"[warning] Please be sure to update to the Scala CLI launcher to use the new features.") + if ranByCoursierBootstrap then + Console.err.println(s"[warning] It appears that your Coursier-based Scala installation is misconfigured.") + Console.err.println(s"[warning] To update to the new Scala CLI runner, please update (coursier, cs) commands first before re-installing scala.") + Console.err.println(s"[warning] Check the Scala 3.5.0 release notes to troubleshoot your installation.") + run(settings) match case Some(ex: (StringDriverException | ScriptingException)) => errorFn(ex.getMessage) diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index b8dfa833c437..115803d79dc1 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -166,7 +166,7 @@ object CoursierScalaTests: case Nil => args case _ => "--" +: args val newJOpts = jOpts.map(s => s"--java-opt ${s.stripPrefix("-J")}").mkString(" ") - execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true"""" +: newOptions)*)._2 + execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true" --property "scala.use_legacy_launcher=true"""" +: newOptions)*)._2 /** Get coursier script */ @BeforeClass def setup(): Unit = From 8daca0cce4e98cd600bb659e678261cd5bd4a010 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 6 May 2024 22:25:05 +0200 Subject: [PATCH 384/465] Windows - extract scala version from VERSION file [Cherry-picked 673ae702cc5ce05e449823798457c4e843fff88f] --- dist/bin/scala.bat | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git 
a/dist/bin/scala.bat b/dist/bin/scala.bat index 6c48794ddd40..ad622c87d1ed 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -21,10 +21,12 @@ call :args %* call :compilerJavaClasspathArgs +call :setScalaVersion + @rem we need to escape % in the java command path, for some reason this doesnt work in common.bat set "_JAVACMD=!_JAVACMD:%%=%%%%!" -call "%_JAVACMD%" %_JAVA_ARGS% "-Dscala.home=%_PROG_HOME%" -classpath "%_JVM_CP_ARGS%" dotty.tools.MainGenericRunner -classpath "%_JVM_CP_ARGS%" %_SCALA_ARGS% +call "%_JAVACMD%" %_JAVA_ARGS% "-Dscala.releaseversion=%_SCALA_VERSION%" "-Dscala.home=%_PROG_HOME%" -classpath "%_JVM_CP_ARGS%" dotty.tools.MainGenericRunner -classpath "%_JVM_CP_ARGS%" %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) goto end @@ -36,6 +38,7 @@ goto end set _JAVA_ARGS= set _SCALA_ARGS= set _SCALA_CPATH= +set "_SCALA_VERSION=" :args_loop if "%~1"=="" goto args_done @@ -90,6 +93,20 @@ if defined _SCALA_CPATH ( ) goto :eof +:setScalaVersion + +@rem read for version:=_SCALA_VERSION in VERSION_FILE +FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\VERSION") DO ( + SET "line=%%G" + IF "!line:~0,9!"=="version:=" ( + SET "_SCALA_VERSION=!line:~9!" 
+ GOTO :foundVersion + ) +) + +:foundVersion +goto :eof + @rem ######################################################################### @rem ## Cleanups From b3d9aeee42bd58800dbb9dd72f2bedbc9339d46c Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 6 May 2024 23:42:34 +0200 Subject: [PATCH 385/465] Windows - forward to scala-cli jar launcher [Cherry-picked ebbe3948da9077a48afaef4cdbd3f47f2a6b29a8] --- dist/bin/scala.bat | 71 ++++------------------------------------------ 1 file changed, 6 insertions(+), 65 deletions(-) diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index ad622c87d1ed..76617fb6057e 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -14,19 +14,15 @@ for %%f in ("%~dp0.") do ( call "%_PROG_HOME%\bin\common.bat" if not %_EXITCODE%==0 goto end -call :args %* - @rem ######################################################################### @rem ## Main -call :compilerJavaClasspathArgs - -call :setScalaVersion +call :setScalaOpts @rem we need to escape % in the java command path, for some reason this doesnt work in common.bat set "_JAVACMD=!_JAVACMD:%%=%%%%!" -call "%_JAVACMD%" %_JAVA_ARGS% "-Dscala.releaseversion=%_SCALA_VERSION%" "-Dscala.home=%_PROG_HOME%" -classpath "%_JVM_CP_ARGS%" dotty.tools.MainGenericRunner -classpath "%_JVM_CP_ARGS%" %_SCALA_ARGS% +call "%_JAVACMD%" "-jar" "%SCALA_CLI_JAR%" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) goto end @@ -34,66 +30,11 @@ goto end @rem ######################################################################### @rem ## Subroutines -:args -set _JAVA_ARGS= -set _SCALA_ARGS= -set _SCALA_CPATH= -set "_SCALA_VERSION=" - -:args_loop -if "%~1"=="" goto args_done -set "__ARG=%~1" -if "%__ARG:~0,2%"=="-D" ( - @rem pass to scala as well: otherwise we lose it sometimes when we - @rem need it, e.g. communicating with a server compiler. - set _JAVA_ARGS=!_JAVA_ARGS! "%__ARG%" - set _SCALA_ARGS=!_SCALA_ARGS! 
"%__ARG%" -) else if "%__ARG:~0,2%"=="-J" ( - @rem as with -D, pass to scala even though it will almost - @rem never be used. - set _JAVA_ARGS=!_JAVA_ARGS! %__ARG:~2% - set _SCALA_ARGS=!_SCALA_ARGS! "%__ARG%" -) else if "%__ARG%"=="-classpath" ( - set "_SCALA_CPATH=%~2" - shift -) else if "%__ARG%"=="-cp" ( - set "_SCALA_CPATH=%~2" - shift -) else ( - set _SCALA_ARGS=!_SCALA_ARGS! "%__ARG%" -) -shift -goto args_loop -:args_done -goto :eof - -@rem output parameter: _JVM_CP_ARGS -:compilerJavaClasspathArgs -set __TOOLCHAIN= -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_ASM%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SBT_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_COMP%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_TASTY_CORE%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_STAGING%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_TASTY_INSPECTOR%%_PSEP%" - -@rem # jline -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_READER%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL_JNA%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JNA%%_PSEP%" - -if defined _SCALA_CPATH ( - set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" -) else ( - set "_JVM_CP_ARGS=%__TOOLCHAIN%" -) -goto :eof +:setScalaOpts -:setScalaVersion +set "_SCALA_VERSION=" +set "MVN_REPOSITORY=file://%_PROG_HOME:\=/%/maven2" +set "SCALA_CLI_JAR=%_PROG_HOME%\etc\scala-cli.jar" @rem read for version:=_SCALA_VERSION in VERSION_FILE FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\VERSION") DO ( From 230a0785a9498096bb6cf9e265ecd32820c1fd3a Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 7 May 2024 00:06:32 +0200 Subject: [PATCH 386/465] properly convert path of repo to uri [Cherry-picked f31d7500bed7e4cbf869cc1e68556443b2836589] --- dist/bin/scala.bat | 22 +++++++++++++++++++++- 1 file changed, 21 
insertions(+), 1 deletion(-) diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index 76617fb6057e..c6a515ba617a 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -32,8 +32,28 @@ goto end :setScalaOpts +@REM sfind the index of the first colon in _PROG_HOME +set "index=0" +set "char=!_PROG_HOME:~%index%,1!" +:findColon +if not "%char%"==":" ( + set /a "index+=1" + set "char=!_PROG_HOME:~%index%,1!" + goto :findColon +) + +@REM set _PROG_HOME to the substring from the first colon to the end +set "_PROG_HOME_SUB=!_PROG_HOME:~%index%!" +@REM strip initial character +set "_PROG_HOME_SUB=!_PROG_HOME_SUB:~1!" + +@REM set drive to substring from 0 to the first colon +set "_PROG_HOME_DRIVE=!_PROG_HOME:~0,%index%!" + + + set "_SCALA_VERSION=" -set "MVN_REPOSITORY=file://%_PROG_HOME:\=/%/maven2" +set "MVN_REPOSITORY=file://%_PROG_HOME_DRIVE%\%_PROG_HOME_SUB:\=/%/maven2" set "SCALA_CLI_JAR=%_PROG_HOME%\etc\scala-cli.jar" @rem read for version:=_SCALA_VERSION in VERSION_FILE From 20009dbdcbbe22ef2026f6bca3e35c9bdee818eb Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 7 May 2024 11:16:17 +0200 Subject: [PATCH 387/465] fix windows command tests [Cherry-picked b53d7b27ffdf209fa8fade0c252d9192bd011a63] --- project/scripts/winCmdTests | 4 ++-- project/scripts/winCmdTests.bat | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/project/scripts/winCmdTests b/project/scripts/winCmdTests index d287b60992b2..2dffff5b196a 100644 --- a/project/scripts/winCmdTests +++ b/project/scripts/winCmdTests @@ -5,6 +5,6 @@ PREFIX="dist/target/pack" SOURCE="tests/pos/HelloWorld.scala" $PREFIX/bin/scalac @project/scripts/options "$SOURCE" $PREFIX/bin/scalac -d out "$SOURCE" -$PREFIX/bin/scala -classpath out HelloWorld -$PREFIX/bin/scala -classpath out -J-Xmx512m HelloWorld +$PREFIX/bin/scala --power -classpath out -M HelloWorld --offline '--server=false' +$PREFIX/bin/scala --power -classpath out -J -Xmx512m -M HelloWorld --offline '--server=false' mkdir -p _site 
&& $PREFIX/bin/scaladoc -d _site -project Hello "$SOURCE" diff --git a/project/scripts/winCmdTests.bat b/project/scripts/winCmdTests.bat index ee9b8237c694..d9b594d560ab 100644 --- a/project/scripts/winCmdTests.bat +++ b/project/scripts/winCmdTests.bat @@ -14,10 +14,10 @@ if not %ERRORLEVEL%==0 endlocal& exit /b 1 call "%_PREFIX%\bin\scalac.bat" -d "%_OUT_DIR%" "%_SOURCE%" if not %ERRORLEVEL%==0 endlocal& exit /b 1 -call "%_PREFIX%\bin\scala.bat" -classpath "%_OUT_DIR%" HelloWorld +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -M HelloWorld --offline --server=false if not %ERRORLEVEL%==0 endlocal& exit /b 1 -call "%_PREFIX%\bin\scala.bat" -classpath "%_OUT_DIR%" -J-Xmx512m HelloWorld +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -J -Xmx512m -M HelloWorld --offline --server=false if not %ERRORLEVEL%==0 endlocal& exit /b 1 if not exist "%_SITE_DIR%" mkdir "%_SITE_DIR%" From cfbee3836036cd9308fe6208e46bfcfd66d75fc2 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 7 May 2024 11:59:15 +0200 Subject: [PATCH 388/465] adjust to new launcher scala cli 1.3.1 [Cherry-picked 040793ca11f860e6c4881fef0a34a09bba6e37e0] --- bin/scala | 2 +- bin/test/TestScripts.scala | 2 +- .../dotty/tools/scripting/BashExitCodeTests.scala | 2 +- .../test/dotty/tools/scripting/BashScriptsTests.scala | 6 +++--- .../test/dotty/tools/scripting/ClasspathTests.scala | 2 +- .../test/dotty/tools/scripting/ExpressionTest.scala | 2 +- dist/bin/scala | 1 + dist/bin/scala.bat | 2 +- project/Build.scala | 2 +- project/scripts/bootstrappedOnlyCmdTests | 11 +++-------- 10 files changed, 14 insertions(+), 18 deletions(-) diff --git a/bin/scala b/bin/scala index 6506e3b38ab1..85c1ac91d08f 100755 --- a/bin/scala +++ b/bin/scala @@ -2,4 +2,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." 
-"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scala" "$@" "--offline" "--server=false" +"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scala" "--power" "$@" "--offline" "--server=false" diff --git a/bin/test/TestScripts.scala b/bin/test/TestScripts.scala index bada140580fc..4a2fd9a05c83 100644 --- a/bin/test/TestScripts.scala +++ b/bin/test/TestScripts.scala @@ -57,7 +57,7 @@ class TestScripts { s"bin/scalac script did not run properly. Output:$lineSep$dotcOutput" ) - val (retDotr, dotrOutput) = executeScript("./bin/scala HelloWorld") + val (retDotr, dotrOutput) = executeScript("./bin/scala -M HelloWorld") assert( retDotr == 0 && dotrOutput == "hello world\n", s"Running hello world exited with status: $retDotr and output: $dotrOutput" diff --git a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala index 2fdc1eccaeb7..90a8d80330b4 100644 --- a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala +++ b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala @@ -29,7 +29,7 @@ class BashExitCodeTests: }, expectedExitCode, exitCode) // Helpers for running scala, scalac, and scalac without the the output directory ("raw") - def scala(args: String*) = verifyExit(scalaPath, ("--offline" +: "--server=false" +: args)*) + def scala(args: String*) = verifyExit(scalaPath, ("--power" +: "--offline" +: "--server=false" +: args)*) def scalacRaw(args: String*) = verifyExit(scalacPath, args*) def scalac(args: String*) = scalacRaw(("-d" +: tmpDir +: args)*) diff --git a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala index 69fb861a0516..25bc54e2dcbe 100644 --- a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala +++ b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala @@ -72,7 +72,7 @@ object BashScriptsTests: def callScript(tag: String, script: String, keyPre: String): String = val keyArg = s"$keyPre=$tag" 
printf("pass tag [%s] via [%s] to script [%s]\n", tag, keyArg, script) - val cmd: String = Seq("SCALA_OPTS= ", scalaPath, "run", keyArg, "--offline", "--server=false", script).mkString(" ") + val cmd: String = Seq("SCALA_OPTS= ", scalaPath, "run", keyArg, "--power", "--offline", "--server=false", script).mkString(" ") printf("cmd: [%s]\n", cmd) val (validTest, exitCode, stdout, stderr) = bashCommand(cmd) stderr.filter { !_.contains("Inappropriate ioctl") }.foreach { System.err.printf("stderr [%s]\n", _) } @@ -131,7 +131,7 @@ class BashScriptsTests: val tag = "World5" val commandline2 = Seq( - "SCALA_OPTS= ", scalaPath.relpath, "run", s"-Dkey=$tag", "-classpath", testJar.relpath, "--offline", "--server=false") + "SCALA_OPTS= ", scalaPath.relpath, "run", s"-Dkey=$tag", "-classpath", testJar.relpath, "--power", "--offline", "--server=false") printf("cmd[%s]\n", commandline2.mkString(" ")) val (validTest, exitCode, stdout, stderr) = bashCommand(commandline2.mkString(" ")) assertEquals(s"Hello $tag", stdout.mkString("/n")) @@ -157,7 +157,7 @@ class BashScriptsTests: assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val commandline = ( Seq("SCALA_OPTS= ", scalaPath, showArgsScalaCli) - ++ Seq("--offline", "--server=false") + ++ Seq("--power", "--offline", "--server=false") ++ ("--" +: testScriptArgs) ).mkString(" ") val (validTest, exitCode, stdout, stderr) = bashCommand(commandline) diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index 40c16b7e962d..5107af5eee43 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -99,7 +99,7 @@ class ClasspathTests: val bashCmdline = Seq( "set +x ;", "SCALA_OPTS=", - scalaCommand, "run", "--classpath", s"'$tastyDirGlob'", "--offline", "--server=false", relpath + scalaCommand, "run", "--classpath", s"'$tastyDirGlob'", 
"--power", "--offline", "--server=false", relpath ).mkString(" ") val cmd = Array(bashExe, "-c", bashCmdline) diff --git a/compiler/test/dotty/tools/scripting/ExpressionTest.scala b/compiler/test/dotty/tools/scripting/ExpressionTest.scala index 1430ab38ebec..02963f50ee52 100755 --- a/compiler/test/dotty/tools/scripting/ExpressionTest.scala +++ b/compiler/test/dotty/tools/scripting/ExpressionTest.scala @@ -44,7 +44,7 @@ class ExpressionTest: assert(success) def getResult(expression: String): String = - val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression' --offline --server=false") + val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression' --power --offline --server=false") printf("stdout: %s\n", stdout.mkString("|")) printf("stderr: %s\n", stderr.mkString("\n", "\n", "")) stdout.filter(_.nonEmpty).mkString("") diff --git a/dist/bin/scala b/dist/bin/scala index 4d357918ae07..3040c5a9a0f3 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -56,6 +56,7 @@ done [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 eval "\"$JAVACMD\"" \ "-jar \"$SCALA_CLI_JAR\"" \ + "--prog-name scala" \ "--cli-default-scala-version \"$SCALA_VERSION\"" \ "-r \"$MVN_REPOSITORY\"" \ "${scala_args[@]}" diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index c6a515ba617a..78336272055b 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -22,7 +22,7 @@ call :setScalaOpts @rem we need to escape % in the java command path, for some reason this doesnt work in common.bat set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
-call "%_JAVACMD%" "-jar" "%SCALA_CLI_JAR%" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* +call "%_JAVACMD%" "-jar" "%SCALA_CLI_JAR%" "--prog-name" "scala" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) goto end diff --git a/project/Build.scala b/project/Build.scala index cbc35c3f2f92..11ed959b2c29 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -119,7 +119,7 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.3.0" + val scalaCliLauncherVersion = "1.3.1" object CompatMode { final val BinaryCompatible = 0 diff --git a/project/scripts/bootstrappedOnlyCmdTests b/project/scripts/bootstrappedOnlyCmdTests index 74a0f5b59a8f..f3d730f8f494 100755 --- a/project/scripts/bootstrappedOnlyCmdTests +++ b/project/scripts/bootstrappedOnlyCmdTests @@ -14,7 +14,6 @@ echo "testing scala.quoted.Expr.run from sbt scala" "$SBT" ";scala3-compiler-bootstrapped/scalac -with-compiler tests/run-staging/quote-run.scala; scala3-compiler-bootstrapped/scala -with-compiler Test" > "$tmp" grep -qe "val a: scala.Int = 3" "$tmp" - # setup for `scalac`/`scala` script tests "$SBT" dist/pack @@ -37,11 +36,9 @@ test "$EXPECTED_OUTPUT" = "$(cat "$tmp")" clear_out "$OUT1" # check that `scalac` and `scala` works for staging. 
-# TODO: scala3-staging should be automatically added by Scala CLI -# - see: https://github.com/VirtusLab/scala-cli/issues/2879 clear_out "$OUT" ./bin/scalac tests/run-staging/i4044f.scala -d "$OUT" -./bin/scala -with-compiler -classpath "$OUT" --dep "org.scala-lang::scala3-staging:$scala_version" -M Test > "$tmp" +./bin/scala -with-compiler -classpath "$OUT" -M Test > "$tmp" # check that `scalac -from-tasty` compiles and `scala` runs it echo "testing ./bin/scalac -from-tasty and scala -classpath" @@ -99,12 +96,10 @@ clear_out "$OUT" ./bin/scalac -help > "$tmp" 2>&1 grep -qe "Usage: scalac " "$tmp" -# TODO: JAVA launcher should be able to override "scala-cli" program name -# - see: https://github.com/VirtusLab/scala-cli/issues/2838#issuecomment-2085130815 ./bin/scala -help > "$tmp" 2>&1 -grep -qe "See 'scala-cli --help' to read about a specific subcommand." "$tmp" +grep -qe "See 'scala --help' to read about a specific subcommand." "$tmp" -./bin/scala -d hello.jar tests/run/hello.scala --server=false +./bin/scala -d hello.jar tests/run/hello.scala ls hello.jar echo "testing i12973" From 205d0456611c22dc8ac6bc2b0efaa4e41a010bae Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 7 May 2024 14:11:58 +0200 Subject: [PATCH 389/465] remove scala-js from local caching [Cherry-picked acbd46755bea5a81be175c183f1aa73234b467fd] --- project/Build.scala | 4 ---- 1 file changed, 4 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 11ed959b2c29..7656cb545413 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2267,7 +2267,6 @@ object Build { def asDist(implicit mode: Mode): Project = project. enablePlugins(PackPlugin). enablePlugins(RepublishPlugin). - bootstrappedEnablePlugins(DottyJSPlugin). withCommonSettings. settings(commonDistSettings). dependsOn( @@ -2280,9 +2279,6 @@ object Build { scaladoc, `scala3-sbt-bridge`, // for scala-cli ). - bootstrappedDependsOn( - `scala3-library-bootstrappedJS` // for scala-cli - ). 
bootstrappedSettings( target := baseDirectory.value / "target" // override setting in commonBootstrappedSettings ) From fd672eb8ee5926909c99d9357db50e3a17c96330 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 7 May 2024 14:33:23 +0200 Subject: [PATCH 390/465] escape error message in test [Cherry-picked bcdf5e762c624258b8d48bc193576d5c2130e7af] --- tests/run-with-compiler/i14541.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/run-with-compiler/i14541.scala b/tests/run-with-compiler/i14541.scala index 0fdfb89674d5..2b942007c5b6 100644 --- a/tests/run-with-compiler/i14541.scala +++ b/tests/run-with-compiler/i14541.scala @@ -6,6 +6,7 @@ object Test: def main(args: Array[String]): Unit = getClass.getClassLoader.run("echo", List("hello", "raw", "world")) // caution: uses "SCALA_OPTS" + sys.props("scala.use_legacy_launcher") = "true" dotty.tools.MainGenericRunner.main(Array("--class-path", classpath, "echo", "hello", "run", "world")) @main def echo(args: String*): Unit = println { From 95e53df0b360849efc49f724125094869eaf98b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Fri, 10 May 2024 11:06:03 +0200 Subject: [PATCH 391/465] Disable windows tests for RC1 --- .github/workflows/ci.yaml | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 12e90eb9d653..b606e6ae1732 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -204,16 +204,7 @@ jobs: test_windows_fast: runs-on: [self-hosted, Windows] - if: "( - github.event_name == 'push' - && github.ref != 'refs/heads/main' - ) - || github.event_name == 'merge_group' - || ( - github.event_name == 'pull_request' - && !contains(github.event.pull_request.body, '[skip ci]') - && !contains(github.event.pull_request.body, '[skip test_windows_fast]') - )" + if: false steps: - name: Reset existing repo @@ -251,13 +242,7 @@ jobs: test_windows_full: runs-on: [self-hosted, Windows] - 
if: "github.event_name == 'schedule' && github.repository == 'scala/scala3' - || github.event_name == 'push' - || ( - github.event_name == 'pull_request' - && !contains(github.event.pull_request.body, '[skip ci]') - && contains(github.event.pull_request.body, '[test_windows_full]') - )" + if: false steps: - name: Reset existing repo From d08d71bc3f5e0563a491a34621fe25ebd8a89a32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Fri, 10 May 2024 13:40:50 +0200 Subject: [PATCH 392/465] Filter out the dot directories form tests --- compiler/test/dotty/tools/vulpix/FileFilter.scala | 4 ++++ compiler/test/dotty/tools/vulpix/ParallelTesting.scala | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/compiler/test/dotty/tools/vulpix/FileFilter.scala b/compiler/test/dotty/tools/vulpix/FileFilter.scala index b2aef8af038e..9f62a7db2fb6 100644 --- a/compiler/test/dotty/tools/vulpix/FileFilter.scala +++ b/compiler/test/dotty/tools/vulpix/FileFilter.scala @@ -23,4 +23,8 @@ object FileFilter { object NoFilter extends FileFilter { def accept(file: String) = true } + + object ExcludeDotFiles extends FileFilter { + def accept(file: String) = !file.startsWith(".") + } } diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index e7e5936a4b29..09d3614b64a5 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -1411,7 +1411,7 @@ trait ParallelTesting extends RunnerOrchestration { self => private def compilationTargets(sourceDir: JFile, fileFilter: FileFilter = FileFilter.NoFilter): (List[JFile], List[JFile]) = sourceDir.listFiles.foldLeft((List.empty[JFile], List.empty[JFile])) { case ((dirs, files), f) => if (!fileFilter.accept(f.getName)) (dirs, files) - else if (f.isDirectory) (f :: dirs, files) + else if (f.isDirectory && FileFilter.ExcludeDotFiles.accept(f.getName)) (f :: dirs, files) else if 
(isSourceFile(f)) (dirs, f :: files) else (dirs, files) } From 4ebe8f429e2259e0eab7543ef592d8e5ec431add Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 14 May 2024 18:50:06 +0200 Subject: [PATCH 393/465] Take into account the version when releasing in the CI --- project/RepublishPlugin.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 0b71c9ecb6df..bd1190dfec88 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -36,6 +36,8 @@ object RepublishPlugin extends AutoPlugin { } case class ResolvedArtifacts(id: SimpleModuleId, jar: File, pom: File) + val isRelease = sys.env.get("RELEASEBUILD") == Some("yes") + override val projectSettings: Seq[Def.Setting[_]] = Def.settings( republishLocalResolved / republishProjectRefs := { val proj = thisProjectRef.value @@ -87,7 +89,10 @@ object RepublishPlugin extends AutoPlugin { localResolved.foreach({ resolved => val simpleId = resolved.id - evicted += simpleId.copy(revision = simpleId.revision + "-nonbootstrapped") + if(isRelease) + evicted += simpleId.copy(revision = simpleId.revision + "-bin-nonbootstrapped") + else + evicted += simpleId.copy(revision = simpleId.revision + "-nonbootstrapped") found(simpleId) = resolved }) From 782d1f64529215e888718d294c761b697994e52d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Tue, 14 May 2024 14:36:24 +0200 Subject: [PATCH 394/465] Add changelog for 3.5.0-RC1 --- changelogs/3.5.0-RC1.md | 254 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 254 insertions(+) create mode 100644 changelogs/3.5.0-RC1.md diff --git a/changelogs/3.5.0-RC1.md b/changelogs/3.5.0-RC1.md new file mode 100644 index 000000000000..4cbc2aa1d668 --- /dev/null +++ b/changelogs/3.5.0-RC1.md @@ -0,0 +1,254 @@ +# Highlights of the release + +- Bundle scala-cli in scala command (For RC1 requires JVM 17, further RCs will use native launchers) +- Introduce Best Effort 
compilation options [#17582](https://github.com/lampepfl/dotty/pull/17582) +- Add support for Pipelined builds [#18880](https://github.com/lampepfl/dotty/pull/18880) +- Add support for `var` in refinements [#19982](https://github.com/lampepfl/dotty/pull/19982) +- Implement SIP-42 - Support for binary integer literals [#19405](https://github.com/lampepfl/dotty/pull/19405) + +# Other changes and fixes + +## Backend + +- Fix Closure span assignment in makeClosure [#15841](https://github.com/lampepfl/dotty/pull/15841) + +## Default parameters + +- Fix default args lookup for given classes [#20256](https://github.com/lampepfl/dotty/pull/20256) +- Fix implicit search failure reporting [#20261](https://github.com/lampepfl/dotty/pull/20261) + +## Derivation + +- Fix infinite loop in Mirror synthesis of unreducible match type [#20133](https://github.com/lampepfl/dotty/pull/20133) + +## Desugaring + +- Add explanation to checkCaseClassInheritanceInvariant error msg [#20141](https://github.com/lampepfl/dotty/pull/20141) + +## Exports + +- Add annotations in parameters for exports [#20140](https://github.com/lampepfl/dotty/pull/20140) +- Fix isAliasType [#20195](https://github.com/lampepfl/dotty/pull/20195) + +## Implicits + +- Fix implicitNotFound message for type aliases [#19343](https://github.com/lampepfl/dotty/pull/19343) +- Normalize types before collecting parts determining implicit scope [#20077](https://github.com/lampepfl/dotty/pull/20077) +- Better error diagnostics under -explain-cyclic [#20251](https://github.com/lampepfl/dotty/pull/20251) +- Update unreducible match types error reporting [#19954](https://github.com/lampepfl/dotty/pull/19954) +- Improve ConstraintHandling of SkolemTypes [#20175](https://github.com/lampepfl/dotty/pull/20175) + +## Incremental Compilation + +- Retain default parameters with `export` [#20167](https://github.com/lampepfl/dotty/pull/20167) + +## Inline + +- Fix by-name parameter in beta-reduction 
[#20096](https://github.com/lampepfl/dotty/pull/20096) +- Add warning for anonymous inline classes (#16723) [#20291](https://github.com/lampepfl/dotty/pull/20291) +- Avoid conversion of `Unit` type into `()` term [#20295](https://github.com/lampepfl/dotty/pull/20295) +- Type desugared `transparent inline def unapply` call in the correct mode [#20108](https://github.com/lampepfl/dotty/pull/20108) +- Regression: fix compilation performance on Windows [#20193](https://github.com/lampepfl/dotty/pull/20193) +- Fix inline match on blocks with multiple statements [#20125](https://github.com/lampepfl/dotty/pull/20125) +- Inline `unapply`s in the inlining phase [#19382](https://github.com/lampepfl/dotty/pull/19382) +- Fix outerSelect in Inliner [#20313](https://github.com/lampepfl/dotty/pull/20313) + +## Linting + +- Fix #20146: attach the original name if there is an import selection for an indent [#20163](https://github.com/lampepfl/dotty/pull/20163) +- Add regression test for issue 18632 [#20308](https://github.com/lampepfl/dotty/pull/20308) + +## Match Types + +- Make aliases of `MatchAlias`es normal `TypeAlias`es [#19871](https://github.com/lampepfl/dotty/pull/19871) +- Fix #19746: Do not follow param term refs in `isConcrete`. 
[#20015](https://github.com/lampepfl/dotty/pull/20015) +- Do match type reduction atPhaseNoLater than ElimOpaque [#20017](https://github.com/lampepfl/dotty/pull/20017) +- Do not flag match types as `Deferred` and amend #20077 [#20147](https://github.com/lampepfl/dotty/pull/20147) +- Always use baseType when constraining patternTp with scrutineeTp [#20032](https://github.com/lampepfl/dotty/pull/20032) +- Use `MirrorSource.reduce` result for `companionPath` [#20207](https://github.com/lampepfl/dotty/pull/20207) +- Regression: Fix match type extraction of a MatchAlias [#20111](https://github.com/lampepfl/dotty/pull/20111) + +## Polyfunctions + +- Discard poly-functions when trying to resolve overloading [#20181](https://github.com/lampepfl/dotty/pull/20181) + +## Presentation Compiler + +- Stabilise returned completions by improving deduplication + extra completions for constructors [#19976](https://github.com/lampepfl/dotty/pull/19976) +- Fix active param index for empty param lists [#20142](https://github.com/lampepfl/dotty/pull/20142) +- Delias type members in hover [#20173](https://github.com/lampepfl/dotty/pull/20173) +- Interactive: handle context bounds in extension construct workaround [#20201](https://github.com/lampepfl/dotty/pull/20201) +- Fix: prefer non-export definition locations [#20252](https://github.com/lampepfl/dotty/pull/20252) +- Don't show enum completions in new keyword context [#20304](https://github.com/lampepfl/dotty/pull/20304) +- Chore: Backport changes for presentation compiler [#20345](https://github.com/lampepfl/dotty/pull/20345) +- Add custom matchers for completions (fuzzy search for presentation compiler) [#19850](https://github.com/lampepfl/dotty/pull/19850) + +## Quotes + +- Fix TermRef prefixes not having their type healed [#20102](https://github.com/lampepfl/dotty/pull/20102) +- Improve reporting in staging about the possible use of an incorrect class loader [#20137](https://github.com/lampepfl/dotty/pull/20137) +- Introduce 
MethodTypeKind to quotes reflection API [#20249](https://github.com/lampepfl/dotty/pull/20249) +- Add quote ASTs to TASTy [#20165](https://github.com/lampepfl/dotty/pull/20165) + +## Reflection + +- Allow to beta reduce curried function applications in quotes reflect [#18121](https://github.com/lampepfl/dotty/pull/18121) +- Set the inlining phase in the Context used for checking macro trees [#20087](https://github.com/lampepfl/dotty/pull/20087) +- Add Symbol.isSuperAccessor to reflection API [#13388](https://github.com/lampepfl/dotty/pull/13388) +- Stabilize reflect `SymbolMethods.isSuperAccessor` [#20198](https://github.com/lampepfl/dotty/pull/20198) + +## Repl + +- Fix validity period of derived SingleDenotations [#19983](https://github.com/lampepfl/dotty/pull/19983) +- Fix #18383: Never consider top-level `import`s as unused in the repl. [#20310](https://github.com/lampepfl/dotty/pull/20310) + +## Reporting + +- Warn if extension receiver already has member [#17543](https://github.com/lampepfl/dotty/pull/17543) +- Deprecation of case class elements [#17911](https://github.com/lampepfl/dotty/pull/17911) +- Support src filter in -WConf (Closes #17635) [#18783](https://github.com/lampepfl/dotty/pull/18783) +- Add note about type mismatch in automatically inserted apply argument [#20023](https://github.com/lampepfl/dotty/pull/20023) +- Make error reporting resilient to exception thrown while reporting [#20158](https://github.com/lampepfl/dotty/pull/20158) +- Remove duplicate comma from Matchable selector warning [#20159](https://github.com/lampepfl/dotty/pull/20159) +- Generalize warnings for top-level calls to Any or AnyRef methods [#20312](https://github.com/lampepfl/dotty/pull/20312) +- Make CheckUnused not slow. 
[#20321](https://github.com/lampepfl/dotty/pull/20321) + +## Rewrites + +- Patch indentation when removing braces (and other bug fixes in `-indent -rewrite`) [#17522](https://github.com/lampepfl/dotty/pull/17522) +- Extra check to avoid converting block expressions on the rhs of an in… [#20043](https://github.com/lampepfl/dotty/pull/20043) + +## Scaladoc + +- Fix scaladoc crash on Windows - illegal path character [#20311](https://github.com/lampepfl/dotty/pull/20311) +- Scaladoc: improve refined function types rendering [#20333](https://github.com/lampepfl/dotty/pull/20333) +- Relax font-weight reset [#20348](https://github.com/lampepfl/dotty/pull/20348) + +## Scala JS + +- Optimize main.js [#20093](https://github.com/lampepfl/dotty/pull/20093) + +## Settings + +- Lift Scala Settings from experimental to stabilized [#20199](https://github.com/lampepfl/dotty/pull/20199) + +## Tooling + +- Detect macro dependencies that are missing from the classloader [#20139](https://github.com/lampepfl/dotty/pull/20139) +- Write pipelined tasty in parallel. 
[#20153](https://github.com/lampepfl/dotty/pull/20153) +- ConsoleReporter sends INFO to stdout [#20328](https://github.com/lampepfl/dotty/pull/20328) + +## Transform + +- Fix overloaded default methods test in RefChecks [#20218](https://github.com/lampepfl/dotty/pull/20218) +- Fix handling of AppliedType aliases in outerPrefix [#20190](https://github.com/lampepfl/dotty/pull/20190) +- Elide unit binding when beta-reducing [#20085](https://github.com/lampepfl/dotty/pull/20085) + +## Typer + +- Reduce projections of type aliases with class type prefixes [#19931](https://github.com/lampepfl/dotty/pull/19931) +- Re-lub also hard union types in simplify [#20027](https://github.com/lampepfl/dotty/pull/20027) +- Fix #19789: Merge same TypeParamRef in orDominator [#20090](https://github.com/lampepfl/dotty/pull/20090) +- Allow SAM types to contain match alias refinements [#20092](https://github.com/lampepfl/dotty/pull/20092) +- Don't dealias when deciding which arguments to defer [#20116](https://github.com/lampepfl/dotty/pull/20116) +- Avoid the TypeVar.inst trap [#20160](https://github.com/lampepfl/dotty/pull/20160) +- Avoid crash when superType does not exist after erasure [#20188](https://github.com/lampepfl/dotty/pull/20188) +- Refine overloading and implicit disambiguation [#20084](https://github.com/lampepfl/dotty/pull/20084) +- Refactor constant folding of applications [#20099](https://github.com/lampepfl/dotty/pull/20099) +- Rollback constraints if `isSameType` failed second direction [#20109](https://github.com/lampepfl/dotty/pull/20109) +- Suppress "extension method will never be selected" for overrides [#20164](https://github.com/lampepfl/dotty/pull/20164) +- Allow SAM types to contain multiple refinements [#20172](https://github.com/lampepfl/dotty/pull/20172) +- Normalize when verifying if TypeTestCasts are unchecked [#20258](https://github.com/lampepfl/dotty/pull/20258) + +# Experimental Changes + +- Named tuples second implementation 
[#19174](https://github.com/lampepfl/dotty/pull/19174) +- Change rules for given prioritization [#19300](https://github.com/lampepfl/dotty/pull/19300) +- Enable experimental mode when experimental feature is imported [#19807](https://github.com/lampepfl/dotty/pull/19807) +- Add message parameter to `@experimental` annotation [#19935](https://github.com/lampepfl/dotty/pull/19935) +- Implement match type amendment: extractors follow aliases and singletons [#20161](https://github.com/lampepfl/dotty/pull/20161) + +## Capture Checking + +- Carry and check universal capability from parents correctly [#20004](https://github.com/lampepfl/dotty/pull/20004) +- Make parameter types of context functions inferred type trees [#20155](https://github.com/lampepfl/dotty/pull/20155) +- Handle reach capabilities correctly in depedent functions [#20203](https://github.com/lampepfl/dotty/pull/20203) +- Fix the visibility check in `markFree` [#20221](https://github.com/lampepfl/dotty/pull/20221) +- Make inline proxy vals have inferred types [#20241](https://github.com/lampepfl/dotty/pull/20241) +- CC: Give more info when context function parameters leak [#20244](https://github.com/lampepfl/dotty/pull/20244) +- Plug soundness hole for reach capabilities [#20051](https://github.com/lampepfl/dotty/pull/20051) +- Tighten the screws a bit more to seal the soundness hole for reach capabilities [#20056](https://github.com/lampepfl/dotty/pull/20056) +- Drop retains annotations in inferred type trees [#20057](https://github.com/lampepfl/dotty/pull/20057) +- Allow @retains arguments to be context functions [#20232](https://github.com/lampepfl/dotty/pull/20232) +- Fix conversion of this.fld capture refs under separate compilation [#20238](https://github.com/lampepfl/dotty/pull/20238) + +## Erased definitions + +- Fix "Compiler crash when using CanThrow" [#20210](https://github.com/lampepfl/dotty/pull/20210) +- Only allow erased parameters in erased definitions 
[#19686](https://github.com/lampepfl/dotty/pull/19686) + +## Initialization + +- Deprecate `StandardPlugin.init` in favor of `initialize` method taking implicit Context [#20330](https://github.com/lampepfl/dotty/pull/20330) +- Fix missing changesParents in PostTyper [#20062](https://github.com/lampepfl/dotty/pull/20062) +- Special case for next field of colon colon in global init checker [#20281](https://github.com/lampepfl/dotty/pull/20281) +- Extend whitelist in global initialization checker [#20290](https://github.com/lampepfl/dotty/pull/20290) + +## Macro Annotations + +- Allow macro annotation to transform companion [#19677](https://github.com/lampepfl/dotty/pull/19677) +- Remove experimental `MainAnnotation`/`newMain` (replaced with `MacroAnnotation`) [#19937](https://github.com/lampepfl/dotty/pull/19937) + +## Nullability + +- Add flexible types to deal with Java-defined signatures under -Yexplicit-nulls [#18112](https://github.com/lampepfl/dotty/pull/18112) +- Fix #20287: Add flexible types to Quotes library [#20293](https://github.com/lampepfl/dotty/pull/20293) +- Add fromNullable to Predef for explicit nulls [#20222](https://github.com/lampepfl/dotty/pull/20222) + + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.2..3.5.0-RC1` these are: + +``` + 137 Martin Odersky + 51 Eugene Flesselle + 32 Jamie Thompson + 25 Nicolas Stucki + 22 Sébastien Doeraene + 18 noti0na1 + 16 Matt Bovel + 12 Guillaume Martres + 9 Paweł Marks + 9 Yichen Xu + 8 Jan Chyb + 7 Hamza REMMAL + 6 Jędrzej Rochala + 6 Som Snytt + 5 Fengyun Liu + 5 dependabot[bot] + 3 Mikołaj Fornal + 2 Aviv Keller + 2 EnzeXing + 2 Wojciech Mazur + 1 Chris Pado + 1 Filip Zybała + 1 Georgi Krastev + 1 Hamza Remmal + 1 Jisoo Park + 1 Katarzyna Marek + 1 Lucas Nouguier + 1 Lucy Martin + 1 Ola Flisbäck + 1 Pascal Weisenburger + 1 Quentin Bernet + 1 Raphael Jolly + 1 Stephane Bersier + 1 Tomasz Godzik + 1 Yoonjae Jeon + 1 
aherlihy + 1 rochala + 1 willerf +``` From a15fc7d5f3d793cf6aad50565aab73f309b0b859 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Tue, 14 May 2024 14:38:52 +0200 Subject: [PATCH 395/465] Release 3.5.0-RC1 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 7656cb545413..0876353a6a2f 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -87,7 +87,7 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.4.2-RC1" + val referenceVersion = "3.4.2" val baseVersion = "3.5.0-RC1" From 4992e3740bc2cd1ddd07673060844f3b90f0e866 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 14 May 2024 22:23:10 +0200 Subject: [PATCH 396/465] Backport: Avoid forcing whole package when using `-experimental` This backports https://github.com/scala/scala3/pull/20409 which fixes a regression introduced in 3.5.0-RC1 causing compiler crashes when enabling `-experimental`. 
--- compiler/src/dotty/tools/dotc/typer/Checking.scala | 3 ++- sbt-test/java-compat/moduleInfo/A.scala | 2 ++ sbt-test/java-compat/moduleInfo/build.sbt | 5 +++++ sbt-test/java-compat/moduleInfo/test | 1 + 4 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 sbt-test/java-compat/moduleInfo/A.scala create mode 100644 sbt-test/java-compat/moduleInfo/build.sbt create mode 100644 sbt-test/java-compat/moduleInfo/test diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 073055ba5b58..1f82b9ddc084 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -806,10 +806,11 @@ object Checking { def checkAndAdaptExperimentalImports(trees: List[Tree])(using Context): Unit = def nonExperimentalTopLevelDefs(pack: Symbol): Iterator[Symbol] = def isNonExperimentalTopLevelDefinition(sym: Symbol) = - !sym.isExperimental + sym.isDefinedInCurrentRun && sym.source == ctx.compilationUnit.source && !sym.isConstructor // not constructor of package object && !sym.is(Package) && !sym.name.isPackageObjectName + && !sym.isExperimental pack.info.decls.toList.iterator.flatMap: sym => if sym.isClass && (sym.is(Package) || sym.isPackageObject) then diff --git a/sbt-test/java-compat/moduleInfo/A.scala b/sbt-test/java-compat/moduleInfo/A.scala new file mode 100644 index 000000000000..4b46ae7047d6 --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/A.scala @@ -0,0 +1,2 @@ +// Previously, we crashed trying to parse module-info.class in the empty package. 
+class A diff --git a/sbt-test/java-compat/moduleInfo/build.sbt b/sbt-test/java-compat/moduleInfo/build.sbt new file mode 100644 index 000000000000..a0308b6cb83a --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/build.sbt @@ -0,0 +1,5 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +scalacOptions ++= Seq( + "-experimental" +) diff --git a/sbt-test/java-compat/moduleInfo/test b/sbt-test/java-compat/moduleInfo/test new file mode 100644 index 000000000000..5df2af1f3956 --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/test @@ -0,0 +1 @@ +> compile From 7885c247391c19a3b0c00c3aeda2df477b411fad Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 17 Jun 2024 10:54:07 +0200 Subject: [PATCH 397/465] This reverts one part of #20261. When we fail with both an ambiguity on one implicit argument and another error on another argument we prefer the other error. I added a comment why this is needed. --- .../src/dotty/tools/dotc/typer/Typer.scala | 9 +++++- tests/pos/i20344.scala | 28 +++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i20344.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2a69c948baae..ae50d626cb1f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4113,7 +4113,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * `SearchFailureType`. */ def issueErrors(fun: Tree, args: List[Tree]): Tree = - def firstFailure = args.tpes.find(_.isInstanceOf[SearchFailureType]).getOrElse(NoType) + // Prefer other errors over ambiguities. If nested in outer searches a missing + // implicit can be healed by simply dropping this alternative and trying something + // else. But an ambiguity is sticky and propagates outwards. 
If we have both + // a missing implicit on one argument and an ambiguity on another the whole + // branch should be classified as a missing implicit. + val firstNonAmbiguous = args.tpes.find(tp => tp.isError && !tp.isInstanceOf[AmbiguousImplicits]) + def firstError = args.tpes.find(_.isInstanceOf[SearchFailureType]).getOrElse(NoType) + def firstFailure = firstNonAmbiguous.getOrElse(firstError) val errorType = firstFailure match case tp: AmbiguousImplicits => diff --git a/tests/pos/i20344.scala b/tests/pos/i20344.scala new file mode 100644 index 000000000000..d3b2a060d6e2 --- /dev/null +++ b/tests/pos/i20344.scala @@ -0,0 +1,28 @@ +trait Monad[F[_]] extends Invariant[F] + +trait Invariant[F[_]] +object Invariant: + implicit def catsInstancesForList: Monad[List] = ??? + implicit def catsInstancesForVector: Monad[Vector] = ??? + +trait Shrink[T] +object Shrink extends ShrinkLowPriorityImplicits: + trait Buildable[T,C] + implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T], b: Buildable[T,C[T]]): Shrink[C[T]] = ??? +trait ShrinkLowPriorityImplicits: + implicit def shrinkAny[T]: Shrink[T] = ??? + +trait Distribution[F[_], -P, X] extends (P => F[X]) +type GenBeta[A, B, X] = [F[_]] =>> Distribution[F, Beta.Params[A, B], X] +type Beta[R] = [F[_]] =>> GenBeta[R, R, R][F] + +object Beta: + trait Params[+A, +B] +trait BetaInstances: + given schrodingerRandomBetaForDouble[F[_]: Monad]: Beta[Double][F] = ??? 
+ +object all extends BetaInstances + +@main def Test = + import all.given + summon[Shrink[Beta.Params[Double, Double]]] \ No newline at end of file From f913d89129259459d7c1d29901c11fbb7f2d092f Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 17 Jun 2024 11:54:25 +0200 Subject: [PATCH 398/465] Treat 3.5-migration the same as 3.5 for a warning about implicit priority change Fixes #20420 --- .../dotty/tools/dotc/typer/Implicits.scala | 4 +-- tests/warn/i20420.scala | 27 +++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 tests/warn/i20420.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index fd22f0ec5529..54821444aed6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1311,14 +1311,14 @@ trait Implicits: else var cmp = comp(using searchContext()) val sv = Feature.sourceVersion - if sv == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then + if sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if cmp != prev then def choice(c: Int) = c match case -1 => "the second alternative" case 1 => "the first alternative" case _ => "none - it's ambiguous" - if sv == SourceVersion.`3.5` then + if sv.stable == SourceVersion.`3.5` then report.warning( em"""Given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} will change |Current choice : ${choice(prev)} diff --git a/tests/warn/i20420.scala b/tests/warn/i20420.scala new file mode 100644 index 000000000000..d28270509f91 --- /dev/null +++ b/tests/warn/i20420.scala @@ -0,0 +1,27 @@ +//> using options -source 3.5-migration + +final class StrictEqual[V] +final class Less[V] +type LessEqual[V] = Less[V] | StrictEqual[V] + +object TapirCodecIron: + trait ValidatorForPredicate[Value, Predicate] + trait 
PrimitiveValidatorForPredicate[Value, Predicate] + extends ValidatorForPredicate[Value, Predicate] + + given validatorForLessEqual[N: Numeric, NM <: N](using + ValueOf[NM] + ): PrimitiveValidatorForPredicate[N, LessEqual[NM]] = ??? + given validatorForDescribedOr[N, P](using + IsDescription[P] + ): ValidatorForPredicate[N, P] = ??? + + trait IsDescription[A] + object IsDescription: + given derived[A]: IsDescription[A] = ??? + +@main def Test = { + import TapirCodecIron.{*, given} + type IntConstraint = LessEqual[3] + summon[ValidatorForPredicate[Int, IntConstraint]] // warn +} \ No newline at end of file From 0626b972a961910d6654b95835923fa1d560d6f6 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 17 Jun 2024 12:54:32 +0200 Subject: [PATCH 399/465] Set default source version to 3.5 --- community-build/community-projects/Lucre | 2 +- community-build/community-projects/Monocle | 2 +- community-build/community-projects/akka | 2 +- community-build/community-projects/cask | 2 +- .../community-projects/endpoints4s | 2 +- .../community-projects/izumi-reflect | 2 +- community-build/community-projects/os-lib | 2 +- community-build/community-projects/scalatest | 2 +- community-build/community-projects/scalaz | 2 +- community-build/community-projects/scas | 2 +- community-build/community-projects/spire | 2 +- community-build/community-projects/upickle | 2 +- community-build/community-projects/utest | 2 +- .../tools/dotc/config/SourceVersion.scala | 2 +- library/src/scala/quoted/ToExpr.scala | 2 +- .../src/scala/Array.scala | 690 +++++++ .../src/scala/collection/ArrayOps.scala | 1664 +++++++++++++++++ .../src/scala/collection/Factory.scala | 784 ++++++++ .../src/scala/collection/Iterable.scala | 1043 +++++++++++ .../src/scala/collection/SortedMap.scala | 220 +++ .../StrictOptimizedSortedMapOps.scala | 46 + .../generic/DefaultSerializationProxy.scala | 87 + .../scala/collection/mutable/ArraySeq.scala | 354 ++++ .../mutable/CollisionProofHashMap.scala | 888 +++++++++ 
scala2-library-cc/src/scala/Array.scala | 690 +++++++ .../src/scala/collection/ArrayOps.scala | 4 +- .../src/scala/collection/Factory.scala | 20 +- .../src/scala/collection/Iterable.scala | 12 +- .../src/scala/collection/SortedMap.scala | 6 +- .../StrictOptimizedSortedMapOps.scala | 2 +- .../generic/DefaultSerializationProxy.scala | 4 +- .../scala/collection/mutable/ArraySeq.scala | 6 +- .../mutable/CollisionProofHashMap.scala | 2 +- tests/neg/given-loop-prevention.check | 14 + tests/neg/given-loop-prevention.scala | 12 + tests/neg/i6716.check | 6 +- tests/neg/i6716.scala | 4 +- tests/neg/i7294-a.check | 27 - tests/neg/i7294-a.scala | 14 - tests/neg/i7294-b.scala | 12 - tests/neg/i7294.check | 25 + tests/neg/i7294.scala | 10 + tests/neg/looping-givens.check | 48 + tests/neg/looping-givens.scala | 11 + .../CollectionStrawMan6.scala | 4 +- tests/pos/extmethods.scala | 2 +- tests/pos/given-loop-prevention.scala | 14 - tests/pos/i17245.scala | 2 +- tests/pos/i9967.scala | 2 +- tests/pos/t5643.scala | 2 +- .../run/colltest6/CollectionStrawMan6_1.scala | 4 +- tests/run/i502.scala | 6 +- tests/run/t2029.scala | 2 +- tests/run/t3326.scala | 8 +- .../expect/InventedNames.expect.scala | 2 +- tests/semanticdb/expect/InventedNames.scala | 2 +- tests/semanticdb/metac.expect | 12 +- tests/warn/context-bounds-migration.scala | 9 - tests/warn/i15474.scala | 2 +- tests/warn/looping-givens.check | 45 + tests/warn/looping-givens.scala | 2 + 61 files changed, 6702 insertions(+), 153 deletions(-) create mode 100644 scala2-library-bootstrapped/src/scala/Array.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/Factory.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/Iterable.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/SortedMap.scala create mode 100644 
scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala create mode 100644 scala2-library-cc/src/scala/Array.scala create mode 100644 tests/neg/given-loop-prevention.check create mode 100644 tests/neg/given-loop-prevention.scala delete mode 100644 tests/neg/i7294-a.check delete mode 100644 tests/neg/i7294-a.scala delete mode 100644 tests/neg/i7294-b.scala create mode 100644 tests/neg/i7294.check create mode 100644 tests/neg/i7294.scala create mode 100644 tests/neg/looping-givens.check create mode 100644 tests/neg/looping-givens.scala delete mode 100644 tests/pos/given-loop-prevention.scala delete mode 100644 tests/warn/context-bounds-migration.scala create mode 100644 tests/warn/looping-givens.check diff --git a/community-build/community-projects/Lucre b/community-build/community-projects/Lucre index 1008f0b7f513..21a27a294ac7 160000 --- a/community-build/community-projects/Lucre +++ b/community-build/community-projects/Lucre @@ -1 +1 @@ -Subproject commit 1008f0b7f51374ddbc947e677c505fa97677b7d4 +Subproject commit 21a27a294ac7c413f80839d96a02942b2c6d021c diff --git a/community-build/community-projects/Monocle b/community-build/community-projects/Monocle index a0e70744e9b3..b303aa3b98d9 160000 --- a/community-build/community-projects/Monocle +++ b/community-build/community-projects/Monocle @@ -1 +1 @@ -Subproject commit a0e70744e9b3bfb0f12e4ea292151c49c3302cd1 +Subproject commit b303aa3b98d9a10c3f77a56765ca5be2f3cc51f7 diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka index 79b294048f89..ee0ac854f36f 160000 --- a/community-build/community-projects/akka +++ b/community-build/community-projects/akka 
@@ -1 +1 @@ -Subproject commit 79b294048f893d9d6b9332618f7aebedce9a5340 +Subproject commit ee0ac854f36f537bf3062fd4e9d9f2ff5c1de4c9 diff --git a/community-build/community-projects/cask b/community-build/community-projects/cask index d5fa6d47da5e..2db6020a2d11 160000 --- a/community-build/community-projects/cask +++ b/community-build/community-projects/cask @@ -1 +1 @@ -Subproject commit d5fa6d47da5ea99d94887fafd555696ba07aa205 +Subproject commit 2db6020a2d11566d504ae9af4de28c7a6e20b7ed diff --git a/community-build/community-projects/endpoints4s b/community-build/community-projects/endpoints4s index 3a667a3608ff..b004d1388872 160000 --- a/community-build/community-projects/endpoints4s +++ b/community-build/community-projects/endpoints4s @@ -1 +1 @@ -Subproject commit 3a667a3608ff9950c24e9b2b5038c71c1690a21d +Subproject commit b004d13888723de9f6a86f560137fc31e22edcb6 diff --git a/community-build/community-projects/izumi-reflect b/community-build/community-projects/izumi-reflect index c0756faa7311..2c7e4a69c386 160000 --- a/community-build/community-projects/izumi-reflect +++ b/community-build/community-projects/izumi-reflect @@ -1 +1 @@ -Subproject commit c0756faa7311f70c6da6af29b8cb25506634bf09 +Subproject commit 2c7e4a69c386201e479584333a84ce018fef1795 diff --git a/community-build/community-projects/os-lib b/community-build/community-projects/os-lib index a4400deb3bec..4c8c82b23d76 160000 --- a/community-build/community-projects/os-lib +++ b/community-build/community-projects/os-lib @@ -1 +1 @@ -Subproject commit a4400deb3bec415fd82d331fc1f8b749f3d64e60 +Subproject commit 4c8c82b23d767bc927290829514b8de7148052d9 diff --git a/community-build/community-projects/scalatest b/community-build/community-projects/scalatest index d430625d9621..d6eeedbfc1e0 160000 --- a/community-build/community-projects/scalatest +++ b/community-build/community-projects/scalatest @@ -1 +1 @@ -Subproject commit d430625d96218c9031b1434cc0c2110f3740fa1c +Subproject commit 
d6eeedbfc1e04f2eff55506f07f93f448cc21407 diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz index 97cccf3b3fcb..868749fdb951 160000 --- a/community-build/community-projects/scalaz +++ b/community-build/community-projects/scalaz @@ -1 +1 @@ -Subproject commit 97cccf3b3fcb71885a32b2e567171c0f70b06104 +Subproject commit 868749fdb951909bb04bd6dd7ad2cd89295fd439 diff --git a/community-build/community-projects/scas b/community-build/community-projects/scas index fbccb263207b..acaad1055738 160000 --- a/community-build/community-projects/scas +++ b/community-build/community-projects/scas @@ -1 +1 @@ -Subproject commit fbccb263207b3a7b735b8a9dc312acf7368a0816 +Subproject commit acaad1055738dbbcae7b18e6c6c2fc95f06eb7d6 diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire index bc524eeea735..d60fe2c38848 160000 --- a/community-build/community-projects/spire +++ b/community-build/community-projects/spire @@ -1 +1 @@ -Subproject commit bc524eeea735a3cf4d5108039f95950b024a14e4 +Subproject commit d60fe2c38848ef193031c18eab3a14d3306b3761 diff --git a/community-build/community-projects/upickle b/community-build/community-projects/upickle index aa3bc0e43ec7..0c09bbcabc66 160000 --- a/community-build/community-projects/upickle +++ b/community-build/community-projects/upickle @@ -1 +1 @@ -Subproject commit aa3bc0e43ec7b618eb087753878f3d845e58277a +Subproject commit 0c09bbcabc664abf98462022fc9036a366135e70 diff --git a/community-build/community-projects/utest b/community-build/community-projects/utest index eae17c7a4d0d..f4a9789e2750 160000 --- a/community-build/community-projects/utest +++ b/community-build/community-projects/utest @@ -1 +1 @@ -Subproject commit eae17c7a4d0d63bab1406ca75791d3cb6394233d +Subproject commit f4a9789e2750523feee4a3477efb42eb15424fc7 diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 
3a44021af2df..935b95003729 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -28,7 +28,7 @@ enum SourceVersion: def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.4` + def defaultSourceVersion = `3.5` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) diff --git a/library/src/scala/quoted/ToExpr.scala b/library/src/scala/quoted/ToExpr.scala index 042c8ff37a52..6c167c353d87 100644 --- a/library/src/scala/quoted/ToExpr.scala +++ b/library/src/scala/quoted/ToExpr.scala @@ -97,7 +97,7 @@ object ToExpr { /** Default implementation of `ToExpr[Array[T]]` */ given ArrayToExpr[T: Type: ToExpr: ClassTag]: ToExpr[Array[T]] with { def apply(arr: Array[T])(using Quotes): Expr[Array[T]] = - '{ Array[T](${Expr(arr.toSeq)}*)(${Expr(summon[ClassTag[T]])}) } + '{ Array[T](${Expr(arr.toSeq)}*)(using ${Expr(summon[ClassTag[T]])}) } } /** Default implementation of `ToExpr[Array[Boolean]]` */ diff --git a/scala2-library-bootstrapped/src/scala/Array.scala b/scala2-library-bootstrapped/src/scala/Array.scala new file mode 100644 index 000000000000..d2098a76f32f --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/Array.scala @@ -0,0 +1,690 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala + +//import scala.collection.generic._ +import scala.collection.{Factory, immutable, mutable} +import mutable.ArrayBuilder +import immutable.ArraySeq +import scala.language.implicitConversions +import scala.reflect.{ClassTag, classTag} +import scala.runtime.BoxedUnit +import scala.runtime.ScalaRunTime +import scala.runtime.ScalaRunTime.{array_apply, array_update} + +/** Utility methods for operating on arrays. + * For example: + * {{{ + * val a = Array(1, 2) + * val b = Array.ofDim[Int](2) + * val c = Array.concat(a, b) + * }}} + * where the array objects `a`, `b` and `c` have respectively the values + * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. + */ +object Array { + val emptyBooleanArray = new Array[Boolean](0) + val emptyByteArray = new Array[Byte](0) + val emptyCharArray = new Array[Char](0) + val emptyDoubleArray = new Array[Double](0) + val emptyFloatArray = new Array[Float](0) + val emptyIntArray = new Array[Int](0) + val emptyLongArray = new Array[Long](0) + val emptyShortArray = new Array[Short](0) + val emptyObjectArray = new Array[Object](0) + + /** Provides an implicit conversion from the Array object to a collection Factory */ + implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) + @SerialVersionUID(3L) + private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) + def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] + } + + /** + * Returns a new [[scala.collection.mutable.ArrayBuilder]]. + */ + def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](using t) + + /** Build an array from the iterable collection. 
+ * + * {{{ + * scala> val a = Array.from(Seq(1, 5)) + * val a: Array[Int] = Array(1, 5) + * + * scala> val b = Array.from(Range(1, 5)) + * val b: Array[Int] = Array(1, 2, 3, 4) + * }}} + * + * @param it the iterable collection + * @return an array consisting of elements of the iterable collection + */ + def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { + case it: Iterable[A] => it.toArray[A] + case _ => it.iterator.toArray[A] + } + + private def slowcopy(src : AnyRef, + srcPos : Int, + dest : AnyRef, + destPos : Int, + length : Int): Unit = { + var i = srcPos + var j = destPos + val srcUntil = srcPos + length + while (i < srcUntil) { + array_update(dest, j, array_apply(src, i)) + i += 1 + j += 1 + } + } + + /** Copy one array to another. + * Equivalent to Java's + * `System.arraycopy(src, srcPos, dest, destPos, length)`, + * except that this also works for polymorphic and boxed arrays. + * + * Note that the passed-in `dest` array will be modified by this call. + * + * @param src the source array. + * @param srcPos starting position in the source array. + * @param dest destination array. + * @param destPos starting position in the destination array. + * @param length the number of array elements to be copied. + * + * @see `java.lang.System#arraycopy` + */ + def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { + val srcClass = src.getClass + if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) + java.lang.System.arraycopy(src, srcPos, dest, destPos, length) + else + slowcopy(src, srcPos, dest, destPos, length) + } + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength)`, + * except that this works for primitive and object arrays in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyOf[A](original: Array[A], newLength: Int): Array[A] = ((original: @unchecked) match { + case x: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] + case x: Array[AnyRef] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Int] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Double] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Long] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Float] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Char] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Byte] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Short] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Boolean] => java.util.Arrays.copyOf(x, newLength) + }).asInstanceOf[Array[A]] + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. The new array can have + * a different type than the original one as long as the values are + * assignment-compatible. When copying between primitive and object arrays, + * boxing and unboxing are supported. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength, newType)`, + * except that this works for all combinations of primitive and object arrays + * in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { + val runtimeClass = ct.runtimeClass + if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] + else { + val destClass = runtimeClass.asInstanceOf[Class[A]] + if (destClass.isAssignableFrom(original.getClass.getComponentType)) { + if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) + else { + val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] + java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] + } + } else { + val dest = new Array[A](newLength) + Array.copy(original, 0, dest, 0, original.length) + dest + } + } + } + + private def newUnitArray(len: Int): Array[Unit] = { + val result = new Array[Unit](len) + java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) + result + } + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = new Array[T](0) + + /** Creates an array with given elements. + * + * @param xs the elements to put in the array + * @return an array containing all elements from xs. + */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } + def apply[T: ClassTag](xs: T*): Array[T] = { + val len = xs.length + xs match { + case wa: immutable.ArraySeq[_] if wa.unsafeArray.getClass.getComponentType == classTag[T].runtimeClass => + // We get here in test/files/run/sd760a.scala, `Array[T](t)` for + // a specialized type parameter `T`. While we still pay for two + // copies of the array it is better than before when we also boxed + // each element when populating the result. 
+ ScalaRunTime.array_clone(wa.unsafeArray).asInstanceOf[Array[T]] + case _ => + val array = new Array[T](len) + val iterator = xs.iterator + var i = 0 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + } + + /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { + val array = new Array[Boolean](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Byte, xs: Byte*): Array[Byte] = { + val array = new Array[Byte](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Short, xs: Short*): Array[Short] = { + val array = new Array[Short](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Char, xs: Char*): Array[Char] = { + val array = new Array[Char](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Int, xs: Int*): Array[Int] = { + val array = new Array[Int](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Long, xs: Long*): Array[Long] = { + val array = new Array[Long](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Float, xs: Float*): Array[Float] = { + val array = new Array[Float](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Double, xs: Double*): Array[Double] = { + val array = new Array[Double](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Unit` objects */ + def apply(x: Unit, xs: Unit*): Array[Unit] = { + val array = new Array[Unit](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = + new Array[T](n1) + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { + val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) + for (i <- 0 until n1) arr(i) = new Array[T](n2) + arr + // tabulate(n1)(_ => ofDim[T](n2)) + } + /** Creates a 3-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = + tabulate(n1)(_ => ofDim[T](n2, n3)) + /** Creates a 4-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4)) + /** Creates a 5-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) + + /** Concatenates all arrays into a single array. + * + * @param xss the given arrays + * @return the array created from concatenating `xss` + */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = { + val b = newBuilder[T] + b.sizeHint(xss.map(_.length).sum) + for (xs <- xss) b ++= xs + b.result() + } + + /** Returns an array that contains the results of some element computation a number + * of times. 
+ * + * Note that this means that `elem` is computed a total of n times: + * {{{ + * scala> Array.fill(3){ math.random } + * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) + * }}} + * + * @param n the number of elements desired + * @param elem the element computation + * @return an Array of size n, where each element contains the result of computing + * `elem`. + */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = elem + i += 1 + } + array + } + } + + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Returns a three-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Returns a four-dimensional array that contains the results of some element + * computation a number of times. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Returns a five-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. + * + * @param n The number of elements in the array + * @param f The function computing element values + * @return An `Array` consisting of elements `f(0),f(1), ..., f(n - 1)` + */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = f(i) + i += 1 + } + array + } + } + + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Returns a three-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Returns a four-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Returns a five-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Returns an array containing a sequence of increasing integers in a range. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @return the array with values in range `start, start + 1, ..., end - 1` + * up to, but excluding, `end`. + */ + def range(start: Int, end: Int): Array[Int] = range(start, end, 1) + + /** Returns an array containing equally spaced values in some integer interval. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @param step the increment value of the array (may not be zero) + * @return the array with values in `start, start + step, ...` up to, but excluding `end` + */ + def range(start: Int, end: Int, step: Int): Array[Int] = { + if (step == 0) throw new IllegalArgumentException("zero step") + val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) + + var n = 0 + var i = start + while (if (step < 0) end < i else i < end) { + array(n) = i + i += step + n += 1 + } + array + } + + /** Returns an array containing repeated applications of a function to a start value. 
+ * + * @param start the start value of the array + * @param len the number of elements returned by the array + * @param f the function that is repeatedly applied + * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { + if (len > 0) { + val array = new Array[T](len) + var acc = start + var i = 1 + array(0) = acc + + while (i < len) { + acc = f(acc) + array(i) = acc + i += 1 + } + array + } else { + empty[T] + } + } + + /** Compare two arrays per element. + * + * A more efficient version of `xs.sameElements(ys)`. + * + * Note that arrays are invariant in Scala, but it may + * be sound to cast an array of arbitrary reference type + * to `Array[AnyRef]`. Arrays on the JVM are covariant + * in their element type. + * + * `Array.equals(xs.asInstanceOf[Array[AnyRef]], ys.asInstanceOf[Array[AnyRef]])` + * + * @param xs an array of AnyRef + * @param ys an array of AnyRef + * @return true if corresponding elements are equal + */ + def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = + (xs eq ys) || + (xs.length == ys.length) && { + var i = 0 + while (i < xs.length && xs(i) == ys(i)) i += 1 + i >= xs.length + } + + /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. 
+ * + * @param x the selector value + * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` + */ + def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) + + final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[T] = this + def lengthCompare(len: Int): Int = a.lengthCompare(len) + def apply(i: Int): T = a(i) + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def toSeq: scala.Seq[T] = a.toSeq // clones the array + } +} + +/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation + * for Java's `T[]`. + * + * {{{ + * val numbers = Array(1, 2, 3, 4) + * val first = numbers(0) // read the first element + * numbers(3) = 100 // replace the 4th array element with 100 + * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two + * }}} + * + * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above + * example code. + * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to + * `update(Int, T)`. + * + * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion + * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion + * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). + * Both types make available many of the standard operations found in the Scala collections API. + * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, + * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. + * + * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. 
For instance, + * consider the following code: + * + * {{{ + * val arr = Array(1, 2, 3) + * val arrReversed = arr.reverse + * val seqReversed : collection.Seq[Int] = arr.reverse + * }}} + * + * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring + * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed + * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another + * `ArraySeq`. + * + * @see [[https://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[https://docs.scala-lang.org/sips/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. 
+ * @hideImplicitConversion scala.Predef.booleanArrayOps + * @hideImplicitConversion scala.Predef.byteArrayOps + * @hideImplicitConversion scala.Predef.charArrayOps + * @hideImplicitConversion scala.Predef.doubleArrayOps + * @hideImplicitConversion scala.Predef.floatArrayOps + * @hideImplicitConversion scala.Predef.intArrayOps + * @hideImplicitConversion scala.Predef.longArrayOps + * @hideImplicitConversion scala.Predef.refArrayOps + * @hideImplicitConversion scala.Predef.shortArrayOps + * @hideImplicitConversion scala.Predef.unitArrayOps + * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray + * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray + * @define coll array + * @define Coll `Array` + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define collectExample + * @define undefinedorder + */ +final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { + + /** The length of the array */ + def length: Int = throw new Error() + + /** The element at given index. + * + * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. + * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. 
+ * + * @param i the index + * @return the element at the given index + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def apply(i: Int): T = throw new Error() + + /** Update the element at given index. + * + * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. + * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. + * + * @param i the index + * @param x the value to be written at index `i` + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def update(i: Int, x: T): Unit = { throw new Error() } + + /** Clone the Array. + * + * @return A clone of the Array. + */ + override def clone(): Array[T] = throw new Error() +} diff --git a/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala b/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala new file mode 100644 index 000000000000..d4659bbb0dba --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala @@ -0,0 +1,1664 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import java.lang.Math.{max, min} +import java.util.Arrays + +import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + booleanArrayOps => _, + byteArrayOps => _, + charArrayOps => _, + doubleArrayOps => _, + floatArrayOps => _, + intArrayOps => _, + longArrayOps => _, + refArrayOps => _, + shortArrayOps => _, + unitArrayOps => _, + genericWrapArray => _, + wrapRefArray => _, + wrapIntArray => _, + wrapDoubleArray => _, + wrapLongArray => _, + wrapFloatArray => _, + wrapCharArray => _, + wrapByteArray => _, + wrapShortArray => _, + wrapBooleanArray => _, + wrapUnitArray => _, + wrapString => _, + copyArrayToImmutableIndexedSeq => _, + _ +} +import scala.collection.Stepper.EfficientSplit +import scala.collection.immutable.Range +import scala.collection.mutable.ArrayBuilder +import scala.math.Ordering +import scala.reflect.ClassTag +import scala.util.Sorting + +object ArrayOps { + + @SerialVersionUID(3L) + private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + def length = xs.length + def apply(n: Int) = xs(n) + override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + } + + /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter[A](p: A => Boolean, xs: Array[A]) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + while(i < len) { + val x = xs(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. 
+ * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B: ClassTag](f: A => B): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val x = xs(i) + if(p(x)) b += f(x) + i = i + 1 + } + b.result() + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Creates a new non-strict filter which combines this filter with the given predicate. 
*/ + def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + } + + @SerialVersionUID(3L) + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = 0 + private[this] val len = xs.length + override def knownSize: Int = len - pos + def hasNext: Boolean = pos < len + def next(): A = { + if (pos >= xs.length) Iterator.empty.next() + val r = xs(pos) + pos += 1 + r + } + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } + this + } + } + + @SerialVersionUID(3L) + private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = xs.length-1 + def hasNext: Boolean = pos >= 0 + def next(): A = { + if (pos < 0) Iterator.empty.next() + val r = xs(pos) + pos -= 1 + r + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) pos = Math.max( -1, pos - n) + this + } + } + + @SerialVersionUID(3L) + private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private[this] var pos = 0 + def hasNext: Boolean = pos < xs.length + def next(): Array[A] = { + if(pos >= xs.length) throw new NoSuchElementException + val r = new ArrayOps(xs).slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to + * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. + */ + private final val MaxStableSortLength = 300 + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** This class serves as a wrapper for `Array`s with many of the operations found in + * indexed sequences. 
Where needed, instances of arrays are implicitly converted + * into this class. There is generally no reason to create an instance explicitly or use + * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on + * the implicit conversion to `ArrayOps` when calling a method (which does not actually + * allocate an instance of `ArrayOps` because it is a value class). + * + * Neither `Array` nor `ArrayOps` are proper collection types + * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and + * `immutable.ArraySeq` serve this purpose. + * + * The difference between this class and `ArraySeq`s is that calling transformer methods such as + * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. + * + * @tparam A type of the elements contained in this array. + */ +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + + @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def size: Int = xs.length + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def knownSize: Int = xs.length + + /** Tests whether the array is empty. + * + * @return `true` if the array contains no elements, `false` otherwise. + */ + @`inline` def isEmpty: Boolean = xs.length == 0 + + /** Tests whether the array is not empty. + * + * @return `true` if the array contains at least one element, `false` otherwise. + */ + @`inline` def nonEmpty: Boolean = xs.length != 0 + + /** Selects the first element of this array. + * + * @return the first element of this array. + * @throws NoSuchElementException if the array is empty. + */ + def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") + + /** Selects the last element. 
+ * + * @return The last element of this array. + * @throws NoSuchElementException If the array is empty. + */ + def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array") + + /** Optionally selects the first element. + * + * @return the first element of this array if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = if(isEmpty) None else Some(head) + + /** Optionally selects the last element. + * + * @return the last element of this array$ if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if(isEmpty) None else Some(last) + + /** Compares the size of this array to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + */ + def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize) + + /** Compares the length of this array to a test value. + * + * @param len the test value that gets compared with the length. + * @return A value `x` where + * {{{ + * x < 0 if this.length < len + * x == 0 if this.length == len + * x > 0 if this.length > len + * }}} + */ + def lengthCompare(len: Int): Int = Integer.compare(xs.length, len) + + /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int` + * because `size` is known and comparison is constant-time. 
+ * + * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + def sizeIs: Int = xs.length + + /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` + * because `length` is known and comparison is constant-time. + * + * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.lengthIs < len // this.lengthCompare(len) < 0 + * this.lengthIs <= len // this.lengthCompare(len) <= 0 + * this.lengthIs == len // this.lengthCompare(len) == 0 + * this.lengthIs != len // this.lengthCompare(len) != 0 + * this.lengthIs >= len // this.lengthCompare(len) >= 0 + * this.lengthIs > len // this.lengthCompare(len) > 0 + * }}} + */ + def lengthIs: Int = xs.length + + /** Selects an interval of elements. The returned array is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this array. + * @param until the lowest index to EXCLUDE from this array. + * @return an array containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this array. 
+ */ + def slice(from: Int, until: Int): Array[A] = { + import java.util.Arrays.copyOfRange + val lo = max(from, 0) + val hi = min(until, xs.length) + if (hi > lo) { + (((xs: Array[_]): @unchecked) match { + case x: Array[AnyRef] => copyOfRange(x, lo, hi) + case x: Array[Int] => copyOfRange(x, lo, hi) + case x: Array[Double] => copyOfRange(x, lo, hi) + case x: Array[Long] => copyOfRange(x, lo, hi) + case x: Array[Float] => copyOfRange(x, lo, hi) + case x: Array[Char] => copyOfRange(x, lo, hi) + case x: Array[Byte] => copyOfRange(x, lo, hi) + case x: Array[Short] => copyOfRange(x, lo, hi) + case x: Array[Boolean] => copyOfRange(x, lo, hi) + }).asInstanceOf[Array[A]] + } else new Array[A](0) + } + + /** The rest of the array without its first element. */ + def tail: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) + + /** The initial part of the array without its last element. */ + def init: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) + + /** Iterates over the tails of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this array + */ + def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) + + /** Iterates over the inits of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this array + */ + def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) + + // A helper for tails and inits. 
+ private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) + + /** An array containing the first `n` elements of this array. */ + def take(n: Int): Array[A] = slice(0, n) + + /** The rest of the array without its `n` first elements. */ + def drop(n: Int): Array[A] = slice(n, xs.length) + + /** An array containing the last `n` elements of this array. */ + def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) + + /** The rest of the array without its `n` last elements. */ + def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) + + /** Takes longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest prefix of this array whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val hi = if(i < 0) xs.length else i + slice(0, hi) + } + + /** Drops longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this array whose first element + * does not satisfy the predicate `p`. 
+ */ + def dropWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val lo = if(i < 0) xs.length else i + slice(lo, xs.length) + } + + def iterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = (shape.shape: @unchecked) match { + case StepperShape.ReferenceShape => (xs: Any) match { + case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) + case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) + } + case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) + case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) + case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) + case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) + case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) + case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) + case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length) + } 
+ s.asInstanceOf[S with EfficientSplit] + } + + /** Partitions elements in fixed size arrays. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing arrays of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) + + /** Splits this array into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this array whose + * elements all satisfy `p`, and the rest of this array. + */ + def span(p: A => Boolean): (Array[A], Array[A]) = { + val i = indexWhere(x => !p(x)) + val idx = if(i < 0) xs.length else i + (slice(0, idx), slice(idx, xs.length)) + } + + /** Splits this array into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of arrays consisting of the first `n` + * elements of this array, and the other elements. + */ + def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. */ + def partition(p: A => Boolean): (Array[A], Array[A]) = { + val res1, res2 = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + (if(p(x)) res1 else res2) += x + i += 1 + } + (res1.result(), res2.result()) + } + + /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. 
+ * + * Example: + * {{{ + * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == (Array(1, 2, 3), + * // Array(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] + * + * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. */ + def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + val res1 = ArrayBuilder.make[A1] + val res2 = ArrayBuilder.make[A2] + var i = 0 + while(i < xs.length) { + f(xs(i)) match { + case Left(x) => res1 += x + case Right(x) => res2 += x + } + i += 1 + } + (res1.result(), res2.result()) + } + + /** Returns a new array with the elements in reversed order. */ + @inline def reverse: Array[A] = { + val len = xs.length + val res = new Array[A](len) + var i = 0 + while(i < len) { + res(len-i-1) = xs(i) + i += 1 + } + res + } + + /** An iterator yielding elements in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. 
+ * + * @return an iterator yielding the elements of this array in reversed order + */ + def reverseIterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Int] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Double] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Long] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Float] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Char] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Short] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + /** Selects all elements of this array which satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that satisfy the given predicate `p`. + */ + def filter(p: A => Boolean): Array[A] = { + val res = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) res += x + i += 1 + } + res.result() + } + + /** Selects all elements of this array which do not satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`. + */ + def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x)) + + /** Sorts this array according to an Ordering. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * + * @param ord the ordering to be used to compare elements. + * @return an array consisting of the elements of this array + * sorted according to the ordering `ord`. 
+ */ + def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = { + val len = xs.length + def boxed = if(len < ArrayOps.MaxStableSortLength) { + val a = xs.clone() + Sorting.stableSort(a)(using ord.asInstanceOf[Ordering[A]]) + a + } else { + val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + Array.copyAs[A](a, len) + } + if(len <= 1) xs.clone() + else ((xs: Array[_]) match { + case xs: Array[AnyRef] => + val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a + case xs: Array[Int] => + if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Long] => + if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Char] => + if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Byte] => + if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Short] => + if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Boolean] => + if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a } + else boxed + case xs => boxed + }).asInstanceOf[Array[A]] + } + + /** Sorts this array according to a comparison function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @param lt the comparison function which tests whether + * its first argument precedes its second argument in + * the desired ordering. + * @return an array consisting of the elements of this array + * sorted according to the comparison function `lt`. 
+ */ + def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt)) + + /** Sorts this array according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return an array consisting of the elements of this array + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + */ + def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f) + + /** Creates a non-strict filter of this array. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new array, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * + * @param p the predicate used to test elements. + * @return an object of class `ArrayOps.WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this array + * which satisfy the predicate `p`. + */ + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + + /** Finds index of first occurrence of some value in this array after or at some start index. + * + * @param elem the element value to search for. + * @param from the start index + * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf(elem: A, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(elem == xs(i)) return i + i += 1 + } + -1 + } + + /** Finds index of the first element satisfying some predicate after or at some start index. 
+ * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(p(xs(i))) return i + i += 1 + } + -1 + } + + /** Finds index of last occurrence of some value in this array before or at a given end index. + * + * @param elem the element value to search for. + * @param end the end index. + * @return the index `<= end` of the last element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(elem == xs(i)) return i + i -= 1 + } + -1 + } + + /** Finds index of last element satisfying some predicate before or at given end index. + * + * @param p the predicate used to test elements. + * @return the index `<= end` of the last element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(p(xs(i))) return i + i -= 1 + } + -1 + } + + /** Finds the first element of the array satisfying a predicate, if any. + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the array + * that satisfies `p`, or `None` if none exists. + */ + def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = { + val idx = indexWhere(p) + if(idx == -1) None else Some(xs(idx)) + } + + /** Tests whether a predicate holds for at least one element of this array. + * + * @param p the predicate used to test elements. 
+ * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` + */ + def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 + + /** Tests whether a predicate holds for all elements of this array. + * + * @param p the predicate used to test elements. + * @return `true` if this array is empty or the given predicate `p` + * holds for all elements of this array, otherwise `false`. + */ + def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { + var i = 0 + while(i < xs.length) { + if(!p(xs(i))) return false + i += 1 + } + true + } + + /** Applies a binary operator to a start value and all elements of this array, + * going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this array, + * going left to right with the start value `z` on the left: + * {{{ + * op(...op(z, x_1), x_2, ..., x_n) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. 
+ */ + def foldLeft[B](z: B)(op: (B, A) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + val length = xs.length + var v: Any = z + var i = 0 + while(i < length) { + v = op(v, xs(i)) + i += 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException // null-check first helps static analysis of instanceOf + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + } + + /** Produces an array containing cumulative results of applying the binary + * operator going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) + * }}} + * + */ + def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + var v = z + var i = 0 + val res = new Array[B](xs.length + 1) + while(i < xs.length) { + res(i) = v + v = op(v, xs(i)) + i += 1 + } + res(i) = v + res + } + + /** Computes a prefix scan of the elements of the array. + * + * Note: The neutral element `z` may be applied more than once. 
+ * + * @tparam B element type of the resulting array + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new array containing the prefix scan of the elements in this array + */ + def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + + /** Produces an array containing cumulative results of applying the binary + * operator going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) + * }}} + * + */ + def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + var v = z + var i = xs.length - 1 + val res = new Array[B](xs.length + 1) + res(xs.length) = z + while(i >= 0) { + v = op(xs(i), v) + res(i) = v + i -= 1 + } + res + } + + /** Applies a binary operator to all elements of this array and a start value, + * going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this array, + * going right to left with the start value `z` on the right: + * {{{ + * op(x_1, op(x_2, ... op(x_n, z)...)) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. 
+ */ + def foldRight[B](z: B)(op: (A, B) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + var v = z + var i = xs.length - 1 + while(i >= 0) { + v = op(xs(i), v) + i -= 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + + } + + /** Folds the elements of this array using the specified associative binary operator. + * + * @tparam A1 a type parameter for the binary operator, a supertype of `A`. + * @param z a neutral element for the fold operation; may be added to the result + * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. + * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. 
+ * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + val len = xs.length + val ys = new Array[B](len) + if(len > 0) { + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + ys + } + + def mapInPlace(f: A => A): Array[A] = { + var i = 0 + while (i < xs.length) { + xs.update(i, f(xs(i))) + i = i + 1 + } + xs + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. 
+ */ + def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Flattens a two-dimensional array by concatenating all its rows + * into a single array. + * + * @tparam B Type of row elements. + * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. + * @return An array obtained by concatenating rows of this array. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val len = xs.length + var size = 0 + var i = 0 + while(i < len) { + xs(i) match { + case it: IterableOnce[_] => + val k = it.knownSize + if(k > 0) size += k + case a: Array[_] => size += a.length + case _ => + } + i += 1 + } + if(size > 0) b.sizeHint(size) + i = 0 + while(i < len) { + b ++= asIterable(xs(i)) + i += 1 + } + b.result() + } + + /** Builds a new array by applying a partial function to all elements of this array + * on which the function is defined. + * + * @param pf the partial function which filters and maps the array. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
+ */ + def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + val fallback: Any => Any = ArrayOps.fallback + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Finds the first element of the array for which the given partial function is defined, and applies the + * partial function to it. */ + def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { + val fallback: Any => Any = ArrayOps.fallback + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) + i += 1 + } + None + } + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the minimum of the lengths of this array and `that`. + */ + def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + val b = new ArrayBuilder.ofRef[(A, B)]() + val k = that.knownSize + b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + b.result() + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. 
+ * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the maximum of the lengths of this array and `that`. + * If this array is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this array, `thatElem` values are used to pad the result. 
+ */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + val b = new ArrayBuilder.ofRef[(A1, B)]() + val k = that.knownSize + b.sizeHint(max(k, xs.length)) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + while(it.hasNext) { + b += ((thisElem, it.next())) + i += 1 + } + while(i < xs.length) { + b += ((xs(i), thatElem)) + i += 1 + } + b.result() + } + + /** Zips this array with its indices. + * + * @return A new array containing pairs consisting of all elements of this array paired with their index. + * Indices start at `0`. + */ + def zipWithIndex: Array[(A, Int)] = { + val b = new Array[(A, Int)](xs.length) + var i = 0 + while(i < xs.length) { + b(i) = ((xs(i), i)) + i += 1 + } + b + } + + /** A copy of this array with an element appended. */ + def appended[B >: A : ClassTag](x: B): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+1) + dest(xs.length) = x + dest + } + + @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + + /** A copy of this array with an element prepended. */ + def prepended[B >: A : ClassTag](x: B): Array[B] = { + val dest = new Array[B](xs.length + 1) + dest(0) = x + Array.copy(xs, 0, dest, 1, xs.length) + dest + } + + @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + + /** A copy of this array with all elements of a collection prepended. */ + def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = prefix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + xs.length) + b.addAll(xs) + b.result() + } + + /** A copy of this array with all elements of an array prepended. 
*/ + def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](prefix, prefix.length+xs.length) + Array.copy(xs, 0, dest, prefix.length, xs.length) + dest + } + + @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + + @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + + /** A copy of this array with all elements of a collection appended. */ + def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = suffix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(xs) + b.addAll(suffix) + b.result() + } + + /** A copy of this array with all elements of an array appended. */ + def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+suffix.length) + Array.copy(suffix, 0, dest, xs.length, suffix.length) + dest + } + + @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + + @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + + /** Tests whether this array contains a given value as an element. + * + * @param elem the element to test. + * @return `true` if this array has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains(elem: A): Boolean = exists (_ == elem) + + /** Returns a copy of this array with patched values. 
+ * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original array appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from The start index from which to patch + * @param other The patch values + * @param replaced The number of values in the original array that are replaced by the patch. + */ + def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + val b = ArrayBuilder.make[B] + val k = other.knownSize + val r = if(replaced < 0) 0 else replaced + if(k >= 0) b.sizeHint(xs.length + k - r) + val chunk1 = if(from > 0) min(from, xs.length) else 0 + if(chunk1 > 0) b.addAll(xs, 0, chunk1) + b ++= other + val remaining = xs.length - chunk1 - r + if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining) + b.result() + } + + /** Converts an array of pairs into an array of first elements and an array of second elements. + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this Array is a pair. + * @param ct1 a class tag for `A1` type parameter that is required to create an instance + * of `Array[A1]` + * @param ct2 a class tag for `A2` type parameter that is required to create an instance + * of `Array[A2]` + * @return a pair of Arrays, containing, respectively, the first and second half + * of each element pair of this Array. 
+ */ + def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + var i = 0 + while (i < xs.length) { + val e = asPair(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + i += 1 + } + (a1, a2) + } + + /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. + * + * @tparam A1 the type of the first of three elements in the triple + * @tparam A2 the type of the second of three elements in the triple + * @tparam A3 the type of the third of three elements in the triple + * @param asTriple an implicit conversion which asserts that the element type + * of this Array is a triple. + * @param ct1 a class tag for T1 type parameter that is required to create an instance + * of Array[T1] + * @param ct2 a class tag for T2 type parameter that is required to create an instance + * of Array[T2] + * @param ct3 a class tag for T3 type parameter that is required to create an instance + * of Array[T3] + * @return a triple of Arrays, containing, respectively, the first, second, and third + * elements from each element triple of this Array. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], + ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + val a3 = new Array[A3](xs.length) + var i = 0 + while (i < xs.length) { + val e = asTriple(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + a3(i) = e._3 + i += 1 + } + (a1, a2, a3) + } + + /** Transposes a two dimensional array. + * + * @tparam B Type of row elements. + * @param asArray A function that converts elements of this array to rows - arrays of type `B`. + * @return An array obtained by replacing elements of this arrays with rows the represent. 
+ */ + def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](using ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- this) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + + /** Selects all the elements of this array ignoring the duplicates. + * + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinct: Array[A] = distinctBy(identity) + + /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying + * the transforming function `f`. 
+ * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinctBy[B](f: A => B): Array[A] = + ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() + + /** A copy of this array with an element value appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @tparam B the element type of the returned array. + * @return a new array consisting of + * all elements of this array followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. + */ + def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + var i = xs.length + val newlen = max(i, len) + val dest = Array.copyAs[B](xs, newlen) + while(i < newlen) { + dest(i) = elem + i += 1 + } + dest + } + + /** Produces the range of all indices of this sequence. + * + * @return a `Range` value from `0` to one less than the length of this array. + */ + def indices: Range = Range(0, xs.length) + + /** Partitions this array into a map of arrays according to some discriminator function. + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to arrays such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to an array of those elements `x` + * for which `f(x)` equals `k`. 
+ */ + def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { + val m = mutable.Map.empty[K, ArrayBuilder[A]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val key = f(elem) + val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) + bldr += elem + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + /** + * Partitions this array into a map of arrays according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Array[User]): Map[Int, Array[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + val m = mutable.Map.empty[K, ArrayBuilder[B]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val k = key(elem) + val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) + bldr += f(elem) + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq + + def toIndexedSeq: immutable.IndexedSeq[A] = + immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + + /** Copy elements of this array to another array. 
+ * Fills the given array `xs` starting at index `start`. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start` with at most `len` values. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) + if (copied > 0) { + Array.copy(this.xs, 0, xs, start, copied) + } + copied + } + + /** Create a copy of this array with the specified element type. */ + def toArray[B >: A: ClassTag]: Array[B] = { + val destination = new Array[B](xs.length) + @annotation.unused val copied = copyToArray(destination, 0) + //assert(copied == xs.length) + destination + } + + /** Counts the number of elements in this array which satisfy a predicate */ + def count(p: A => Boolean): Int = { + var i, res = 0 + val len = xs.length + while(i < len) { + if(p(xs(i))) res += 1 + i += 1 + } + res + } + + // can't use a default arg because we already have another overload with a default arg + /** Tests whether this array starts with the given array. */ + @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + + /** Tests whether this array contains the given array at a given index. 
+ * + * @param that the array to test + * @param offset the index where the array is searched. + * @return `true` if the array `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + val safeOffset = offset.max(0) + val thatl = that.length + if(thatl > xs.length-safeOffset) thatl == 0 + else { + var i = 0 + while(i < thatl) { + if(xs(i+safeOffset) != that(i)) return false + i += 1 + } + true + } + } + + /** Tests whether this array ends with the given array. + * + * @param that the array to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Array[B]): Boolean = { + val thatl = that.length + val off = xs.length - thatl + if(off < 0) false + else { + var i = 0 + while(i < thatl) { + if(xs(i+off) != that(i)) return false + i += 1 + } + true + } + } + + /** A copy of this array with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + */ + def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") + val dest = toArray[B] + dest(index) = elem + dest + } + + @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which + may not provide the best possible performance. 
We need them in `ArrayOps` because their return type + mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this array and another sequence. + * + * @param that the sequence of elements to remove + * @return a new array which contains all elements of this array + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] + + /** Computes the multiset intersection between this array and another sequence. + * + * @param that the sequence of elements to intersect with. + * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive groups + * @return An iterator producing arrays of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. 
+ */ + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. 
+ * + * @return An Iterator which traverses the n-element combinations of this array + * @example {{{ + * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b) + * // Array(a, c) + * // Array(b, b) + * // Array(b, c) + * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(b, b) + * // Array(b, a) + * }}} + */ + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this array. + * @example {{{ + * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b, b) + * // Array(b, a, b) + * // Array(b, b, a) + * }}} + */ + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array contains the given sequence at a given index. + * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array ends with the given sequence. + * + * @param that the sequence to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. 
+ */ + def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) +} diff --git a/scala2-library-bootstrapped/src/scala/collection/Factory.scala b/scala2-library-bootstrapped/src/scala/collection/Factory.scala new file mode 100644 index 000000000000..6006f292bb19 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/Factory.scala @@ -0,0 +1,784 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.collection.immutable.NumericRange +import scala.language.implicitConversions +import scala.collection.mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.reflect.ClassTag + +/** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * This is a general form of any factory ([[IterableFactory]], + * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose + * element type is fixed. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait Factory[-A, +C] extends Any { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ + def newBuilder: Builder[A, C] +} + +object Factory { + + implicit val stringFactory: Factory[Char, String] = new StringFactory + @SerialVersionUID(3L) + private class StringFactory extends Factory[Char, String] with Serializable { + def fromSpecific(it: IterableOnce[Char]): String = { + val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[Char, String] = new mutable.StringBuilder() + } + + implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + @SerialVersionUID(3L) + private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = { + val b = newBuilder + b.sizeHint(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A] + } + +} + +/** Base trait for companion objects of unconstrained collection types that may require + * multiple traversals of a source collection to build a target collection `CC`. + * + * @tparam CC Collection type constructor (e.g. `List`) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait IterableFactory[+CC[_]] extends Serializable { + + /** Creates a target $coll from an existing source collection + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new $coll with the elements of `source` + */ + def from[A](source: IterableOnce[A]): CC[A] + + /** An empty collection + * @tparam A the type of the ${coll}'s elements + */ + def empty[A]: CC[A] + + /** Creates a $coll with the specified elements. 
+ * @tparam A the type of the ${coll}'s elements + * @param elems the elements of the created $coll + * @return a new $coll with elements `elems` + */ + def apply[A](elems: A*): CC[A] = from(elems) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. 
+ * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** + * @return A builder for $Coll objects. + * @tparam A the type of the ${coll}’s elements + */ + def newBuilder[A]: Builder[A, CC[A]] + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. 
+ */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. + */ + def concat[A](xss: Iterable[A]*): CC[A] = { + from(xss.foldLeft(View.empty[A])(_ ++ _)) + } + + implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) +} + +object IterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`Seq`, `List`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = + new BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder + } + + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `List`) + */ +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
+} + +object SeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } + + final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[A] = this + def lengthCompare(len: Int): Int = c.lengthCompare(len) + def apply(i: Int): A = c(i) + def drop(n: Int): scala.Seq[A] = c match { + case seq: scala.Seq[A] => seq.drop(n) + case _ => c.view.drop(n).toSeq + } + def toSeq: scala.Seq[A] = c.toSeq + } +} + +trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { + + override def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + override def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + val knownSizes = xss.view.map(_.knownSize) + if (knownSizes forall (_ >= 0)) { + b.sizeHint(knownSizes.sum) + } + for (xs <- xss) b ++= xs + b.result() + } + +} + +/** + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. 
+ * + * @define coll collection + * @define Coll `Iterable` + */ +trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + def empty: C + def apply(xs: A*): C = fromSpecific(xs) + def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) + def newBuilder: Builder[A, C] + + implicit def specificIterableFactory: Factory[A, C] = this +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait MapFactory[+CC[_, _]] extends Serializable { + + /** + * An empty Map + */ + def empty[K, V]: CC[K, V] + + /** + * A collection of type Map generated from given iterable object. + */ + def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + + /** + * A collection of type Map that contains given key/value bindings. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) + + /** + * The default builder for Map objects. + */ + def newBuilder[K, V]: Builder[(K, V), CC[K, V]] + + /** + * The default Factory instance for maps. + */ + implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) +} + +object MapFactory { + + /** + * Fixes the key and value types of `factory` to `K` and `V`, respectively + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) 
+ * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` + * and values of type `V` + */ + implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = + new BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { + override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) + def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def empty[K, V]: C[K, V] = delegate.empty + def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder + } +} + +/** Base trait for companion objects of collections that require an implicit evidence. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + * @tparam Ev Unary type constructor for the implicit evidence required for an element type + * (typically `Ordering` or `ClassTag`) + * + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { + + def from[E : Ev](it: IterableOnce[E]): CC[E] + + def empty[A : Ev]: CC[A] + + def apply[A : Ev](xs: A*): CC[A] = from(xs) + + /** Produces a $coll containing the results of some element computation a number of times. 
+ * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + def newBuilder[A : Ev]: Builder[A, CC[A]] + + implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this) +} + +object EvidenceIterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`TreeSet`) + * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) + private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] + } + + @SerialVersionUID(3L) + class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { + override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) + def empty[A : Ev]: CC[A] = delegate.empty + def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** Base trait for companion objects of collections that require an implicit `Ordering`. + * @tparam CC Collection type constructor (e.g. 
`SortedSet`) + */ +trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering] + +object SortedIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering]) + extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC] +} + +/** Base trait for companion objects of collections that require an implicit `ClassTag`. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] { + + @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] = + ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. 
+ */ + def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. 
+ */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
+ */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) +} + +object ClassTagIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) + extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] + + /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. 
*/ + @SerialVersionUID(3L) + class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { + def empty[A]: CC[A] = delegate.empty(using ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(using ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(using ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(using ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(using ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(using ClassTag.Any).asInstanceOf[CC[A]] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
+} + +object ClassTagSeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] + + /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] +} + +trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { + + override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SortedMapFactory[+CC[_, _]] extends Serializable { + + def empty[K : Ordering, V]: CC[K, V] + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + + def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) + + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] + + implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) + +} + +object SortedMapFactory { + + /** + * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, + * respectively. 
+ * + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of + * type `K` and values of type `V` + */ + implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) + private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { + override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def empty[K : Ordering, V]: CC[K, V] = delegate.empty + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/Iterable.scala b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala new file mode 100644 index 000000000000..8f9142583b29 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala @@ -0,0 +1,1043 @@ +/* + * Scala 
(https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder +import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} + +/** Base trait for generic collections. + * + * @tparam A the element type of the collection + * + * @define Coll `Iterable` + * @define coll iterable collection + */ +trait Iterable[+A] extends IterableOnce[A] + with IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { + + // The collection itself + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + final def toIterable: this.type = this + + final protected def coll: this.type = this + + def iterableFactory: IterableFactory[Iterable] = Iterable + + @deprecated("Iterable.seq always returns the iterable itself", "2.13.0") + def seq: this.type = this + + /** Defines the prefix of this object's `toString` representation. + * + * It is recommended to return the name of the concrete collection type, but + * not implementation subclasses. For example, for `ListMap` this method should + * return `"ListMap"`, not `"Map"` (the supertype) or `"Node"` (an implementation + * subclass). + * + * The default implementation returns "Iterable". It is overridden for the basic + * collection kinds "Seq", "IndexedSeq", "LinearSeq", "Buffer", "Set", "Map", + * "SortedSet", "SortedMap" and "View". + * + * @return a string representation which starts the result of `toString` + * applied to this $coll. 
By default the string prefix is the + * simple name of the collection class $coll. + */ + protected[this] def className: String = stringPrefix + + /** Forwarder to `className` for use in `scala.runtime.ScalaRunTime`. + * + * This allows the proper visibility for `className` to be + * published, but provides the exclusive access needed by + * `scala.runtime.ScalaRunTime.stringOf` (and a few tests in + * the test suite). + */ + private[scala] final def collectionClassName: String = className + + @deprecatedOverriding("Override className instead", "2.13.0") + protected[this] def stringPrefix: String = "Iterable" + + /** Converts this $coll to a string. + * + * @return a string representation of this collection. By default this + * string consists of the `className` of this $coll, followed + * by all elements separated by commas and enclosed in parentheses. + */ + override def toString = mkString(className + "(", ", ", ")") + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. 
+ */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) +} + +/** Base trait for Iterable operations + * + * =VarianceNote= + * + * We require that for all child classes of Iterable the variance of + * the child class and the variance of the `C` parameter passed to `IterableOps` + * are the same. We cannot express this since we lack variance polymorphism. That's + * why we have to resort at some places to write `C[A @uncheckedVariance]`. + * + * @tparam CC type constructor of the collection (e.g. `List`, `Set`). Operations returning a collection + * with a different type of element `B` (e.g. `map`) return a `CC[B]`. + * @tparam C type of the collection (e.g. `List[Int]`, `String`, `BitSet`). Operations returning a collection + * with the same type of element (e.g. `drop`, `filter`) return a `C`. + * + * @define Coll Iterable + * @define coll iterable collection + * @define orderDependent + * + * Note: might return different results for different runs, unless the underlying collection type is ordered. + * @define orderDependentFold + * + * Note: might return different results for different runs, unless the + * underlying collection type is ordered or the operator is associative + * and commutative. + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + * @define undefinedorder + * The order in which operations are performed on elements is unspecified + * and may be nondeterministic. + */ +trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { + /** + * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. 
+ */ + // Should be `protected def asIterable`, or maybe removed altogether if it's not needed + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + def toIterable: Iterable[A] + + /** Converts this $coll to an unspecified Iterable. Will return + * the same collection if this instance is already Iterable. + * @return An Iterable containing all elements of this $coll. + */ + @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") + final def toTraversable: Traversable[A] = toIterable + + override def isTraversableAgain: Boolean = true + + /** + * @return This collection as a `C`. + */ + protected def coll: C + + @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") + final def repr: C = coll + + /** + * Defines how to turn a given `Iterable[A]` into a collection of type `C`. + * + * This process can be done in a strict way or a non-strict way (ie. without evaluating + * the elements of the resulting collections). In other words, this methods defines + * the evaluation model of the collection. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method + * might be unsound. However, as long as it is called with an + * `Iterable[A]` obtained from `this` collection (as it is the case in the + * implementations of operations where we use a `View[A]`), it is safe. 
+ */ + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): C + + /** The companion object of this ${coll}, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def iterableFactory: IterableFactory[CC] + + @deprecated("Use iterableFactory instead", "2.13.0") + @deprecatedOverriding("Use iterableFactory instead", "2.13.0") + @`inline` def companion: IterableFactory[CC] = iterableFactory + + /** + * @return a strict builder for the same collection type. + * + * Note that in the case of lazy collections (e.g. [[scala.collection.View]] or [[scala.collection.immutable.LazyList]]), + * it is possible to implement this method but the resulting `Builder` will break laziness. + * As a consequence, operations should preferably be implemented with `fromSpecific` + * instead of this method. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method might + * be unsound. However, as long as the returned builder is only fed + * with `A` values taken from `this` instance, it is safe. + */ + protected def newSpecificBuilder: Builder[A @uncheckedVariance, C] + + /** The empty iterable of the same type as this iterable + * + * @return an empty iterable of type `C`. + */ + def empty: C = fromSpecific(Nil) + + /** Selects the first element of this $coll. + * $orderDependent + * @return the first element of this $coll. + * @throws NoSuchElementException if the $coll is empty. 
+ */ + def head: A = iterator.next() + + /** Optionally selects the first element. + * $orderDependent + * @return the first element of this $coll if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = { + val it = iterator + if (it.hasNext) Some(it.next()) else None + } + + /** Selects the last element. + * $orderDependent + * @return The last element of this $coll. + * @throws NoSuchElementException If the $coll is empty. + */ + def last: A = { + val it = iterator + var lst = it.next() + while (it.hasNext) lst = it.next() + lst + } + + /** Optionally selects the last element. + * $orderDependent + * @return the last element of this $coll$ if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if (isEmpty) None else Some(last) + + /** A view over the elements of this collection. */ + def view: View[A] = View.fromIteratorProvider(() => iterator) + + /** Compares the size of this $coll to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(size min otherSize)` instead of `O(size)`. The method should be overridden + * if computing `size` is cheap and `knownSize` returns `-1`. + * + * @see [[sizeIs]] + */ + def sizeCompare(otherSize: Int): Int = { + if (otherSize < 0) 1 + else { + val known = knownSize + if (known >= 0) Integer.compare(known, otherSize) + else { + var i = 0 + val it = iterator + while (it.hasNext) { + if (i == otherSize) return 1 + it.next() + i += 1 + } + i - otherSize + } + } + } + + /** Returns a value class containing operations for comparing the size of this $coll to a test value. 
+ * + * These operations are implemented in terms of [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + @inline final def sizeIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) + + /** Compares the size of this $coll to the size of another `Iterable`. + * + * @param that the `Iterable` whose size is compared with this $coll's size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < that.size + * x == 0 if this.size == that.size + * x > 0 if this.size > that.size + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(this.size min that.size)` instead of `O(this.size + that.size)`. + * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. + */ + def sizeCompare(that: Iterable[_]): Int = { + val thatKnownSize = that.knownSize + + if (thatKnownSize >= 0) this sizeCompare thatKnownSize + else { + val thisKnownSize = this.knownSize + + if (thisKnownSize >= 0) { + val res = that sizeCompare thisKnownSize + // can't just invert the result, because `-Int.MinValue == Int.MinValue` + if (res == Int.MinValue) 1 else -res + } else { + val thisIt = this.iterator + val thatIt = that.iterator + while (thisIt.hasNext && thatIt.hasNext) { + thisIt.next() + thatIt.next() + } + java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext) + } + } + } + + /** A view over a slice of the elements of this collection. 
*/
+  @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0")
+  def view(from: Int, until: Int): View[A] = view.slice(from, until)
+
+  /** Transposes this $coll of iterable collections into
+   *  a $coll of ${coll}s.
+   *
+   *  The resulting collection's type will be guided by the
+   *  static type of $coll. For example:
+   *
+   *  {{{
+   *  val xs = List(
+   *             Set(1, 2, 3),
+   *             Set(4, 5, 6)).transpose
+   *  // xs == List(
+   *  //         List(1, 4),
+   *  //         List(2, 5),
+   *  //         List(3, 6))
+   *
+   *  val ys = Vector(
+   *             List(1, 2, 3),
+   *             List(4, 5, 6)).transpose
+   *  // ys == Vector(
+   *  //         Vector(1, 4),
+   *  //         Vector(2, 5),
+   *  //         Vector(3, 6))
+   *  }}}
+   *
+   *  $willForceEvaluation
+   *
+   *  @tparam B the type of the elements of each iterable collection.
+   *  @param asIterable an implicit conversion which asserts that the
+   *                    element type of this $coll is an `Iterable`.
+   *  @return a two-dimensional $coll of ${coll}s which has as ''n''th row
+   *          the ''n''th column of this $coll.
+   *  @throws IllegalArgumentException if all collections in this $coll
+   *                                   are not of the same size.
+   */
+  def transpose[B](implicit asIterable: A => /*<:<!!!*/ Iterable[B]): CC[CC[B] @uncheckedVariance] = {
+    if (isEmpty)
+      return iterableFactory.empty[CC[B]]
+
+    def fail = throw new IllegalArgumentException("transpose requires all collections have the same size")
+
+    val headSize = asIterable(head).size
+    val bs: immutable.IndexedSeq[Builder[B, CC[B]]] = immutable.IndexedSeq.fill(headSize)(iterableFactory.newBuilder[B])
+    for (xs <- this) {
+      var i = 0
+      for (x <- asIterable(xs)) {
+        if (i >= headSize) fail
+        bs(i) += x
+        i += 1
+      }
+      if (i != headSize)
+        fail
+    }
+    iterableFactory.from(bs.map(_.result()))
+  }
+
+  def filter(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = false))
+
+  def filterNot(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = true))
+
+  /** Creates a non-strict filter of this $coll.
+    *
+    *  Note: the difference between `c filter p` and `c withFilter p` is that
+    *        the former creates a new collection, whereas the latter only
+    *        restricts the domain of subsequent `map`, `flatMap`, `foreach`,
+    *        and `withFilter` operations.
+    * $orderDependent
+    *
+    *  @param p   the predicate used to test elements.
+    *  @return    an object of class `WithFilter`, which supports
+    *             `map`, `flatMap`, `foreach`, and `withFilter` operations.
+ * All these operations apply to those elements of this $coll + * which satisfy the predicate `p`. + */ + def withFilter(p: A => Boolean): collection.WithFilter[A, CC] = new IterableOps.WithFilter(this, p) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, + * all elements that do not. Interesting because it splits a collection in two. + * + * The default implementation provided here needs to traverse the collection twice. + * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, + * which requires only a single traversal. + */ + def partition(p: A => Boolean): (C, C) = { + val first = new View.Filter(this, p, false) + val second = new View.Filter(this, p, true) + (fromSpecific(first), fromSpecific(second)) + } + + override def splitAt(n: Int): (C, C) = (take(n), drop(n)) + + def take(n: Int): C = fromSpecific(new View.Take(this, n)) + + /** Selects the last ''n'' elements. + * $orderDependent + * @param n the number of elements to take from this $coll. + * @return a $coll consisting only of the last `n` elements of this $coll, + * or else the whole $coll, if it has less than `n` elements. + * If `n` is negative, returns an empty $coll. + */ + def takeRight(n: Int): C = fromSpecific(new View.TakeRight(this, n)) + + /** Takes longest prefix of elements that satisfy a predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest prefix of this $coll whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): C = fromSpecific(new View.TakeWhile(this, p)) + + def span(p: A => Boolean): (C, C) = (takeWhile(p), dropWhile(p)) + + def drop(n: Int): C = fromSpecific(new View.Drop(this, n)) + + /** Selects all elements except last ''n'' ones. + * $orderDependent + * @param n the number of elements to drop from this $coll. 
+ * @return a $coll consisting of all elements of this $coll except the last `n` ones, or else the + * empty $coll, if this $coll has less than `n` elements. + * If `n` is negative, don't drop any elements. + */ + def dropRight(n: Int): C = fromSpecific(new View.DropRight(this, n)) + + def dropWhile(p: A => Boolean): C = fromSpecific(new View.DropWhile(this, p)) + + /** Partitions elements in fixed size ${coll}s. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[C] = + iterator.grouped(size).map(fromSpecific) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in `grouped`.) + * + * An empty collection returns an empty iterator, and a non-empty + * collection containing fewer elements than the window size returns + * an iterator that will produce the original collection as its only + * element. + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except for a + * non-empty collection with less than `size` elements, which + * returns an iterator that produces the source collection itself + * as its only element. + * @example `List().sliding(2) = empty iterator` + * @example `List(1).sliding(2) = Iterator(List(1))` + * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` + * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` + */ + def sliding(size: Int): Iterator[C] = sliding(size, 1) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * + * The returned iterator will be empty when called on an empty collection. 
+ * The last element the iterator produces may be smaller than the window + * size when the original collection isn't exhausted by the window before + * it and its last element isn't skipped by the step before it. + * + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive + * groups + * @return An iterator producing ${coll}s of size `size`, except the last + * element (which may be the only element) will be smaller + * if there are fewer than `size` elements remaining to be grouped. + * @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + */ + def sliding(size: Int, step: Int): Iterator[C] = + iterator.sliding(size, step).map(fromSpecific) + + /** The rest of the collection without its first element. */ + def tail: C = { + if (isEmpty) throw new UnsupportedOperationException + drop(1) + } + + /** The initial part of the collection without its last element. + * $willForceEvaluation + */ + def init: C = { + if (isEmpty) throw new UnsupportedOperationException + dropRight(1) + } + + def slice(from: Int, until: Int): C = + fromSpecific(new View.Drop(new View.Take(this, until), from)) + + /** Partitions this $coll into a map of ${coll}s according to some discriminator function. + * + * $willForceEvaluation + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to ${coll}s such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to a $coll of those elements `x` + * for which `f(x)` equals `k`. 
+ * + */ + def groupBy[K](f: A => K): immutable.Map[K, C] = { + val m = mutable.Map.empty[K, Builder[A, C]] + val it = iterator + while (it.hasNext) { + val elem = it.next() + val key = f(elem) + val bldr = m.getOrElseUpdate(key, newSpecificBuilder) + bldr += elem + } + var result = immutable.HashMap.empty[K, C] + val mapIt = m.iterator + while (mapIt.hasNext) { + val (k, v) = mapIt.next() + result = result.updated(k, v.result()) + } + result + } + + /** + * Partitions this $coll into a map of ${coll}s according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Seq[User]): Map[Int, Seq[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * $willForceEvaluation + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B](key: A => K)(f: A => B): immutable.Map[K, CC[B]] = { + val m = mutable.Map.empty[K, Builder[B, CC[B]]] + for (elem <- this) { + val k = key(elem) + val bldr = m.getOrElseUpdate(k, iterableFactory.newBuilder[B]) + bldr += f(elem) + } + class Result extends runtime.AbstractFunction1[(K, Builder[B, CC[B]]), Unit] { + var built = immutable.Map.empty[K, CC[B]] + def apply(kv: (K, Builder[B, CC[B]])) = + built = built.updated(kv._1, kv._2.result()) + } + val result = new Result + m.foreach(result) + result.built + } + + /** + * Partitions this $coll into a map according to a discriminator function `key`. All the values that + * have the same discriminator are then transformed by the `f` function and then reduced into a + * single value with the `reduce` function. 
+ * + * It is equivalent to `groupBy(key).mapValues(_.map(f).reduce(reduce))`, but more efficient. + * + * {{{ + * def occurrences[A](as: Seq[A]): Map[A, Int] = + * as.groupMapReduce(identity)(_ => 1)(_ + _) + * }}} + * + * $willForceEvaluation + */ + def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): immutable.Map[K, B] = { + val m = mutable.Map.empty[K, B] + for (elem <- this) { + val k = key(elem) + val v = + m.get(k) match { + case Some(b) => reduce(b, f(elem)) + case None => f(elem) + } + m.put(k, v) + } + m.to(immutable.Map) + } + + /** Computes a prefix scan of the elements of the collection. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting collection + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new $coll containing the prefix scan of the elements in this $coll + */ + def scan[B >: A](z: B)(op: (B, B) => B): CC[B] = scanLeft(z)(op) + + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = iterableFactory.from(new View.ScanLeft(this, z, op)) + + /** Produces a collection containing cumulative results of applying the operator going right to left. + * The head of the collection is the last cumulative result. 
+ * $willNotTerminateInf + * $orderDependent + * $willForceEvaluation + * + * Example: + * {{{ + * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0) + * }}} + * + * @tparam B the type of the elements in the resulting collection + * @param z the initial value + * @param op the binary operator applied to the intermediate result and the element + * @return collection with intermediate results + */ + def scanRight[B](z: B)(op: (A, B) => B): CC[B] = { + class Scanner extends runtime.AbstractFunction1[A, Unit] { + var acc = z + var scanned = acc :: immutable.Nil + def apply(x: A) = { + acc = op(x, acc) + scanned ::= acc + } + } + val scanner = new Scanner + reversed.foreach(scanner) + iterableFactory.from(scanner.scanned) + } + + def map[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(this, f)) + + def flatMap[B](f: A => IterableOnce[B]): CC[B] = iterableFactory.from(new View.FlatMap(this, f)) + + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] = flatMap(asIterable) + + def collect[B](pf: PartialFunction[A, B]): CC[B] = + iterableFactory.from(new View.Collect(this, pf)) + + /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. 
+ * + * Example: + * {{{ + * val xs = $Coll(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] + * + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. + */ + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { + val left: View[A1] = new LeftPartitionMapped(this, f) + val right: View[A2] = new RightPartitionMapped(this, f) + (iterableFactory.from(left), iterableFactory.from(right)) + } + + /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the + * right hand operand. The element type of the $coll is the most specific superclass encompassing + * the element types of the two operands. + * + * @param suffix the iterable to append. + * @tparam B the element type of the returned collection. + * @return a new $coll which contains all elements + * of this $coll followed by all elements of `suffix`. + */ + def concat[B >: A](suffix: IterableOnce[B]): CC[B] = iterableFactory.from(suffix match { + case xs: Iterable[B] => new View.Concat(this, xs) + case xs => iterator ++ suffix.iterator + }) + + /** Alias for `concat` */ + @`inline` final def ++ [B >: A](suffix: IterableOnce[B]): CC[B] = concat(suffix) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. 
+ * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = iterableFactory.from(that match { // sound bcs of VarianceNote + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + def zipWithIndex: CC[(A @uncheckedVariance, Int)] = iterableFactory.from(new View.ZipWithIndex(this)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. + * @return a new collection of type `That` containing pairs consisting of + * corresponding elements of this $coll and `that`. The length + * of the returned collection is the maximum of the lengths of this $coll and `that`. + * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. + */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): CC[(A1, B)] = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. 
+ * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val first: View[A1] = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asPair(_)._2) + (iterableFactory.from(first), iterableFactory.from(second)) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. + * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val first: View[A1] = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3] = new View.Map[A, A3](this, asTriple(_)._3) + (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) + } + + /** Iterates over the tails of this $coll. 
The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this $coll + * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` + */ + def tails: Iterator[C] = iterateUntilEmpty(_.tail) + + /** Iterates over the inits of this $coll. The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `init`. + * + * $willForceEvaluation + * + * @return an iterator over all the inits of this $coll + * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` + */ + def inits: Iterator[C] = iterateUntilEmpty(_.init) + + override def tapEach[U](f: A => U): C = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { + // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` + // `this.tail.tail` doesn't compile as `C` is unbounded + // `Iterable.from(this)` would eagerly copy non-immutable collections + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) + (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) + } + + @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") + def ++:[B >: A](that: IterableOnce[B]): CC[B] = iterableFactory.from(that match { + case xs: Iterable[B] => new View.Concat(xs, this) + case _ => that.iterator ++ iterator + }) +} + +object IterableOps { + + /** Operations for comparing the size of a collection to a test value. + * + * These operations are implemented in terms of + * [[scala.collection.IterableOps.sizeCompare(Int) `sizeCompare(Int)`]]. 
+ */ + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]) extends AnyVal { + /** Tests if the size of the collection is less than some value. */ + @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 + /** Tests if the size of the collection is less than or equal to some value. */ + @inline def <=(size: Int): Boolean = it.sizeCompare(size) <= 0 + /** Tests if the size of the collection is equal to some value. */ + @inline def ==(size: Int): Boolean = it.sizeCompare(size) == 0 + /** Tests if the size of the collection is not equal to some value. */ + @inline def !=(size: Int): Boolean = it.sizeCompare(size) != 0 + /** Tests if the size of the collection is greater than or equal to some value. */ + @inline def >=(size: Int): Boolean = it.sizeCompare(size) >= 0 + /** Tests if the size of the collection is greater than some value. */ + @inline def >(size: Int): Boolean = it.sizeCompare(size) > 0 + } + + /** A trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. 
`List`) + * + * @define coll collection + */ + @SerialVersionUID(3L) + class WithFilter[+A, +CC[_]]( + self: IterableOps[A, CC, _], + p: A => Boolean + ) extends collection.WithFilter[A, CC] with Serializable { + + protected def filtered: Iterable[A] = + new View.Filter(self, p, isFlipped = false) + + def map[B](f: A => B): CC[B] = + self.iterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B](f: A => IterableOnce[B]): CC[B] = + self.iterableFactory.from(new View.FlatMap(filtered, f)) + + def foreach[U](f: A => U): Unit = filtered.foreach(f) + + def withFilter(q: A => Boolean): WithFilter[A, CC] = + new WithFilter(self, (a: A) => p(a) && q(a)) + + } + +} + +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](immutable.Iterable) { + + def single[A](a: A): Iterable[A] = new AbstractIterable[A] { + override def iterator = Iterator.single(a) + override def knownSize = 1 + override def head = a + override def headOption: Some[A] = Some(a) + override def last = a + override def lastOption: Some[A] = Some(a) + override def view: View.Single[A] = new View.Single(a) + override def take(n: Int) = if (n > 0) this else Iterable.empty + override def takeRight(n: Int) = if (n > 0) this else Iterable.empty + override def drop(n: Int) = if (n > 0) Iterable.empty else this + override def dropRight(n: Int) = if (n > 0) Iterable.empty else this + override def tail: Iterable[Nothing] = Iterable.empty + override def init: Iterable[Nothing] = Iterable.empty + } +} + +/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ +abstract class AbstractIterable[+A] extends Iterable[A] + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. 
+ * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) + protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] + + // overridden for efficiency, since we know CC[A] =:= C + override def empty: CC[A @uncheckedVariance] = iterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for collections that have an additional constraint, + * expressed by the `evidenceIterableFactory` method. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] + implicit protected def iterableEvidence: Ev[A @uncheckedVariance] + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = evidenceIterableFactory.from(coll) + override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] + override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. 
It is used for sorted sets. + * + * Note that in sorted sets, the `CC` type of the set is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Set` in [[SortedSetOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait SortedSetFactoryDefaults[+A, + +CC[X] <: SortedSet[X] with SortedSetOps[X, CC, CC[X]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { + self: IterableOps[A, WithFilterCC, _] => + + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = sortedIterableFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](using ordering) + override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(using ordering) + + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC] = + new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) +} + + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for maps. + * + * Note that in maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Map` in [[MapOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. 
+ * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait MapFactoryDefaults[K, +V, + +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = mapFactory.from(coll) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] + override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { + // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) + case self: immutable.TreeSeqMap[_, _] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] + case _ => mapFactory.empty + } + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC] = + new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for sorted maps. + * + * Note that in sorted maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying map (which is fixed to `Map` in [[SortedMapOps]]). This trait has therefore + * three type parameters `CC`, `WithFilterCC` and `UnsortedCC`. The `withFilter` method inherited + * from `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. 
+ */ +trait SortedMapFactoryDefaults[K, +V, + +CC[x, y] <: Map[x, y] with SortedMapOps[x, y, CC, CC[x, y]] with UnsortedCC[x, y], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x], + +UnsortedCC[x, y] <: Map[x, y]] extends SortedMapOps[K, V, CC, CC[K, V @uncheckedVariance]] with MapOps[K, V, UnsortedCC, CC[K, V @uncheckedVariance]] { + self: IterableOps[(K, V), WithFilterCC, _] => + + override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(using ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = sortedMapFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](using ordering) + + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC] = + new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) +} diff --git a/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala b/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala new file mode 100644 index 000000000000..5beb811ed0b2 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala @@ -0,0 +1,220 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.{implicitNotFound, nowarn} + +/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ +trait SortedMap[K, +V] + extends Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ + + def unsorted: Map[K, V] = this + + def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedMap" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => + (sm canEqual this) && + (this.size == sm.size) && { + val i1 = this.iterator + val i2 = sm.iterator + var allEqual = true + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } + allEqual + } + case _ => super.equals(that) + } +} + +trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] + with SortedOps[K, C] { + + /** The companion object of this sorted map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedMapFactory: SortedMapFactory[CC] + + /** Similar to `mapFromIterable`, but returns a SortedMap collection type. + * Note that the return type is now `CC[K2, V2]`. 
+ */ + @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) + + def unsorted: Map[K, V] + + /** + * Creates an iterator over all the key/value pairs + * contained in this map having a key greater than or + * equal to `start` according to the ordering of + * this map. x.iteratorFrom(y) is equivalent + * to but often more efficient than x.from(y).iterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def iteratorFrom(start: K): Iterator[(K, V)] + + /** + * Creates an iterator over all the keys(or elements) contained in this + * collection greater than or equal to `start` + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than + * x.from(y).keysIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def keysIteratorFrom(start: K): Iterator[K] + + /** + * Creates an iterator over all the values contained in this + * map that are associated with a key greater than or equal to `start` + * according to the ordering of this map. x.valuesIteratorFrom(y) is + * equivalent to but often more efficient than + * x.from(y).valuesIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) + + def firstKey: K = head._1 + def lastKey: K = last._1 + + /** Find the element with smallest key larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption + + /** Find the element with largest key less than a given key. + * @param key The given key. + * @return `None` if there is no such node. 
+ */ + def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption + + def rangeTo(to: K): C = { + val i = keySet.rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + override def keySet: SortedSet[K] = new KeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { + def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = SortedMapOps.this.rangeImpl(from, until) + new map.KeySortedSet + } + } + + /** A generic trait that is reused by sorted keyset implementations */ + protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => + implicit def ordering: Ordering[K] = SortedMapOps.this.ordering + def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) + } + + // And finally, we add new overloads taking an ordering + /** Builds a new sorted map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new sorted map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. 
+ */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + })(using ordering) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(using ordering) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(using ordering) +} + +object SortedMapOps { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
+ + /** Specializes `MapWithFilter` for sorted Map collections + * + * @define coll sorted map collection + */ + class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( + self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { + + def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.sortedMapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + self.sortedMapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] = + new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala b/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala new file mode 100644 index 000000000000..ad5d67a64635 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound + +/** + * Trait that overrides sorted map operations to take advantage of strict builders. 
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedMap(sortedMapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) + + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(using ordering)) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedCollect(sortedMapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] + if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala b/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala new file mode 100644 index 000000000000..e794044a1af9 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala @@ -0,0 +1,87 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.generic + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.collection.{Factory, Iterable} +import scala.collection.mutable.Builder + +/** The default serialization proxy for collection implementations. + * + * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` + * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed + * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any + * additional state required to create the proper `Builder` needs to be captured by the `factory`. + */ +@SerialVersionUID(3L) +final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { + + @transient protected var builder: Builder[A, Any] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val k = coll.knownSize + out.writeInt(k) + var count = 0 + coll.foreach { x => + out.writeObject(x) + count += 1 + } + if(k >= 0) { + if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") + } else out.writeObject(SerializeEnd) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + builder = factory.newBuilder + val k = in.readInt() + if(k >= 0) { + builder.sizeHint(k) + var count = 0 + while(count < k) { + builder += in.readObject().asInstanceOf[A] + count += 1 + } + } else { + while (true) in.readObject match { + case SerializeEnd => return + case a => builder += a.asInstanceOf[A] + } + } + } + + protected[this] def readResolve(): Any = builder.result() +} 
+ +@SerialVersionUID(3L) +private[collection] case object SerializeEnd + +/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type + * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or + * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement + * it directly without using this trait if you need a non-standard factory or if you want to use a different + * serialization scheme. + */ +trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => + protected[this] def writeReplace(): AnyRef = { + val f: Factory[Any, Any] = this match { + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](using it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](using it.ordering.asInstanceOf[Ordering[Any]]) + case it => it.iterableFactory.iterableFactory + } + new DefaultSerializationProxy(f, this) + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala b/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala new file mode 100644 index 000000000000..ebefa4c3c17a --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala @@ -0,0 +1,354 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package mutable +import java.util.Arrays +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 + +/** + * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same + * underlying `Array`, therefore it is not growable or shrinkable. + * + * @tparam T type of the elements in this wrapped array. + * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed abstract class ArraySeq[T] + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] + with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] + with Serializable { + + override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged + + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + val b = ArrayBuilder.make(using elemTag).asInstanceOf[ArrayBuilder[T]] + val s = coll.knownSize + if(s > 0) b.sizeHint(s) + b ++= coll + ArraySeq.make(b.result()) + } + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(using elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(using elemTag.asInstanceOf[ClassTag[T]]) + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def elemTag: ClassTag[_] + + /** Update element at given index */ + def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit + + /** The underlying array. 
Its element type does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def array: Array[_] + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit + + override protected[this] def className = "ArraySeq" + + /** Clones this object, including the underlying Array. */ + override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + + override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + override def equals(other: Any): Boolean = other match { + case that: ArraySeq[_] if this.array.length != that.array.length => + false + case _ => + super.equals(other) + } + + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = + ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + + override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + this + } +} + +/** A companion object used to create instances of `ArraySeq`. + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + // This is reused for all calls to empty. 
+ private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) + def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] + + def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) + + /** + * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type + * without copying. + * + * Note that an array containing boxed primitives can be converted to a `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` + * at runtime. + */ + def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { + def elemTag: ClassTag[T] = ClassTag[T](array.getClass.getComponentType) + def length: Int = array.length + def apply(index: Int): T = array(index) + def update(index: Int, elem: T): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofRef[_] => 
+ Array.equals( + this.array.asInstanceOf[Array[AnyRef]], + that.array.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(array, 0, array.length) + else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { + // Type erases to `ManifestFactory.ByteManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Byte.type = ClassTag.Byte + def length: Int = array.length + def apply(index: Int): Byte = array(index) + def update(index: Int, elem: Byte): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) + else new WidenedByteArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { + // Type erases to `ManifestFactory.ShortManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Short.type = ClassTag.Short + def length: Int = array.length + def apply(index: Int): Short = array(index) + def update(index: Int, elem: Short): Unit = { array(index) = elem } + 
override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) + else new WidenedShortArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { + // Type erases to `ManifestFactory.CharManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Char.type = ClassTag.Char + def length: Int = array.length + def apply(index: Int): Char = array(index) + def update(index: Int, elem: Char): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) + else new WidenedCharArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + val jsb = sb.underlying + if (start.length != 0) jsb.append(start) + val len = array.length + if (len != 0) { + if (sep.isEmpty) jsb.append(array) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(array(0)) + var 
i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(array(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + sb + } + } + + @SerialVersionUID(3L) + final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { + // Type erases to `ManifestFactory.IntManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Int.type = ClassTag.Int + def length: Int = array.length + def apply(index: Int): Int = array(index) + def update(index: Int, elem: Int): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) + else new IntArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { + // Type erases to `ManifestFactory.LongManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Long.type = ClassTag.Long + def length: Int = array.length + def apply(index: Int): Long = array(index) + def update(index: Int, elem: Long): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new 
LongArrayStepper(array, 0, array.length)) + else new LongArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { + // Type erases to `ManifestFactory.FloatManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Float.type = ClassTag.Float + def length: Int = array.length + def apply(index: Int): Float = array(index) + def update(index: Int, elem: Float): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) + else new WidenedFloatArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { + // Type erases to `ManifestFactory.DoubleManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Double.type = ClassTag.Double + def length: Int = array.length + def apply(index: Int): Double = array(index) + def update(index: Int, elem: Double): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == 
StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) + else new DoubleArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { + // Type erases to `ManifestFactory.BooleanManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Boolean.type = ClassTag.Boolean + def length: Int = array.length + def apply(index: Int): Boolean = array(index) + def update(index: Int, elem: Boolean): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { + // Type erases to `ManifestFactory.UnitManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Unit.type = ClassTag.Unit + def length: Int = array.length + def apply(index: Int): Unit = array(index) + def update(index: Int, elem: Unit): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofUnit => array.length == that.array.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S 
with EfficientSplit] + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala b/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala new file mode 100644 index 000000000000..36b53d1e433b --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala @@ -0,0 +1,888 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.{unchecked => uc} +import scala.annotation.{implicitNotFound, tailrec, unused} +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializationProxy +import scala.runtime.Statics + +/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good + * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality + * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality + * of numeric types is not supported (similar to `AnyRefMap`). + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. 
+ * + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) + extends AbstractMap[K, V] + with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- + with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- + + private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap + + def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) + + import CollisionProofHashMap.Node + private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] + private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] + + /** The actual hash table. */ + private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). 
*/ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + @`inline` private[this] final def computeHash(o: K): Int = { + val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode + h ^ (h >>> 16) + } + + @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) + + override protected def fromSpecific(coll: IterableOnce[(K, V)] @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] + + override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + override def contains(key: K): Boolean = findNode(key) ne null + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + }) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + } + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val nd = findNode(key) + if (nd eq null) default else nd match { + case nd: LLNode @uc => nd.value + case n => n.asInstanceOf[RBNode].value + } + } + + @`inline` private[this] def findNode(elem: K): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case n: LLNode @uc => n.getNode(elem, hash) + case n => n.asInstanceOf[RBNode].getNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) { + if(size == 0) reallocTable(target) + else growTable(target) + } + } + + override def update(key: K, value: V): Unit = put0(key, 
value, false) + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + val res = table(idx) match { + case n: RBNode @uc => + insert(n, idx, key, hash, value) + case _old => + val old: LLNode = _old.asInstanceOf[LLNode] + if(old eq null) { + table(idx) = new LLNode(key, hash, value, null) + } else { + var remaining = CollisionProofHashMap.treeifyThreshold + var prev: LLNode = null + var n = old + while((n ne null) && n.hash <= hash && remaining > 0) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return (if(getOld) Some(old) else null) + } + prev = n + n = n.next + remaining -= 1 + } + if(remaining == 0) { + treeify(old, idx) + return put0(key, value, getOld, hash, idx) + } + if(prev eq null) table(idx) = new LLNode(key, hash, value, old) + else prev.next = new LLNode(key, hash, value, prev.next) + } + true + } + if(res) contentSize += 1 + if(res) Some(null.asInstanceOf[V]) else null //TODO + } + + private[this] def treeify(old: LLNode, idx: Int): Unit = { + table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) + var n: LLNode = old.next + while(n ne null) { + val root = table(idx).asInstanceOf[RBNode] + insertIntoExisting(root, idx, n.key, n.hash, n.value, root) + n = n.next + } + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + val k = xs.knownSize + if(k > 0) sizeHint(contentSize + k) + super.addAll(xs) + } + + // returns the old value or Statics.pfMarker if not found + private[this] def 
remove0(elem: K) : Any = { + val hash = computeHash(elem) + val idx = index(hash) + table(idx) match { + case null => Statics.pfMarker + case t: RBNode @uc => + val v = delete(t, idx, elem, hash) + if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 + v + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd.value + case nd: LLNode @uc => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next.value + } + prev = next + next = next.next + } + Statics.pfMarker + } + } + + private[this] abstract class MapIterator[R] extends AbstractIterator[R] { + protected[this] def extract(node: LLNode): R + protected[this] def extract(node: RBNode): R + + private[this] var i = 0 + private[this] var node: Node = null + private[this] val len = table.length + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + n match { + case null => + case n: RBNode @uc => + node = CollisionProofHashMap.minNodeNonNull(n) + return true + case n: LLNode @uc => + node = n + return true + } + } + false + } + } + + def next(): R = + if(!hasNext) Iterator.empty.next() + else node match { + case n: RBNode @uc => + val r = extract(n) + node = CollisionProofHashMap.successor(n ) + r + case n: LLNode @uc => + val r = extract(n) + node = n.next + r + } + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapIterator[K] { + protected[this] def extract(node: LLNode) = node.key + protected[this] def extract(node: RBNode) = node.key + } + } + + override def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapIterator[(K, V)] { + protected[this] def extract(node: LLNode) = (node.key, node.value) + protected[this] def extract(node: RBNode) = 
(node.key, node.value) + } + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + table = java.util.Arrays.copyOf(table, newlen) + threshold = newThreshold(table.length) + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + i += 1 + } + oldlen *= 2 + } + } + + @`inline` private[this] def reallocTable(newlen: Int) = { + table = new Array(newlen) + threshold = newThreshold(table.length) + } + + @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + } + + private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + //preLow.next = null + //preHigh.next = null + var lastLow: LLNode = preLow + var lastHigh: LLNode = preHigh + var n = list + while(n ne null) { + val next = n.next + if((n.hash & mask) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(list ne preLow.next) table(lowBucket) = preLow.next + if(preHigh.next ne null) { + table(highBucket) = preHigh.next + lastHigh.next = null + } + } + + private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + var lowCount, highCount = 0 + tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) + if(highCount != 0) { + if(lowCount == 0) { + table(lowBucket) = null + table(highBucket) = tree + } else { + table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), 
lowCount) + table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def remove(key: K): Option[V] = { + val v = remove0(key) + if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) + } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) + } + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) + } + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) + + override protected[this] def className = "CollisionProofHashMap" + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val hash = computeHash(key) + val idx = index(hash) + table(idx) match { + case null => () + case n: LLNode @uc => + val nd = n.getNode(key, hash) + if(nd != null) return nd.value + case n => + val nd = n.asInstanceOf[RBNode].getNode(key, hash) + if(nd != null) return nd.value + } + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 
2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + + ///////////////////// Overrides code from SortedMapOps + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
+ */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: + + @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red + @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + + @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { + val i = hash - node.hash + if(i != 0) i else ordering.compare(key, node.key) + } + + @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ordering.compare(key, node.key) + } + + // ---- insertion ---- + + @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { + val cmp = compare(key, hash, x) + if(cmp == 0) { + x.value = value + false + } else { + val next = 
if(cmp < 0) x.left else x.right + if(next eq null) { + val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) + if (cmp < 0) x.left = z else x.right = z + table(bucket) = fixAfterInsert(_root, z) + return true + } + else insertIntoExisting(_root, bucket, key, hash, value, next) + } + } + + private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + if(tree eq null) { + table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) + true + } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } + + private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { + var root = _root + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + root = rotateLeft(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateRight(root, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + root = rotateRight(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateLeft(root, z.parent.parent) + } + } + } + root.red = false + root + } + + // ---- deletion ---- + + // returns the old value or Statics.pfMarker if not found + private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { + var root = _root + val z = root.getNode(key, hash: Int) + if (z ne null) { + val oldValue = z.value + var y = z + var yIsRed = y.red + var x: RBNode = null + var xParent: RBNode = null + + if (z.left eq null) { + x = z.right + root = transplant(root, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + root 
= transplant(root, z, z.left) + xParent = z.parent + } + else { + y = CollisionProofHashMap.minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + root = transplant(root, y, y.right) + y.right = z.right + y.right.parent = y + } + root = transplant(root, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) root = fixAfterDelete(root, x, xParent) + if(root ne _root) table(bucket) = root + oldValue + } else Statics.pfMarker + } + + private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + var root = _root + var x = node + var xParent = parent + while ((x ne root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateLeft(root, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + root = rotateRight(root, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + root = rotateLeft(root, xParent) + x = root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateRight(root, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + root = rotateLeft(root, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + root = rotateRight(root, xParent) + x = root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + root + } + + // ---- helpers ---- + + @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.right + x.right = y.left + + val xp = x.parent + if (y.left ne null) 
y.left.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.left) xp.left = y + else xp.right = y + + y.left = x + x.parent = y + root + } + + @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.left + x.left = y.right + + val xp = x.parent + if (y.right ne null) y.right.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.right) xp.right = y + else xp.left = y + + y.right = x + x.parent = y + root + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. + */ + private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { + var root = _root + if (to.parent eq null) root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + if (from ne null) from.parent = to.parent + root + } + + // building + + def fromNodes(xs: Iterator[Node], size: Int): RBNode = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): RBNode = size match { + case 0 => null + case 1 => + val nn = xs.next() + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val nn = xs.next() + val right = f(level+1, size-1-leftSize) + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + val n = new RBNode(key, hash, value, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + f(1, size) + } +} + +/** + * $factoryInfo 
+ * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + */ +@SerialVersionUID(3L) +object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it + } + + def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(using ordering) + } + + @unused @`inline` private def 
compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { + val i = hash - node.hash + if(i != 0) i else ord.compare(key, node.key) + } + + @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ord.compare(key, node.key) + } + + private final val treeifyThreshold = 8 + + // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. + // Keeping calls monomorphic where possible and dispatching manually where needed is faster. + sealed abstract class Node + + /////////////////////////// Red-Black Tree Node + + final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + val cmp = compare(k, h, this) + if (cmp < 0) { + if(left ne null) left.getNode(k, h) else null + } else if (cmp > 0) { + if(right ne null) right.getNode(k, h) else null + } else this + } + + def foreach[U](f: ((K, V)) => U): Unit = { + if(left ne null) left.foreach(f) + f((key, value)) + if(right ne null) right.foreach(f) + } + + def foreachEntry[U](f: (K, V) => U): Unit = { + if(left ne null) left.foreachEntry(f) + f(key, value) + if(right ne null) right.foreachEntry(f) + } + + def foreachNode[U](f: RBNode[K, V] => U): Unit = { + if(left ne null) left.foreachNode(f) + f(this) + if(right ne null) right.foreachNode(f) + } + } + + @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + new RBNode(key, hash, value, red, null, null, parent) + + @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + if (node.left eq 
null) node else minNodeNonNull(node.left) + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): RBNode[A, B] = nextNode match { + case null => Iterator.empty.next() + case node => + nextNode = successor(node) + node + } + } + + /////////////////////////// Linked List Node + + private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + override def toString = s"LLNode($key, $value, $hash) -> $next" + + private[this] def eq(a: Any, b: Any): Boolean = + if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this + else if((next eq null) || (hash > h)) null + else next.getNode(k, h) + } + + @tailrec def foreach[U](f: ((K, V)) => U): Unit = { + f((key, value)) + if(next ne null) next.foreach(f) + } + + @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { + f(key, value) + if(next ne null) next.foreachEntry(f) + } + + @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { + f(this) + if(next ne null) next.foreachNode(f) + } + } +} diff --git a/scala2-library-cc/src/scala/Array.scala b/scala2-library-cc/src/scala/Array.scala new file 
mode 100644 index 000000000000..d2098a76f32f --- /dev/null +++ b/scala2-library-cc/src/scala/Array.scala @@ -0,0 +1,690 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +//import scala.collection.generic._ +import scala.collection.{Factory, immutable, mutable} +import mutable.ArrayBuilder +import immutable.ArraySeq +import scala.language.implicitConversions +import scala.reflect.{ClassTag, classTag} +import scala.runtime.BoxedUnit +import scala.runtime.ScalaRunTime +import scala.runtime.ScalaRunTime.{array_apply, array_update} + +/** Utility methods for operating on arrays. + * For example: + * {{{ + * val a = Array(1, 2) + * val b = Array.ofDim[Int](2) + * val c = Array.concat(a, b) + * }}} + * where the array objects `a`, `b` and `c` have respectively the values + * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. 
+ */ +object Array { + val emptyBooleanArray = new Array[Boolean](0) + val emptyByteArray = new Array[Byte](0) + val emptyCharArray = new Array[Char](0) + val emptyDoubleArray = new Array[Double](0) + val emptyFloatArray = new Array[Float](0) + val emptyIntArray = new Array[Int](0) + val emptyLongArray = new Array[Long](0) + val emptyShortArray = new Array[Short](0) + val emptyObjectArray = new Array[Object](0) + + /** Provides an implicit conversion from the Array object to a collection Factory */ + implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) + @SerialVersionUID(3L) + private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) + def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] + } + + /** + * Returns a new [[scala.collection.mutable.ArrayBuilder]]. + */ + def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](using t) + + /** Build an array from the iterable collection. + * + * {{{ + * scala> val a = Array.from(Seq(1, 5)) + * val a: Array[Int] = Array(1, 5) + * + * scala> val b = Array.from(Range(1, 5)) + * val b: Array[Int] = Array(1, 2, 3, 4) + * }}} + * + * @param it the iterable collection + * @return an array consisting of elements of the iterable collection + */ + def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { + case it: Iterable[A] => it.toArray[A] + case _ => it.iterator.toArray[A] + } + + private def slowcopy(src : AnyRef, + srcPos : Int, + dest : AnyRef, + destPos : Int, + length : Int): Unit = { + var i = srcPos + var j = destPos + val srcUntil = srcPos + length + while (i < srcUntil) { + array_update(dest, j, array_apply(src, i)) + i += 1 + j += 1 + } + } + + /** Copy one array to another. 
+ * Equivalent to Java's + * `System.arraycopy(src, srcPos, dest, destPos, length)`, + * except that this also works for polymorphic and boxed arrays. + * + * Note that the passed-in `dest` array will be modified by this call. + * + * @param src the source array. + * @param srcPos starting position in the source array. + * @param dest destination array. + * @param destPos starting position in the destination array. + * @param length the number of array elements to be copied. + * + * @see `java.lang.System#arraycopy` + */ + def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { + val srcClass = src.getClass + if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) + java.lang.System.arraycopy(src, srcPos, dest, destPos, length) + else + slowcopy(src, srcPos, dest, destPos, length) + } + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength)`, + * except that this works for primitive and object arrays in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyOf[A](original: Array[A], newLength: Int): Array[A] = ((original: @unchecked) match { + case x: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] + case x: Array[AnyRef] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Int] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Double] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Long] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Float] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Char] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Byte] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Short] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Boolean] => java.util.Arrays.copyOf(x, newLength) + }).asInstanceOf[Array[A]] + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. The new array can have + * a different type than the original one as long as the values are + * assignment-compatible. When copying between primitive and object arrays, + * boxing and unboxing are supported. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength, newType)`, + * except that this works for all combinations of primitive and object arrays + * in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { + val runtimeClass = ct.runtimeClass + if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] + else { + val destClass = runtimeClass.asInstanceOf[Class[A]] + if (destClass.isAssignableFrom(original.getClass.getComponentType)) { + if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) + else { + val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] + java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] + } + } else { + val dest = new Array[A](newLength) + Array.copy(original, 0, dest, 0, original.length) + dest + } + } + } + + private def newUnitArray(len: Int): Array[Unit] = { + val result = new Array[Unit](len) + java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) + result + } + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = new Array[T](0) + + /** Creates an array with given elements. + * + * @param xs the elements to put in the array + * @return an array containing all elements from xs. + */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } + def apply[T: ClassTag](xs: T*): Array[T] = { + val len = xs.length + xs match { + case wa: immutable.ArraySeq[_] if wa.unsafeArray.getClass.getComponentType == classTag[T].runtimeClass => + // We get here in test/files/run/sd760a.scala, `Array[T](t)` for + // a specialized type parameter `T`. While we still pay for two + // copies of the array it is better than before when we also boxed + // each element when populating the result. 
+ ScalaRunTime.array_clone(wa.unsafeArray).asInstanceOf[Array[T]] + case _ => + val array = new Array[T](len) + val iterator = xs.iterator + var i = 0 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + } + + /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { + val array = new Array[Boolean](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Byte, xs: Byte*): Array[Byte] = { + val array = new Array[Byte](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Short, xs: Short*): Array[Short] = { + val array = new Array[Short](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Char, xs: Char*): Array[Char] = { + val array = new Array[Char](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Int, xs: Int*): Array[Int] = { + val array = new Array[Int](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Long, xs: Long*): Array[Long] = { + val array = new Array[Long](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Float, xs: Float*): Array[Float] = { + val array = new Array[Float](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Double, xs: Double*): Array[Double] = { + val array = new Array[Double](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Unit` objects */ + def apply(x: Unit, xs: Unit*): Array[Unit] = { + val array = new Array[Unit](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = + new Array[T](n1) + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { + val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) + for (i <- 0 until n1) arr(i) = new Array[T](n2) + arr + // tabulate(n1)(_ => ofDim[T](n2)) + } + /** Creates a 3-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = + tabulate(n1)(_ => ofDim[T](n2, n3)) + /** Creates a 4-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4)) + /** Creates a 5-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) + + /** Concatenates all arrays into a single array. + * + * @param xss the given arrays + * @return the array created from concatenating `xss` + */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = { + val b = newBuilder[T] + b.sizeHint(xss.map(_.length).sum) + for (xs <- xss) b ++= xs + b.result() + } + + /** Returns an array that contains the results of some element computation a number + * of times. 
+ * + * Note that this means that `elem` is computed a total of n times: + * {{{ + * scala> Array.fill(3){ math.random } + * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) + * }}} + * + * @param n the number of elements desired + * @param elem the element computation + * @return an Array of size n, where each element contains the result of computing + * `elem`. + */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = elem + i += 1 + } + array + } + } + + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Returns a three-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Returns a four-dimensional array that contains the results of some element + * computation a number of times. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Returns a five-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. + * + * @param n The number of elements in the array + * @param f The function computing element values + * @return An `Array` consisting of elements `f(0),f(1), ..., f(n - 1)` + */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = f(i) + i += 1 + } + array + } + } + + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Returns a three-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Returns a four-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Returns a five-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Returns an array containing a sequence of increasing integers in a range. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @return the array with values in range `start, start + 1, ..., end - 1` + * up to, but excluding, `end`. + */ + def range(start: Int, end: Int): Array[Int] = range(start, end, 1) + + /** Returns an array containing equally spaced values in some integer interval. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @param step the increment value of the array (may not be zero) + * @return the array with values in `start, start + step, ...` up to, but excluding `end` + */ + def range(start: Int, end: Int, step: Int): Array[Int] = { + if (step == 0) throw new IllegalArgumentException("zero step") + val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) + + var n = 0 + var i = start + while (if (step < 0) end < i else i < end) { + array(n) = i + i += step + n += 1 + } + array + } + + /** Returns an array containing repeated applications of a function to a start value. 
+ * + * @param start the start value of the array + * @param len the number of elements returned by the array + * @param f the function that is repeatedly applied + * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { + if (len > 0) { + val array = new Array[T](len) + var acc = start + var i = 1 + array(0) = acc + + while (i < len) { + acc = f(acc) + array(i) = acc + i += 1 + } + array + } else { + empty[T] + } + } + + /** Compare two arrays per element. + * + * A more efficient version of `xs.sameElements(ys)`. + * + * Note that arrays are invariant in Scala, but it may + * be sound to cast an array of arbitrary reference type + * to `Array[AnyRef]`. Arrays on the JVM are covariant + * in their element type. + * + * `Array.equals(xs.asInstanceOf[Array[AnyRef]], ys.asInstanceOf[Array[AnyRef]])` + * + * @param xs an array of AnyRef + * @param ys an array of AnyRef + * @return true if corresponding elements are equal + */ + def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = + (xs eq ys) || + (xs.length == ys.length) && { + var i = 0 + while (i < xs.length && xs(i) == ys(i)) i += 1 + i >= xs.length + } + + /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. 
+ * + * @param x the selector value + * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` + */ + def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) + + final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[T] = this + def lengthCompare(len: Int): Int = a.lengthCompare(len) + def apply(i: Int): T = a(i) + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def toSeq: scala.Seq[T] = a.toSeq // clones the array + } +} + +/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation + * for Java's `T[]`. + * + * {{{ + * val numbers = Array(1, 2, 3, 4) + * val first = numbers(0) // read the first element + * numbers(3) = 100 // replace the 4th array element with 100 + * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two + * }}} + * + * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above + * example code. + * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to + * `update(Int, T)`. + * + * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion + * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion + * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). + * Both types make available many of the standard operations found in the Scala collections API. + * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, + * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. + * + * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. 
For instance, + * consider the following code: + * + * {{{ + * val arr = Array(1, 2, 3) + * val arrReversed = arr.reverse + * val seqReversed : collection.Seq[Int] = arr.reverse + * }}} + * + * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring + * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed + * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another + * `ArraySeq`. + * + * @see [[https://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[https://docs.scala-lang.org/sips/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. 
+ * @hideImplicitConversion scala.Predef.booleanArrayOps + * @hideImplicitConversion scala.Predef.byteArrayOps + * @hideImplicitConversion scala.Predef.charArrayOps + * @hideImplicitConversion scala.Predef.doubleArrayOps + * @hideImplicitConversion scala.Predef.floatArrayOps + * @hideImplicitConversion scala.Predef.intArrayOps + * @hideImplicitConversion scala.Predef.longArrayOps + * @hideImplicitConversion scala.Predef.refArrayOps + * @hideImplicitConversion scala.Predef.shortArrayOps + * @hideImplicitConversion scala.Predef.unitArrayOps + * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray + * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray + * @define coll array + * @define Coll `Array` + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define collectExample + * @define undefinedorder + */ +final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { + + /** The length of the array */ + def length: Int = throw new Error() + + /** The element at given index. + * + * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. + * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. 
+ * + * @param i the index + * @return the element at the given index + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def apply(i: Int): T = throw new Error() + + /** Update the element at given index. + * + * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. + * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. + * + * @param i the index + * @param x the value to be written at index `i` + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def update(i: Int, x: T): Unit = { throw new Error() } + + /** Clone the Array. + * + * @return A clone of the Array. + */ + override def clone(): Array[T] = throw new Error() +} diff --git a/scala2-library-cc/src/scala/collection/ArrayOps.scala b/scala2-library-cc/src/scala/collection/ArrayOps.scala index e8548c12751f..72ec66a0bc86 100644 --- a/scala2-library-cc/src/scala/collection/ArrayOps.scala +++ b/scala2-library-cc/src/scala/collection/ArrayOps.scala @@ -590,7 +590,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { val len = xs.length def boxed = if(len < ArrayOps.MaxStableSortLength) { val a = xs.clone() - Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]]) + Sorting.stableSort(a)(using ord.asInstanceOf[Ordering[A]]) a } else { val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) @@ -1300,7 +1300,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) if (xs.length == 0) bb.result() else { - def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + def mkRowBuilder() = ArrayBuilder.make[B](using ClassTag[B](aClass.getComponentType)) val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) for (xs <- this) { var i = 0 diff --git a/scala2-library-cc/src/scala/collection/Factory.scala b/scala2-library-cc/src/scala/collection/Factory.scala index 99f584b972fc..96f39bafc905 100644 
--- a/scala2-library-cc/src/scala/collection/Factory.scala +++ b/scala2-library-cc/src/scala/collection/Factory.scala @@ -675,16 +675,16 @@ object ClassTagIterableFactory { * sound depending on the use of the `ClassTag` by the collection implementation. */ @SerialVersionUID(3L) class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { - def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] - def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] - override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] - override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] - override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] + def empty[A]: CC[A] = delegate.empty(using ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(using ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(using ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(using 
ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(using ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(using ClassTag.Any).asInstanceOf[CC[A]] } } diff --git a/scala2-library-cc/src/scala/collection/Iterable.scala b/scala2-library-cc/src/scala/collection/Iterable.scala index 5afc14f4ceef..6556f31d378d 100644 --- a/scala2-library-cc/src/scala/collection/Iterable.scala +++ b/scala2-library-cc/src/scala/collection/Iterable.scala @@ -985,9 +985,9 @@ trait SortedSetFactoryDefaults[+A, +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { self: IterableOps[A, WithFilterCC, _] => - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(ordering) - override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](ordering) - override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(ordering) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(using 
ordering) + override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](using ordering) + override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(using ordering) override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC]^{p} = new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) @@ -1040,9 +1040,9 @@ trait SortedMapFactoryDefaults[K, +V, +UnsortedCC[x, y] <: Map[x, y]] extends SortedMapOps[K, V, CC, CC[K, V @uncheckedVariance]] with MapOps[K, V, UnsortedCC, CC[K, V @uncheckedVariance]] { self: IterableOps[(K, V), WithFilterCC, _] => - override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(ordering) - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(ordering) - override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](ordering) + override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(using ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](using ordering) override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC]^{p} = new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) diff --git a/scala2-library-cc/src/scala/collection/SortedMap.scala b/scala2-library-cc/src/scala/collection/SortedMap.scala index 7b9381ebb078..876a83b2709c 100644 --- a/scala2-library-cc/src/scala/collection/SortedMap.scala +++ 
b/scala2-library-cc/src/scala/collection/SortedMap.scala @@ -181,16 +181,16 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) - })(ordering) + })(using ordering) /** Alias for `concat` */ @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(using ordering) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(using ordering) } object SortedMapOps { diff --git a/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala b/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala index 9a9e6e367922..411a86c7cc5c 100644 --- a/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala +++ b/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala @@ -34,7 +34,7 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOp strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = - strictOptimizedConcat(xs, 
sortedMapFactory.newBuilder(ordering)) + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(using ordering)) override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = strictOptimizedCollect(sortedMapFactory.newBuilder, pf) diff --git a/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala b/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala index e36bb77ebdb8..1f0e6164731c 100644 --- a/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala +++ b/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala @@ -78,9 +78,9 @@ private[collection] case object SerializeEnd trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => protected[this] def writeReplace(): AnyRef = { val f: Factory[Any, Any] = this match { - case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](using it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] - case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]]) + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](using it.ordering.asInstanceOf[Ordering[Any]]) case it => it.iterableFactory.iterableFactory } new DefaultSerializationProxy(f, this) diff --git a/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala b/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala index 70762e5b340d..d1c5b5c9ce72 100644 --- 
a/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala +++ b/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala @@ -46,15 +46,15 @@ sealed abstract class ArraySeq[T] override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = { - val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] + val b = ArrayBuilder.make(using elemTag).asInstanceOf[ArrayBuilder[T]] val s = coll.knownSize if(s > 0) b.sizeHint(s) b ++= coll ArraySeq.make(b.result()) } override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = - ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] - override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) + ArraySeq.newBuilder[T](using elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(using elemTag.asInstanceOf[ClassTag[T]]) /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. 
A primitive * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype diff --git a/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala b/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala index ff3bab1dd818..05c3124a3323 100644 --- a/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala +++ b/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala @@ -768,7 +768,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { @SerialVersionUID(3L) private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it - def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(using ordering) } @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { diff --git a/tests/neg/given-loop-prevention.check b/tests/neg/given-loop-prevention.check new file mode 100644 index 000000000000..460adf03be49 --- /dev/null +++ b/tests/neg/given-loop-prevention.check @@ -0,0 +1,14 @@ +-- Error: tests/neg/given-loop-prevention.scala:10:36 ------------------------------------------------------------------ +10 | given List[Foo] = List(summon[Foo]) // error + | ^ + | Result of implicit search for Foo will change. + | Current result Baz.given_Foo will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: No Matching Implicit. 
+ | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that Baz.given_Foo comes earlier, + | - use an explicit argument. diff --git a/tests/neg/given-loop-prevention.scala b/tests/neg/given-loop-prevention.scala new file mode 100644 index 000000000000..9d404b8c6d8e --- /dev/null +++ b/tests/neg/given-loop-prevention.scala @@ -0,0 +1,12 @@ + +class Foo + +object Bar { + given Foo with {} + given List[Foo] = List(summon[Foo]) // ok +} + +object Baz { + given List[Foo] = List(summon[Foo]) // error + given Foo with {} +} diff --git a/tests/neg/i6716.check b/tests/neg/i6716.check index 4684842e73fe..0144f539f53c 100644 --- a/tests/neg/i6716.check +++ b/tests/neg/i6716.check @@ -1,5 +1,5 @@ --- Warning: tests/neg/i6716.scala:12:39 -------------------------------------------------------------------------------- -12 | given Monad[Bar] = summon[Monad[Foo]] // warn +-- Error: tests/neg/i6716.scala:11:39 ---------------------------------------------------------------------------------- +11 | given Monad[Bar] = summon[Monad[Foo]] // error | ^ | Result of implicit search for Monad[Foo] will change. | Current result Bar.given_Monad_Bar will be no longer eligible @@ -12,5 +12,3 @@ | - use a `given ... with` clause as the enclosing given, | - rearrange definitions so that Bar.given_Monad_Bar comes earlier, | - use an explicit argument. - | This will be an error in Scala 3.5 and later. 
-No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i6716.scala b/tests/neg/i6716.scala index 311209fd9006..8b37d4e223ac 100644 --- a/tests/neg/i6716.scala +++ b/tests/neg/i6716.scala @@ -1,4 +1,3 @@ -//> using options -Xfatal-warnings trait Monad[T]: def id: String @@ -9,11 +8,10 @@ object Foo { opaque type Bar = Foo object Bar { - given Monad[Bar] = summon[Monad[Foo]] // warn + given Monad[Bar] = summon[Monad[Foo]] // error } object Test extends App { println(summon[Monad[Foo]].id) println(summon[Monad[Bar]].id) } -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) \ No newline at end of file diff --git a/tests/neg/i7294-a.check b/tests/neg/i7294-a.check deleted file mode 100644 index c33735258ad0..000000000000 --- a/tests/neg/i7294-a.check +++ /dev/null @@ -1,27 +0,0 @@ --- [E007] Type Mismatch Error: tests/neg/i7294-a.scala:10:20 ----------------------------------------------------------- -10 | case x: T => x.g(10) // error - | ^^^^^^^ - | Found: Any - | Required: T - | - | where: T is a type in given instance f with bounds <: foo.Foo - | - | longer explanation available when compiling with `-explain` --- Warning: tests/neg/i7294-a.scala:10:12 ------------------------------------------------------------------------------ -10 | case x: T => x.g(10) // error - | ^ - | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. - | Current result foo.Test.f will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: No Matching Implicit. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that foo.Test.f comes earlier, - | - use an explicit argument. 
- | This will be an error in Scala 3.5 and later. - | - | where: T is a type in given instance f with bounds <: foo.Foo -No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7294-a.scala b/tests/neg/i7294-a.scala deleted file mode 100644 index a5193097e941..000000000000 --- a/tests/neg/i7294-a.scala +++ /dev/null @@ -1,14 +0,0 @@ -//> using options -Xfatal-warnings - -package foo - -trait Foo { def g(x: Int): Any } - -object Test: - - inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error - } - - @main def Test = f -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7294-b.scala b/tests/neg/i7294-b.scala deleted file mode 100644 index 17cd7f07c3f7..000000000000 --- a/tests/neg/i7294-b.scala +++ /dev/null @@ -1,12 +0,0 @@ -//> using options -Xfatal-warnings - -package foo - -trait Foo { def g(x: Any): Any } - -inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error -} - -@main def Test = f -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7294.check b/tests/neg/i7294.check new file mode 100644 index 000000000000..d6e559997f78 --- /dev/null +++ b/tests/neg/i7294.check @@ -0,0 +1,25 @@ +-- Error: tests/neg/i7294.scala:7:10 ----------------------------------------------------------------------------------- +7 | case x: T => x.g(10) // error // error + | ^ + | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. + | Current result foo.f will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: No Matching Implicit. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... 
with` clause as the enclosing given, + | - rearrange definitions so that foo.f comes earlier, + | - use an explicit argument. + | + | where: T is a type in given instance f with bounds <: foo.Foo +-- [E007] Type Mismatch Error: tests/neg/i7294.scala:7:18 -------------------------------------------------------------- +7 | case x: T => x.g(10) // error // error + | ^^^^^^^ + | Found: Any + | Required: T + | + | where: T is a type in given instance f with bounds <: foo.Foo + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i7294.scala b/tests/neg/i7294.scala new file mode 100644 index 000000000000..fbb00f9b7e89 --- /dev/null +++ b/tests/neg/i7294.scala @@ -0,0 +1,10 @@ + +package foo + +trait Foo { def g(x: Any): Any } + +inline given f[T <: Foo]: T = ??? match { + case x: T => x.g(10) // error // error +} + +@main def Test = f diff --git a/tests/neg/looping-givens.check b/tests/neg/looping-givens.check new file mode 100644 index 000000000000..1e7ee08d79df --- /dev/null +++ b/tests/neg/looping-givens.check @@ -0,0 +1,48 @@ +-- Error: tests/neg/looping-givens.scala:9:22 -------------------------------------------------------------------------- +9 | given aa: A = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: a. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. 
+ | + | where: T is a type variable with constraint <: A +-- Error: tests/neg/looping-givens.scala:10:22 ------------------------------------------------------------------------- +10 | given bb: B = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: b. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | + | where: T is a type variable with constraint <: B +-- Error: tests/neg/looping-givens.scala:11:28 ------------------------------------------------------------------------- +11 | given ab: (A & B) = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: Search Failure: joint(ab, ab). + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | + | where: T is a type variable with constraint <: A & B diff --git a/tests/neg/looping-givens.scala b/tests/neg/looping-givens.scala new file mode 100644 index 000000000000..57dc95f99aab --- /dev/null +++ b/tests/neg/looping-givens.scala @@ -0,0 +1,11 @@ +//> options -source 3.4 + +class A +class B + +given joint(using a: A, b: B): (A & B) = ??? 
+ +def foo(using a: A, b: B) = + given aa: A = summon // error + given bb: B = summon // error + given ab: (A & B) = summon // error diff --git a/tests/pos-deep-subtype/CollectionStrawMan6.scala b/tests/pos-deep-subtype/CollectionStrawMan6.scala index 9f189afbcf3a..99f634a66622 100644 --- a/tests/pos-deep-subtype/CollectionStrawMan6.scala +++ b/tests/pos-deep-subtype/CollectionStrawMan6.scala @@ -754,11 +754,11 @@ object CollectionStrawMan6 extends LowPriority { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](elemTag) + protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](using elemTag) def fromIterable[B: ClassTag](coll: Iterable[B]): Array[B] = coll.toArray[B] - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(using elemTag)) override def knownSize = xs.length diff --git a/tests/pos/extmethods.scala b/tests/pos/extmethods.scala index 368b4f439916..40683c56c694 100644 --- a/tests/pos/extmethods.scala +++ b/tests/pos/extmethods.scala @@ -17,7 +17,7 @@ object CollectionStrawMan { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(using elemTag)) } } diff --git a/tests/pos/given-loop-prevention.scala b/tests/pos/given-loop-prevention.scala deleted file mode 100644 index 0bae0bb24fed..000000000000 --- a/tests/pos/given-loop-prevention.scala +++ /dev/null @@ -1,14 +0,0 @@ -//> using options -Xfatal-warnings - -class Foo - -object Bar { - given Foo with {} - given List[Foo] = List(summon[Foo]) // ok -} - -object Baz { - @annotation.nowarn - given List[Foo] = List(summon[Foo]) // gives a warning, which is suppressed - given Foo with {} -} diff --git 
a/tests/pos/i17245.scala b/tests/pos/i17245.scala index 3b5b3a74108d..8609a8293670 100644 --- a/tests/pos/i17245.scala +++ b/tests/pos/i17245.scala @@ -14,7 +14,7 @@ type OnChannel = Channel => Any val case1: OnChannel = Mockito.mock[OnChannel] val case2: OnChannel = Mockito.mock val case3 = Mockito.mock[OnChannel] - val case4: OnChannel = Mockito.mock[OnChannel](summon[ClassTag[OnChannel]]) + val case4: OnChannel = Mockito.mock[OnChannel](using summon[ClassTag[OnChannel]]) // not a regressive case, but an added improvement with the fix for the above val case5: Channel => Any = Mockito.mock[Channel => Any] diff --git a/tests/pos/i9967.scala b/tests/pos/i9967.scala index 4e915a27bfbf..d8cbf99b9d6e 100644 --- a/tests/pos/i9967.scala +++ b/tests/pos/i9967.scala @@ -1,6 +1,6 @@ import collection.mutable class MaxSizeMap[K, V](maxSize: Int)(using o: Ordering[K]): - val sortedMap: mutable.TreeMap[K, V] = mutable.TreeMap.empty[K, V](o) + val sortedMap: mutable.TreeMap[K, V] = mutable.TreeMap.empty[K, V](using o) export sortedMap._ diff --git a/tests/pos/t5643.scala b/tests/pos/t5643.scala index 1ce34ba36226..9866f8d399c2 100644 --- a/tests/pos/t5643.scala +++ b/tests/pos/t5643.scala @@ -13,7 +13,7 @@ object TupledEvidenceTest { def f[T : GetResult] = "" - f[(String,String)](getTuple[(String, String)]) + f[(String,String)](using getTuple[(String, String)]) f[(String,String)] } diff --git a/tests/run/colltest6/CollectionStrawMan6_1.scala b/tests/run/colltest6/CollectionStrawMan6_1.scala index bed5c476b96d..0bf0cbddffc9 100644 --- a/tests/run/colltest6/CollectionStrawMan6_1.scala +++ b/tests/run/colltest6/CollectionStrawMan6_1.scala @@ -755,11 +755,11 @@ object CollectionStrawMan6 extends LowPriority { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](elemTag) + protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](using elemTag) def 
fromIterable[B: ClassTag](coll: Iterable[B]): Array[B] = coll.toArray[B] - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(using elemTag)) override def knownSize = xs.length diff --git a/tests/run/i502.scala b/tests/run/i502.scala index 71176d9660cd..20ed1f43b840 100644 --- a/tests/run/i502.scala +++ b/tests/run/i502.scala @@ -6,13 +6,13 @@ object Test extends App { Array[Int](1, 2) try { - Array[Int](1, 2)(null) + Array[Int](1, 2)(using null) ??? } catch { case _: NullPointerException => println("Ok") } - Array[Int](1, 2)({println("foo"); summon[ClassTag[Int]]}) + Array[Int](1, 2)(using {println("foo"); summon[ClassTag[Int]]}) - Array[Int](1, 2)(ClassTag.apply({ println("bar"); classOf[Int]})) + Array[Int](1, 2)(using ClassTag.apply({ println("bar"); classOf[Int]})) } diff --git a/tests/run/t2029.scala b/tests/run/t2029.scala index d4ab0f02b67f..d5bc478fa0b3 100644 --- a/tests/run/t2029.scala +++ b/tests/run/t2029.scala @@ -5,7 +5,7 @@ object Test{ val mainSet = TreeSet(1 to 5 :_*) var compareCalled = false; - val smallerSet = TreeSet(2 to 4 :_*)(Ordering[Int].reverse) + val smallerSet = TreeSet(2 to 4 :_*)(using Ordering[Int].reverse) println(mainSet.mkString(",")) println(smallerSet.mkString(",")) diff --git a/tests/run/t3326.scala b/tests/run/t3326.scala index 3d7d83068f92..1f8c04394682 100644 --- a/tests/run/t3326.scala +++ b/tests/run/t3326.scala @@ -28,8 +28,8 @@ object Test { def testCollectionSorted(): Unit = { import collection.* val order = implicitly[Ordering[Int]].reverse - var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order) - var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order) + var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) + var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) m1 ++= List(1 -> "World") m1 ++= List(2 -> "Hello") @@ -49,8 +49,8 @@ object Test { 
def testImmutableSorted(): Unit = { import collection.immutable.* val order = implicitly[Ordering[Int]].reverse - var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order) - var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order) + var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) + var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) m1 += (1 -> "World") m1 += (2 -> "Hello") diff --git a/tests/semanticdb/expect/InventedNames.expect.scala b/tests/semanticdb/expect/InventedNames.expect.scala index 7c5b008209c2..b92e9aa940a7 100644 --- a/tests/semanticdb/expect/InventedNames.expect.scala +++ b/tests/semanticdb/expect/InventedNames.expect.scala @@ -32,7 +32,7 @@ given [T/*<-givens::InventedNames$package.given_Z_T#[T]*/]: Z/*->givens::Z#*/[T/ val a/*<-givens::InventedNames$package.a.*/ = intValue/*->givens::InventedNames$package.intValue.*/ val b/*<-givens::InventedNames$package.b.*/ = given_String/*->givens::InventedNames$package.given_String.*/ -val c/*<-givens::InventedNames$package.c.*/ = given_Double/*->givens::InventedNames$package.given_Double().*/ +//val c = given_Double val d/*<-givens::InventedNames$package.d.*/ = given_List_T/*->givens::InventedNames$package.given_List_T().*/[Int/*->scala::Int#*/] val e/*<-givens::InventedNames$package.e.*/ = given_Char/*->givens::InventedNames$package.given_Char.*/ val f/*<-givens::InventedNames$package.f.*/ = given_Float/*->givens::InventedNames$package.given_Float.*/ diff --git a/tests/semanticdb/expect/InventedNames.scala b/tests/semanticdb/expect/InventedNames.scala index 42c14c90e370..61baae46c832 100644 --- a/tests/semanticdb/expect/InventedNames.scala +++ b/tests/semanticdb/expect/InventedNames.scala @@ -32,7 +32,7 @@ given [T]: Z[T] with val a = intValue val b = given_String -val c = given_Double +//val c = given_Double val d = given_List_T[Int] val e = given_Char val f = given_Float diff --git a/tests/semanticdb/metac.expect 
b/tests/semanticdb/metac.expect index 84c3e7c6a110..98657f122255 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2093,16 +2093,15 @@ Schema => SemanticDB v4 Uri => InventedNames.scala Text => empty Language => Scala -Symbols => 45 entries -Occurrences => 66 entries -Synthetics => 3 entries +Symbols => 44 entries +Occurrences => 64 entries +Synthetics => 2 entries Symbols: -givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +24 decls } +givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +23 decls } givens/InventedNames$package.`* *`. => final implicit lazy val given method * * Long givens/InventedNames$package.a. => val method a Int givens/InventedNames$package.b. => val method b String -givens/InventedNames$package.c. => val method c Double givens/InventedNames$package.d. => val method d List[Int] givens/InventedNames$package.e. => val method e Char givens/InventedNames$package.f. => val method f Float @@ -2193,8 +2192,6 @@ Occurrences: [32:8..32:16): intValue -> givens/InventedNames$package.intValue. [33:4..33:5): b <- givens/InventedNames$package.b. [33:8..33:20): given_String -> givens/InventedNames$package.given_String. -[34:4..34:5): c <- givens/InventedNames$package.c. -[34:8..34:20): given_Double -> givens/InventedNames$package.given_Double(). [35:4..35:5): d <- givens/InventedNames$package.d. [35:8..35:20): given_List_T -> givens/InventedNames$package.given_List_T(). 
[35:21..35:24): Int -> scala/Int# @@ -2214,7 +2211,6 @@ Occurrences: Synthetics: [24:0..24:0): => *(x$1) -[34:8..34:20):given_Double => *(intValue) [40:8..40:15):given_Y => *(given_X) expect/Issue1749.scala diff --git a/tests/warn/context-bounds-migration.scala b/tests/warn/context-bounds-migration.scala deleted file mode 100644 index cdd3eca62b5c..000000000000 --- a/tests/warn/context-bounds-migration.scala +++ /dev/null @@ -1,9 +0,0 @@ - -class C[T] -def foo[X: C] = () - -given [T]: C[T] = C[T]() - -def Test = - foo(C[Int]()) // warning - foo(using C[Int]()) // ok diff --git a/tests/warn/i15474.scala b/tests/warn/i15474.scala index d7c41130a1bb..0d8fc111ac6a 100644 --- a/tests/warn/i15474.scala +++ b/tests/warn/i15474.scala @@ -1,4 +1,4 @@ - +//> using options -source 3.4 import scala.language.implicitConversions diff --git a/tests/warn/looping-givens.check b/tests/warn/looping-givens.check new file mode 100644 index 000000000000..eec348c19d11 --- /dev/null +++ b/tests/warn/looping-givens.check @@ -0,0 +1,45 @@ +-- Warning: tests/warn/looping-givens.scala:9:22 ----------------------------------------------------------------------- +9 | given aa: A = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: a. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. 
+-- Warning: tests/warn/looping-givens.scala:10:22 ---------------------------------------------------------------------- +10 | given bb: B = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: b. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. +-- Warning: tests/warn/looping-givens.scala:11:28 ---------------------------------------------------------------------- +11 | given ab: (A & B) = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: joint. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. 
diff --git a/tests/warn/looping-givens.scala b/tests/warn/looping-givens.scala index 6b6a32002331..2f737206f64e 100644 --- a/tests/warn/looping-givens.scala +++ b/tests/warn/looping-givens.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 + class A class B From 6c75005b627512f6aeee96120b19862f94bd501b Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Tue, 11 Jun 2024 14:52:24 +0100 Subject: [PATCH 400/465] Disable ClasspathTests.unglobClasspathVerifyTest (#20551) cc @bishabosha @Gedochao [test_scala2_library_tasty] [test_windows_full] [test_java8] --- compiler/test/dotty/tools/scripting/ClasspathTests.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index 5107af5eee43..24c6c297a777 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -77,6 +77,7 @@ class ClasspathTests: /* * verify classpath is unglobbed by MainGenericRunner. 
*/ + @Ignore @Test def unglobClasspathVerifyTest = { val testScriptName = "unglobClasspath.sc" val testScript = scripts("/scripting").find { _.name.matches(testScriptName) } match From aac98c9df002ae061fc95f45d6568035c7be7e4e Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 14 Jun 2024 09:30:43 +0100 Subject: [PATCH 401/465] Adapt the release workflow to SIP-46 (#20565) --- .github/workflows/ci.yaml | 185 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 176 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b606e6ae1732..92df4a190ec7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -748,13 +748,35 @@ jobs: - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - - - name: Prepare Release - run: | + # Extract the release tag + - name: Extract the release tag + run : echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV + # BUILD THE SDKs + - name: Build and pack the SDK (universal) + run : | ./project/scripts/sbt dist/packArchive sha256sum dist/target/scala3-* > dist/target/sha256sum.txt - echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV - + - name: Build and pack the SDK (linux x86-64) + run : | + ./project/scripts/sbt dist-linux-x86_64/packArchive + sha256sum dist/linux-x86_64/target/scala3-* > dist/linux-x86_64/target/sha256sum.txt + - name: Build and pack the SDK (linux aarch64) + run : | + ./project/scripts/sbt dist-linux-aarch64/packArchive + sha256sum dist/linux-aarch64/target/scala3-* > dist/linux-aarch64/target/sha256sum.txt + - name: Build and pack the SDK (mac x86-64) + run : | + ./project/scripts/sbt dist-mac-x86_64/packArchive + sha256sum dist/mac-x86_64/target/scala3-* > dist/mac-x86_64/target/sha256sum.txt + - name: Build and pack the SDK (mac aarch64) + run : | + ./project/scripts/sbt dist-mac-aarch64/packArchive + sha256sum dist/mac-aarch64/target/scala3-* > dist/mac-aarch64/target/sha256sum.txt + - 
name: Build and pack the SDK (win x86-64) + run : | + ./project/scripts/sbt dist-win-x86_64/packArchive + sha256sum dist/win-x86_64/target/scala3-* > dist/win-x86_64/target/sha256sum.txt + # Create the GitHub release - name: Create GitHub Release id: create_gh_release uses: actions/create-release@latest @@ -767,7 +789,7 @@ jobs: draft: true prerelease: ${{ contains(env.RELEASE_TAG, '-') }} - - name: Upload zip archive to GitHub Release + - name: Upload zip archive to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -776,8 +798,7 @@ jobs: asset_path: ./dist/target/scala3-${{ env.RELEASE_TAG }}.zip asset_name: scala3-${{ env.RELEASE_TAG }}.zip asset_content_type: application/zip - - - name: Upload tar.gz archive to GitHub Release + - name: Upload tar.gz archive to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -787,7 +808,103 @@ jobs: asset_name: scala3-${{ env.RELEASE_TAG }}.tar.gz asset_content_type: application/gzip - - name: Upload SHA256 sum of the release artefacts to GitHub Release + - name: Upload zip archive to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz + asset_content_type: application/gzip + + - name: 
Upload zip archive to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG 
}}-aarch64-apple-darwin.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_content_type: application/gzip + + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -797,6 +914,56 @@ jobs: asset_name: sha256sum.txt asset_content_type: text/plain + - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/sha256sum.txt + asset_name: 
sha256sum-x86_64-pc-linux.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/sha256sum-aarch64-pc-linux.txt + asset_name: sha256sum.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/sha256sum.txt + asset_name: sha256sum-x86_64-apple-darwin.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/sha256sum.txt + asset_name: sha256sum-aarch64-apple-darwin.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/sha256sum.txt + asset_name: sha256sum-x86_64-pc-win32.txt + asset_content_type: text/plain + - name: Publish Release run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleRelease" From edbb7c4fcde5e53c43dcb508d64b82f8902c5449 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 17 Jun 2024 13:13:45 +0200 Subject: [PATCH 402/465] Adapts the workflow to the changes in #20351 --- .github/workflows/publish-sdkman.yml | 69 +++++++++++++++++++++ 
.github/workflows/releases.yml | 57 ++++++++--------- .github/workflows/scripts/publish-sdkman.sh | 50 --------------- 3 files changed, 98 insertions(+), 78 deletions(-) create mode 100644 .github/workflows/publish-sdkman.yml delete mode 100755 .github/workflows/scripts/publish-sdkman.sh diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml new file mode 100644 index 000000000000..2126a3237d83 --- /dev/null +++ b/.github/workflows/publish-sdkman.yml @@ -0,0 +1,69 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO PUBLISH SCALA TO SDKMAN! ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL PUBLISH TO SDKMAN! THE BINARIES TO EACH SUPPORTED PLATFORM AND A UNIVERSAL JAR ### +### - IT CHANGES THE DEFAULT VERSION IN SDKMAN! ### +### ### +### NOTE: ### +### - WE SHOULD KEEP IN SYNC THE NAME OF THE ARCHIVES WITH THE ACTUAL BUILD ### +### - WE SHOULD KEEP IN SYNC THE URL OF THE RELEASE ### +################################################################################################### + + +name: Publish Scala to SDKMAN! +run-name: Publish Scala ${{ inputs.version }} to SDKMAN! 
+ +on: + workflow_call: + inputs: + version: + required: true + type: string + secrets: + CONSUMER-KEY: + required: true + CONSUMER-TOKEN: + required: true + +env: + RELEASE-URL: 'https://github.com/scala/scala3/releases/download/${{ inputs.version }}' + +jobs: + publish: + runs-on: ubuntu-latest + strategy: + matrix: + include: + - platform: LINUX_64 + archive : 'scala3-${{ inputs.version }}-x86_64-pc-linux.tar.gz' + - platform: LINUX_ARM64 + archive : 'scala3-${{ inputs.version }}-aarch64-pc-linux.tar.gz' + - platform: MAC_OSX + archive : 'scala3-${{ inputs.version }}-x86_64-apple-darwin.tar.gz' + - platform: MAC_ARM64 + archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.tar.gz' + - platform: WINDOWS_64 + archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.tar.gz' + - platform: UNIVERSAL + archive : 'scala3-${{ inputs.version }}.zip' + steps: + - uses: hamzaremmal/sdkman-release-action@7e437233a6bd79bc4cb0fa9071b685e94bdfdba6 + with: + CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} + CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} + CANDIDATE : scala + VERSION : ${{ inputs.version }} + URL : '${{ env.RELEASE-URL }}/${{ matrix.archive }}' + PLATFORM : ${{ matrix.platform }} + + default: + runs-on: ubuntu-latest + needs: publish + steps: + - uses: hamzaremmal/sdkman-default-action@866bc79fc5bd397eeb48f9cedda2f15221c8515d + with: + CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} + CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} + CANDIDATE : scala + VERSION : ${{ inputs.version }} diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index dde8b0372d52..4b75dd1b737d 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -1,32 +1,33 @@ -name: Releases +################################################################################################### +### OFFICIAL RELEASE WORKFLOW ### +### HOW TO USE: ### +### - THIS WORKFLOW WILL NEED TO BE TRIGGERED MANUALLY ### +### ### +### NOTE: ### +### - THIS WORKFLOW 
SHOULD ONLY BE RUN ON STABLE RELEASES ### +### - IT ASSUMES THAT THE PRE-RELEASE WORKFLOW WAS PREVIOUSLY EXECUTED ### +### ### +################################################################################################### + +name: Official release of Scala +run-name: Official release of Scala ${{ inputs.version }} + on: workflow_dispatch: - -permissions: - contents: read + inputs: + version: + description: 'The version to officially release' + required: true + type: string jobs: - publish_release: - runs-on: [self-hosted, Linux] - container: - image: lampepfl/dotty:2021-03-22 - options: --cpu-shares 4096 - - env: - SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} - SDKMAN_TOKEN: ${{ secrets.SDKMAN_TOKEN }} - - steps: - - name: Reset existing repo - run: | - git config --global --add safe.directory /__w/dotty/dotty - git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - - - name: Cleanup - run: .github/workflows/cleanup.sh - - - name: Git Checkout - uses: actions/checkout@v4 - - - name: Publish to SDKMAN - run: .github/workflows/scripts/publish-sdkman.sh + # TODO: ADD JOB TO SWITCH THE GITHUB RELEASE FROM DRAFT TO LATEST + publish-sdkman: + uses: ./.github/workflows/publish-sdkman.yml + with: + version: ${{ inputs.version }} + secrets: + CONSUMER-KEY: ${{ secrets.SDKMAN_KEY }} + CONSUMER-TOKEN: ${{ secrets.SDKMAN_TOKEN }} + + # TODO: ADD RELEASE WORKFLOW TO CHOCOLATEY AND OTHER PACKAGE MANAGERS HERE \ No newline at end of file diff --git a/.github/workflows/scripts/publish-sdkman.sh b/.github/workflows/scripts/publish-sdkman.sh deleted file mode 100755 index f959c426e9d8..000000000000 --- a/.github/workflows/scripts/publish-sdkman.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash - -# This is script for publishing scala on SDKMAN. -# Script resolves the latest stable version of scala and then send REST request to SDKMAN Vendor API. 
-# It's releasing and announcing the release of scala on SDKMAN. -# -# Requirement: -# - the latest stable version of scala should be available in github artifacts - -set -u - -# latest stable dotty version -DOTTY_VERSION=$(curl -s https://api.github.com/repos/scala/scala3/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') -DOTTY_URL="https://github.com/scala/scala3/releases/download/$DOTTY_VERSION/scala3-$DOTTY_VERSION.zip" - -# checking if dotty version is available -if ! curl --output /dev/null --silent --head --fail "$DOTTY_URL"; then - echo "URL doesn't exist: $DOTTY_URL" - exit 1 -fi - -# Release a new Candidate Version -curl --silent --show-error --fail \ - -X POST \ - -H "Consumer-Key: $SDKMAN_KEY" \ - -H "Consumer-Token: $SDKMAN_TOKEN" \ - -H "Content-Type: application/json" \ - -H "Accept: application/json" \ - -d '{"candidate": "scala", "version": "'"$DOTTY_VERSION"'", "url": "'"$DOTTY_URL"'"}' \ - https://vendors.sdkman.io/release - -if [[ $? -ne 0 ]]; then - echo "Fail sending POST request to releasing scala on SDKMAN." - exit 1 -fi - -# Set DOTTY_VERSION as Default for Candidate -curl --silent --show-error --fail \ - -X PUT \ - -H "Consumer-Key: $SDKMAN_KEY" \ - -H "Consumer-Token: $SDKMAN_TOKEN" \ - -H "Content-Type: application/json" \ - -H "Accept: application/json" \ - -d '{"candidate": "scala", "version": "'"$DOTTY_VERSION"'"}' \ - https://vendors.sdkman.io/default - -if [[ $? -ne 0 ]]; then - echo "Fail sending PUT request to announcing the release of scala on SDKMAN." - exit 1 -fi From e005369f41f05bc2224650958207480667329b4e Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 20:38:39 +0200 Subject: [PATCH 403/465] Avoid stacked thisCall contexts AddImplicitArgs can recursively add several implicit parameter lists. We need to make sure we don't perform a thisCallContext search in another thisCall context in this case. 
Fixes #20483 The original code would back out further and further in the context chain for every implicit parameter section on the secondary constructor. Eventually (in this case after 3 times) bad things happen. --- .../src/dotty/tools/dotc/core/Contexts.scala | 2 +- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 27 ++++++++++++------- tests/pos/i20483.scala | 13 +++++++++ 4 files changed, 32 insertions(+), 12 deletions(-) create mode 100644 tests/pos/i20483.scala diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index a5b0e2dba254..79a0b279aefe 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -477,7 +477,7 @@ object Contexts { /** Is the flexible types option set? */ def flexibleTypes: Boolean = base.settings.YexplicitNulls.value && !base.settings.YnoFlexibleTypes.value - + /** Is the best-effort option set? 
*/ def isBestEffort: Boolean = base.settings.YbestEffort.value diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 54821444aed6..74bd59d4992f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1067,7 +1067,7 @@ trait Implicits: trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { record("inferImplicit") assert(ctx.phase.allowsImplicitSearch, - if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase.phaseName}" + if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase}" else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}") val usableForInference = pt.exists && !pt.unusableForInference diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ae50d626cb1f..ae62ebbc4a3f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4058,7 +4058,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def dummyArg(tp: Type) = untpd.Ident(nme.???).withTypeUnchecked(tp) - def addImplicitArgs(using Context) = { + val origCtx = ctx + + def addImplicitArgs(using Context) = def hasDefaultParams = methPart(tree).symbol.hasDefaultParams def implicitArgs(formals: List[Type], argIndex: Int, pt: Type): List[Tree] = formals match case Nil => Nil @@ -4181,15 +4183,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => retyped else issueErrors(tree, args) } - else tree match { - case tree: Block => - readaptSimplified(tpd.Block(tree.stats, tpd.Apply(tree.expr, args))) - case tree: NamedArg => - readaptSimplified(tpd.NamedArg(tree.name, tpd.Apply(tree.arg, args))) - case _ => - 
readaptSimplified(tpd.Apply(tree, args)) - } - } + else + inContext(origCtx): + // Reset context in case it was set to a supercall context before. + // otherwise the invariant for taking another this or super call context is not met. + // Test case is i20483.scala + tree match + case tree: Block => + readaptSimplified(tpd.Block(tree.stats, tpd.Apply(tree.expr, args))) + case tree: NamedArg => + readaptSimplified(tpd.NamedArg(tree.name, tpd.Apply(tree.arg, args))) + case _ => + readaptSimplified(tpd.Apply(tree, args)) + end addImplicitArgs + pt.revealIgnored match { case pt: FunProto if pt.applyKind == ApplyKind.Using => // We can end up here if extension methods are called with explicit given arguments. diff --git a/tests/pos/i20483.scala b/tests/pos/i20483.scala new file mode 100644 index 000000000000..a01a77327181 --- /dev/null +++ b/tests/pos/i20483.scala @@ -0,0 +1,13 @@ + +class Foo + (x: Option[String]) + (using Boolean) + (using Int) + (using Double): + + def this + (x: String) + (using Boolean) + (using Int) + (using Double) = + this(Some(x)) \ No newline at end of file From 665bd20cd3ea0b5c69faa86e272cb1cf6bbf15a3 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 11 Jun 2024 18:42:42 +0900 Subject: [PATCH 404/465] Bundle scala cli in scala command (#20351) fixes #20098 Proposed changes to zip/targz archive: - in the `/bin` directory store an extra launcher for Scala CLI (either JAR, or native per platform). - `/bin/scala[.bat]` is modified to invoke Scala CLI stored in `/bin` - new `/maven2` directory, which stores all the Jars and POM files necessary (in maven repo style) for scala-cli to invoke scala compiler offline (using the `-r` launcher option). - CHOICE: either replace jar files in `/lib` by aliases to the corresponding jar in `/maven2`, OR delete `/lib` and update references from scripts. 
(Looks like symlinks are not portable, so probably we should encode the classpath in a file, or adjust slightly how we build the toolchain) - add platform specific suffixes to artefacts: - e.g. `scala-3.5.0-x86_64-pc-linux.tar.gz` (for the artefact that bundles the x64 linux launcher) --------- Co-authored-by: Hamza REMMAL --- .github/workflows/ci.yaml | 14 +- .github/workflows/launchers.yml | 96 +++++ bin/common | 9 +- bin/common-platform | 63 +++ bin/scala | 35 +- bin/scalac | 2 +- bin/scaladoc | 2 +- build.sbt | 5 + .../src/dotty/tools/MainGenericRunner.scala | 2 +- .../scripting/argfileClasspath.sc | 9 - ...hReport.sc => classpathReport_scalacli.sc} | 2 +- .../scripting/cpArgumentsFile.txt | 1 - compiler/test-resources/scripting/envtest.sc | 2 + .../scripting/envtest_scalacli.sc | 3 + compiler/test-resources/scripting/hashBang.sc | 2 +- .../test-resources/scripting/hashBang.scala | 4 +- .../test-resources/scripting/scriptName.scala | 2 +- .../test-resources/scripting/scriptPath.sc | 2 +- .../scripting/scriptPath_scalacli.sc | 13 + compiler/test-resources/scripting/showArgs.sc | 2 +- .../scripting/showArgs_scalacli.sc | 7 + .../test-resources/scripting/sqlDateError.sc | 2 +- .../scripting/sqlDateError_scalacli.sc | 6 + .../test-resources/scripting/touchFile.sc | 2 +- .../scripting/unglobClasspath.sc | 6 - .../scripting/unglobClasspath_scalacli.sc | 9 + .../test/dotty/tools/io/ClasspathTest.scala | 4 +- .../tools/scripting/BashExitCodeTests.scala | 10 +- .../tools/scripting/BashScriptsTests.scala | 20 +- .../tools/scripting/ClasspathTests.scala | 18 +- .../tools/scripting/ExpressionTest.scala | 4 + .../dotty/tools/scripting/ScriptTestEnv.scala | 67 ++- .../tools/scripting/ScriptingTests.scala | 6 +- compiler/test/dotty/tools/utils.scala | 13 +- dist/bin-native-overrides/cli-common-platform | 16 + .../cli-common-platform.bat | 18 + dist/bin/cli-common-platform | 3 + dist/bin/cli-common-platform.bat | 5 + dist/bin/common | 132 +----- dist/bin/common-shared | 139 
+++++++ dist/bin/scala | 10 +- dist/bin/scala.bat | 20 +- project/Build.scala | 80 +++- project/RepublishPlugin.scala | 388 +++++++++++++----- project/scripts/bootstrappedOnlyCmdTests | 17 +- project/scripts/buildScalaBinary | 12 + project/scripts/cmdTestsCommon.inc.sh | 17 + project/scripts/echoArgs.sc | 6 + project/scripts/native-integration/bashTests | 84 ++++ .../reportScalaVersion.scala | 4 + .../scripts/native-integration/winTests.bat | 19 + project/scripts/winCmdTests | 2 +- project/scripts/winCmdTests.bat | 2 +- .../src/main/scala/a/zz.scala | 6 + 54 files changed, 1088 insertions(+), 336 deletions(-) create mode 100644 .github/workflows/launchers.yml create mode 100755 bin/common-platform delete mode 100755 compiler/test-resources/scripting/argfileClasspath.sc rename compiler/test-resources/scripting/{classpathReport.sc => classpathReport_scalacli.sc} (91%) delete mode 100755 compiler/test-resources/scripting/cpArgumentsFile.txt create mode 100755 compiler/test-resources/scripting/envtest_scalacli.sc create mode 100755 compiler/test-resources/scripting/scriptPath_scalacli.sc create mode 100755 compiler/test-resources/scripting/showArgs_scalacli.sc create mode 100755 compiler/test-resources/scripting/sqlDateError_scalacli.sc delete mode 100755 compiler/test-resources/scripting/unglobClasspath.sc create mode 100755 compiler/test-resources/scripting/unglobClasspath_scalacli.sc create mode 100644 dist/bin-native-overrides/cli-common-platform create mode 100644 dist/bin-native-overrides/cli-common-platform.bat create mode 100644 dist/bin/cli-common-platform create mode 100644 dist/bin/cli-common-platform.bat create mode 100644 dist/bin/common-shared create mode 100755 project/scripts/buildScalaBinary create mode 100644 project/scripts/echoArgs.sc create mode 100755 project/scripts/native-integration/bashTests create mode 100644 project/scripts/native-integration/reportScalaVersion.scala create mode 100755 project/scripts/native-integration/winTests.bat create 
mode 100644 tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 92df4a190ec7..cad7caec490d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -141,7 +141,8 @@ jobs: - name: Cmd Tests run: | - ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" + ./project/scripts/buildScalaBinary + ./project/scripts/sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests @@ -221,7 +222,7 @@ jobs: shell: cmd - name: build binary - run: sbt "dist/pack" & bash -version + run: sbt "dist-win-x86_64/pack" & bash -version shell: cmd - name: cygwin tests @@ -254,8 +255,12 @@ jobs: - name: Git Checkout uses: actions/checkout@v4 + - name: build binary + run: sbt "dist-win-x86_64/pack" + shell: cmd + - name: Test - run: sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test" + run: sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test" shell: cmd - name: Scala.js Test @@ -581,7 +586,8 @@ jobs: - name: Test run: | - ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*" + ./project/scripts/buildScalaBinary + ./project/scripts/sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests diff --git a/.github/workflows/launchers.yml b/.github/workflows/launchers.yml new file mode 100644 index 000000000000..818e3b72b06b --- /dev/null +++ b/.github/workflows/launchers.yml @@ -0,0 +1,96 @@ +name: Test CLI Launchers on all the platforms +on: + pull_request: + workflow_dispatch: + +jobs: + 
linux-x86_64: + name: Deploy and Test on Linux x64 architecture + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-linux-x86_64" + + linux-aarch64: + name: Deploy and Test on Linux ARM64 architecture + runs-on: macos-latest + if: ${{ false }} + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-linux-aarch64" + + mac-x86_64: + name: Deploy and Test on Mac x64 architecture + runs-on: macos-13 + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-mac-x86_64" + + mac-aarch64: + name: Deploy and Test on Mac ARM64 architecture + runs-on: macos-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-mac-aarch64" + + win-x86_64: + name: Deploy and Test on Windows x64 
architecture + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + - name: Build the launcher command + run: sbt "dist-win-x86_64/pack" + - name: Run the launcher command tests + run: './project/scripts/native-integration/winTests.bat' + shell: cmd diff --git a/bin/common b/bin/common index 7d3aa7148265..37b2ebd1ff93 100755 --- a/bin/common +++ b/bin/common @@ -9,15 +9,18 @@ target="$1" shift # Mutates $@ by deleting the first element ($1) +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + # Marker file used to obtain the date of latest call to sbt-back -version="$ROOT/dist/target/pack/VERSION" +version="$ROOT/$DIST_DIR/target/pack/VERSION" # Create the target if absent or if file changed in ROOT/compiler new_files="$(find "$ROOT/compiler" \( -iname "*.scala" -o -iname "*.java" \) -newer "$version" 2> /dev/null)" if [ ! -f "$version" ] || [ ! -z "$new_files" ]; then echo "Building Dotty..." - (cd $ROOT && sbt "dist/pack") + (cd $ROOT && sbt "$DIST_PROJECT/pack") fi -"$target" "$@" +"$ROOT/$DIST_DIR/target/pack/bin/$target" "$@" diff --git a/bin/common-platform b/bin/common-platform new file mode 100755 index 000000000000..648e0195e7e6 --- /dev/null +++ b/bin/common-platform @@ -0,0 +1,63 @@ +#!/usr/bin/env bash + +unset cygwin mingw msys darwin + +# COLUMNS is used together with command line option '-pageWidth'. 
+if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + ;; +esac + +unset DIST_PROJECT DIST_DIR + +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + DIST_PROJECT="dist-win-x86_64" + DIST_DIR="dist/win-x86_64" +else + # OS and arch logic taken from https://github.com/VirtusLab/scala-cli/blob/main/scala-cli.sh + unset arch ARCH_NORM + arch=$(uname -m) + if [[ "$arch" == "aarch64" ]] || [[ "$arch" == "x86_64" ]]; then + ARCH_NORM="$arch" + elif [[ "$arch" == "amd64" ]]; then + ARCH_NORM="x86_64" + elif [[ "$arch" == "arm64" ]]; then + ARCH_NORM="aarch64" + else + ARCH_NORM="unknown" + fi + + if [ "$(expr substr $(uname -s) 1 5 2>/dev/null)" == "Linux" ]; then + if [[ "$ARCH_NORM" == "unknown" ]]; then + echo >&2 "unknown Linux CPU architecture, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + else + DIST_PROJECT="dist-linux-$ARCH_NORM" + DIST_DIR="dist/linux-$ARCH_NORM" + fi + elif [ "$(uname)" == "Darwin" ]; then + if [[ "$ARCH_NORM" == "unknown" ]]; then + echo >&2 "unknown Darwin CPU architecture, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + else + DIST_PROJECT="dist-mac-$ARCH_NORM" + DIST_DIR="dist/mac-$ARCH_NORM" + fi + else + echo >&2 "unknown OS, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + fi +fi diff --git a/bin/scala b/bin/scala index 85c1ac91d08f..e87c4391806b 100755 --- a/bin/scala +++ b/bin/scala @@ -2,4 +2,37 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." 
-"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scala" "--power" "$@" "--offline" "--server=false" +scala_args() { + + declare -a CLI_ARGS + declare -a SCRIPT_ARGS + declare DISABLE_BLOOP=1 + + while (( "$#" )); do + case "$1" in + "--") + shift + SCRIPT_ARGS+=("--") + SCRIPT_ARGS+=("$@") + break + ;; + "clean" | "version" | "--version" | "-version" | "help" | "--help" | "-help") + CLI_ARGS+=("$1") + DISABLE_BLOOP=0 # clean command should not add --offline --server=false + shift + ;; + *) + CLI_ARGS+=("$1") + shift + ;; + esac + done + + if [ $DISABLE_BLOOP -eq 1 ]; then + CLI_ARGS+=("--offline" "--server=false") + fi + + echo "--power ${CLI_ARGS[@]} ${SCRIPT_ARGS[@]}" +} + +"$ROOT/bin/common" "scala" $(scala_args "$@") diff --git a/bin/scalac b/bin/scalac index faeb48d92d87..d141b9a6c6bb 100755 --- a/bin/scalac +++ b/bin/scalac @@ -2,4 +2,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." -"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scalac" "$@" +"$ROOT/bin/common" "scalac" "$@" diff --git a/bin/scaladoc b/bin/scaladoc index 11a754c6579f..02decabb9ae3 100755 --- a/bin/scaladoc +++ b/bin/scaladoc @@ -2,4 +2,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." 
-"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scaladoc" "$@" +"$ROOT/bin/common" "scaladoc" "$@" diff --git a/build.sbt b/build.sbt index 1bc74e5e23fb..f357044c91ca 100644 --- a/build.sbt +++ b/build.sbt @@ -28,6 +28,11 @@ val `scaladoc-js-main` = Build.`scaladoc-js-main` val `scaladoc-js-contributors` = Build.`scaladoc-js-contributors` val `scala3-bench-run` = Build.`scala3-bench-run` val dist = Build.dist +val `dist-mac-x86_64` = Build.`dist-mac-x86_64` +val `dist-mac-aarch64` = Build.`dist-mac-aarch64` +val `dist-win-x86_64` = Build.`dist-win-x86_64` +val `dist-linux-x86_64` = Build.`dist-linux-x86_64` +val `dist-linux-aarch64` = Build.`dist-linux-aarch64` val `community-build` = Build.`community-build` val `sbt-community-build` = Build.`sbt-community-build` val `scala3-presentation-compiler` = Build.`scala3-presentation-compiler` diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala index 5b238693a135..bf477f019cba 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -270,7 +270,7 @@ object MainGenericRunner { val ranByCoursierBootstrap = sys.props.isDefinedAt("coursier.mainJar") - || sys.props.get("bootstrap.mainClass").filter(_ == "dotty.tools.MainGenericRunner").isDefined + || sys.props.get("bootstrap.mainClass").contains("dotty.tools.MainGenericRunner") val silenced = sys.props.get("scala.use_legacy_launcher") == Some("true") diff --git a/compiler/test-resources/scripting/argfileClasspath.sc b/compiler/test-resources/scripting/argfileClasspath.sc deleted file mode 100755 index c31371ba8934..000000000000 --- a/compiler/test-resources/scripting/argfileClasspath.sc +++ /dev/null @@ -1,9 +0,0 @@ -#!dist/target/pack/bin/scala @compiler/test-resources/scripting/cpArgumentsFile.txt - -import java.nio.file.Paths - -def main(args: Array[String]): Unit = - val cwd = Paths.get(".").toAbsolutePath.toString.replace('\\', '/').replaceAll("/$", 
"") - printf("cwd: %s\n", cwd) - printf("classpath: %s\n", sys.props("java.class.path")) - diff --git a/compiler/test-resources/scripting/classpathReport.sc b/compiler/test-resources/scripting/classpathReport_scalacli.sc similarity index 91% rename from compiler/test-resources/scripting/classpathReport.sc rename to compiler/test-resources/scripting/classpathReport_scalacli.sc index cc68c4b1d52e..0b2552b3ac84 100755 --- a/compiler/test-resources/scripting/classpathReport.sc +++ b/compiler/test-resources/scripting/classpathReport_scalacli.sc @@ -1,5 +1,5 @@ #!/usr/bin/env bin/scala - +// This file is a Scala CLI script. import java.nio.file.Paths // def main(args: Array[String]): Unit = // MIGRATION: Scala CLI expects `*.sc` files to be straight-line code diff --git a/compiler/test-resources/scripting/cpArgumentsFile.txt b/compiler/test-resources/scripting/cpArgumentsFile.txt deleted file mode 100755 index 73037eb7d9bc..000000000000 --- a/compiler/test-resources/scripting/cpArgumentsFile.txt +++ /dev/null @@ -1 +0,0 @@ --classpath dist/target/pack/lib/* diff --git a/compiler/test-resources/scripting/envtest.sc b/compiler/test-resources/scripting/envtest.sc index b2fde1b32339..724580449229 100755 --- a/compiler/test-resources/scripting/envtest.sc +++ b/compiler/test-resources/scripting/envtest.sc @@ -1,2 +1,4 @@ +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class + def main(args: Array[String]): Unit = println("Hello " + util.Properties.propOrNull("key")) diff --git a/compiler/test-resources/scripting/envtest_scalacli.sc b/compiler/test-resources/scripting/envtest_scalacli.sc new file mode 100755 index 000000000000..993ea1691640 --- /dev/null +++ b/compiler/test-resources/scripting/envtest_scalacli.sc @@ -0,0 +1,3 @@ +// This file is a Scala CLI script. 
+ +println("Hello " + util.Properties.propOrNull("key")) diff --git a/compiler/test-resources/scripting/hashBang.sc b/compiler/test-resources/scripting/hashBang.sc index d767bd1a1592..98884bc050c0 100755 --- a/compiler/test-resources/scripting/hashBang.sc +++ b/compiler/test-resources/scripting/hashBang.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +#!/usr/bin/env fake-program-to-test-hashbang-removal # comment STUFF=nada !# diff --git a/compiler/test-resources/scripting/hashBang.scala b/compiler/test-resources/scripting/hashBang.scala index 1aab26269f86..b7bf6b541854 100755 --- a/compiler/test-resources/scripting/hashBang.scala +++ b/compiler/test-resources/scripting/hashBang.scala @@ -1,8 +1,8 @@ -#!/usr/bin/env scala +#!/usr/bin/env fake-program-to-test-hashbang-removal # comment STUFF=nada !# - +// everything above this point should be ignored by the compiler def main(args: Array[String]): Unit = System.err.printf("mainClassFromStack: %s\n",mainFromStack) assert(mainFromStack.contains("hashBang"),s"fromStack[$mainFromStack]") diff --git a/compiler/test-resources/scripting/scriptName.scala b/compiler/test-resources/scripting/scriptName.scala index 21aec32fe0bb..7e479197d567 100755 --- a/compiler/test-resources/scripting/scriptName.scala +++ b/compiler/test-resources/scripting/scriptName.scala @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = val name = Option(sys.props("script.name")) match { diff --git a/compiler/test-resources/scripting/scriptPath.sc b/compiler/test-resources/scripting/scriptPath.sc index 46cd5e8a7385..e29e659d09d4 100755 --- a/compiler/test-resources/scripting/scriptPath.sc +++ b/compiler/test-resources/scripting/scriptPath.sc @@ -1,4 +1,4 @@ -#!dist/target/pack/bin/scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = 
args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) } diff --git a/compiler/test-resources/scripting/scriptPath_scalacli.sc b/compiler/test-resources/scripting/scriptPath_scalacli.sc new file mode 100755 index 000000000000..c13888d0e4b1 --- /dev/null +++ b/compiler/test-resources/scripting/scriptPath_scalacli.sc @@ -0,0 +1,13 @@ +#!/usr/bin/env bin/scala + +// THIS FILE IS RAN WITH SCALA CLI, which wraps scripts exposing scriptPath and args variables + +args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) } + +if !scriptPath.endsWith("scriptPath_scalacli.sc") then + printf( s"incorrect script.path defined as [$scriptPath]") +else + printf("scriptPath: %s\n", scriptPath) // report the value + +extension(s: String) + def norm: String = s.replace('\\', '/') diff --git a/compiler/test-resources/scripting/showArgs.sc b/compiler/test-resources/scripting/showArgs.sc index 8ef08f8962b0..69d552b9cf5f 100755 --- a/compiler/test-resources/scripting/showArgs.sc +++ b/compiler/test-resources/scripting/showArgs.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env bin/scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class // precise output format expected by BashScriptsTests.scala def main(args: Array[String]): Unit = diff --git a/compiler/test-resources/scripting/showArgs_scalacli.sc b/compiler/test-resources/scripting/showArgs_scalacli.sc new file mode 100755 index 000000000000..4591ac159345 --- /dev/null +++ b/compiler/test-resources/scripting/showArgs_scalacli.sc @@ -0,0 +1,7 @@ +#!/usr/bin/env bin/scala + +// This file is a Scala CLI script. 
+ +// precise output format expected by BashScriptsTests.scala +for (a,i) <- args.zipWithIndex do + printf(s"arg %2d:[%s]\n",i,a) diff --git a/compiler/test-resources/scripting/sqlDateError.sc b/compiler/test-resources/scripting/sqlDateError.sc index 35160fd6fcd5..e7c3a623c6c1 100755 --- a/compiler/test-resources/scripting/sqlDateError.sc +++ b/compiler/test-resources/scripting/sqlDateError.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env bin/scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = { println(new java.sql.Date(100L)) diff --git a/compiler/test-resources/scripting/sqlDateError_scalacli.sc b/compiler/test-resources/scripting/sqlDateError_scalacli.sc new file mode 100755 index 000000000000..10b58821a6e4 --- /dev/null +++ b/compiler/test-resources/scripting/sqlDateError_scalacli.sc @@ -0,0 +1,6 @@ +#!/usr/bin/env bin/scala + +// This file is a Scala CLI script. + +println(new java.sql.Date(100L)) +System.err.println("SCALA_OPTS="+Option(System.getenv("SCALA_OPTS")).getOrElse("")) diff --git a/compiler/test-resources/scripting/touchFile.sc b/compiler/test-resources/scripting/touchFile.sc index 974f8a64d192..b46b3c99d786 100755 --- a/compiler/test-resources/scripting/touchFile.sc +++ b/compiler/test-resources/scripting/touchFile.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class import java.io.File diff --git a/compiler/test-resources/scripting/unglobClasspath.sc b/compiler/test-resources/scripting/unglobClasspath.sc deleted file mode 100755 index deab2b8982ac..000000000000 --- a/compiler/test-resources/scripting/unglobClasspath.sc +++ /dev/null @@ -1,6 +0,0 @@ -// won't compile unless classpath is set correctly -import dotty.tools.tasty.TastyFormat - -// def main(args: Array[String]) = // MIGRATION: Scala CLI expects `*.sc` files to be straight-line code - val cp = 
sys.props("java.class.path") - printf("unglobbed classpath: %s\n", cp) diff --git a/compiler/test-resources/scripting/unglobClasspath_scalacli.sc b/compiler/test-resources/scripting/unglobClasspath_scalacli.sc new file mode 100755 index 000000000000..ccc4cf667085 --- /dev/null +++ b/compiler/test-resources/scripting/unglobClasspath_scalacli.sc @@ -0,0 +1,9 @@ +// This file is a Scala CLI script. + +import dotty.tools.tasty.TastyFormat +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// not visible on default classpath, "compiler/test/dotty/tools/scripting/ClasspathTests.scala" +// adds it to classpath via a compiler argument `-classpath 'org/scala-lang/tasty-core_3/$VERSION/*'` + +val cp = sys.props("java.class.path") +printf("unglobbed classpath: %s\n", cp) diff --git a/compiler/test/dotty/tools/io/ClasspathTest.scala b/compiler/test/dotty/tools/io/ClasspathTest.scala index a0fef65afdec..333f2b8062b0 100755 --- a/compiler/test/dotty/tools/io/ClasspathTest.scala +++ b/compiler/test/dotty/tools/io/ClasspathTest.scala @@ -15,6 +15,8 @@ class ClasspathTest { def pathsep = sys.props("path.separator") + def isWindows: Boolean = scala.util.Properties.isWin + // // Cope with wildcard classpath entries, exercised with -classpath // @@ -23,7 +25,7 @@ class ClasspathTest { @Test def testWildcards(): Unit = val outDir = Files.createTempDirectory("classpath-test") try - val compilerLib = "dist/target/pack/lib" + val compilerLib = s"${if isWindows then "dist-win-x86_64" else "dist"}/target/pack/lib" val libdir = Paths.get(compilerLib).toFile if libdir.exists then val libjarFiles = libdir.listFiles.toList.take(5) diff --git a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala index 90a8d80330b4..857f5ef378e7 100644 --- a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala +++ b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala @@ -16,7 +16,11 @@ import ScriptTestEnv.* class BashExitCodeTests: private var 
myTmpDir: String | Null = null private lazy val tmpDir = { myTmpDir = Files.createTempDirectory("exit-code-tests").toFile.absPath; myTmpDir } - @After def cleanup(): Unit = if myTmpDir != null then io.Directory(myTmpDir).deleteRecursively() + @After def cleanup(): Unit = { + if myTmpDir != null then io.Directory(myTmpDir).deleteRecursively() + + cleanupScalaCLIDirs() + } /** Verify the exit code of running `cmd args*`. */ def verifyExit(cmd: String, args: String*)(expectedExitCode: Int): Unit = @@ -28,8 +32,8 @@ class BashExitCodeTests: s"expected $expectedExitCode but got $exitCode${pp("out", stdout)}${pp("err", stderr)}" }, expectedExitCode, exitCode) - // Helpers for running scala, scalac, and scalac without the the output directory ("raw") - def scala(args: String*) = verifyExit(scalaPath, ("--power" +: "--offline" +: "--server=false" +: args)*) + // Helpers for running scala, scalac, and scalac without the output directory ("raw") + def scala(args: String*) = verifyExit(scalaPath, ("--power" +: args :+ "--offline" :+ "--server=false")*) def scalacRaw(args: String*) = verifyExit(scalacPath, args*) def scalac(args: String*) = scalacRaw(("-d" +: tmpDir +: args)*) diff --git a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala index 25bc54e2dcbe..6af863f0fccd 100644 --- a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala +++ b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala @@ -25,11 +25,13 @@ object BashScriptsTests: def testFiles = scripts("/scripting") @AfterClass def cleanup: Unit = { + cleanupScalaCLIDirs() + val af = argsfile.toFile - if (af.exists) { + if af.exists then af.delete() - } } + printf("osname[%s]\n", osname) printf("uname[%s]\n", ostypeFull) printf("using JAVA_HOME=%s\n", envJavaHome) @@ -50,7 +52,7 @@ object BashScriptsTests: val testScriptArgs = Seq( "a", "b", "c", "-repl", "-run", "-script", "-debug" ) - val Seq(showArgsScript, showArgsScalaCli) = 
Seq("showArgs.sc", "showArgsNu.sc").map { name => + val Seq(showArgsScript, showArgsScalaCli) = Seq("showArgs.sc", "showArgs_scalacli.sc").map { name => testFiles.find(_.getName == name).get.absPath } @@ -66,7 +68,7 @@ object BashScriptsTests: } file - val Seq(envtestNuSc, envtestScala) = Seq("envtestNu.sc", "envtest.scala").map { testFile(_) } + val Seq(envtestNuSc, envtestScala) = Seq("envtest_scalacli.sc", "envtest.scala").map { testFile(_) } // create command line with given options, execute specified script, return stdout def callScript(tag: String, script: String, keyPre: String): String = @@ -173,13 +175,13 @@ class BashScriptsTests: assert(stdout == expectedOutput) /* - * verify that scriptPathNu.sc sees a valid script.path property, - * and that it's value is the path to "scriptPathNu.sc". + * verify that scriptPath_scalacli.sc sees a valid script.path property, + * and that it's value is the path to "scriptPath_scalacli.sc". */ @Category(Array(classOf[BootstrappedOnlyTests])) @Test def verifyScriptPathProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptFile = testFiles.find(_.getName == "scriptPathNu.sc").get + val scriptFile = testFiles.find(_.getName == "scriptPath_scalacli.sc").get val expected = s"${scriptFile.getName}" printf("===> verify valid system property script.path is reported by script [%s]\n", scriptFile.getName) printf("calling scriptFile: %s\n", scriptFile) @@ -196,7 +198,7 @@ class BashScriptsTests: */ @Test def verifyScalaOpts = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptFile = testFiles.find(_.getName == "classpathReport.sc").get + val scriptFile = testFiles.find(_.getName == "classpathReport_scalacli.sc").get printf("===> verify SCALA_OPTS='@argsfile' is properly handled by `dist/bin/scala`\n") val envPairs = List(("SCALA_OPTS", s"@$argsfile")) val (validTest, exitCode, stdout, stderr) = 
bashCommand(scriptFile.absPath, envPairs) @@ -219,7 +221,7 @@ class BashScriptsTests: */ @Test def sqlDateTest = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptBase = "sqlDateErrorNu" + val scriptBase = "sqlDateError_scalacli" val scriptFile = testFiles.find(_.getName == s"$scriptBase.sc").get val testJar = testFile(s"$scriptBase.jar") // jar should not be created when scriptFile runs val tj = Paths.get(testJar).toFile diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index 24c6c297a777..a946e509aeb3 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -11,8 +11,12 @@ import org.junit.{Test, Ignore, AfterClass} import vulpix.TestConfiguration import ScriptTestEnv.* -/** Test java command line generated by bin/scala and bin/scalac */ +object ClasspathTests: + @AfterClass def cleanup: Unit = { + cleanupScalaCLIDirs() + } +/** Test java command line generated by bin/scala and bin/scalac */ class ClasspathTests: /* * Test disabled (temporarily). 
@@ -24,7 +28,7 @@ class ClasspathTests: @Ignore @Test def hashbangClasspathVerifyTest = { // only interested in classpath test scripts - val testScriptName = "classpathReport.sc" + val testScriptName = "classpathReport_scalacli.sc" val testScript = scripts("/scripting").find { _.getName.matches(testScriptName) } match case None => sys.error(s"test script not found: ${testScriptName}") case Some(file) => file @@ -39,7 +43,7 @@ class ClasspathTests: cmd.foreach { printf("[%s]\n", _) } - // classpathReport.sc is expected to produce two lines: + // classpathReport_scalacli.sc is expected to produce two lines: // cwd: // classpath: @@ -51,10 +55,10 @@ class ClasspathTests: // convert scriptCp to a list of files val hashbangJars: List[File] = scriptCp.split(psep).map { _.toFile }.toList val hashbangClasspathJars = hashbangJars.map { _.name }.sorted.distinct // get jar basenames, remove duplicates - val packlibDir: String = ??? /* ??? was s"$scriptCwd/$packLibDir" */ // classpathReport.sc specifies a wildcard classpath in this directory + val packlibDir: String = ??? /* ??? 
was s"$scriptCwd/$packLibDir" */ // classpathReport_scalacli.sc specifies a wildcard classpath in this directory val packlibJars: List[File] = listJars(packlibDir) // classpath entries expected to have been reported by the script - printf("%d jar files in dist/target/pack/lib\n", packlibJars.size) + printf(s"%d jar files in $packDir/lib\n", packlibJars.size) printf("%d test script jars in classpath\n", hashbangClasspathJars.size) val (diff: Set[File], msg: String) = if (packlibJars.size > hashbangClasspathJars.size) { @@ -63,7 +67,7 @@ class ClasspathTests: (hashbangJars.toSet -- packlibJars.toSet , "only in hashbang classpath") } // verify that the script hasbang classpath setting was effective at supplementing the classpath - // (a minimal subset of jars below dist/target/pack/lib are always be in the classpath) + // (a minimal subset of jars below dist*/target/pack/lib are always be in the classpath) val missingClasspathEntries = if hashbangClasspathJars.size != packlibJars.size then printf("packlib dir [%s]\n", packlibDir) printf("hashbangClasspathJars: %s\n", hashbangJars.map { _.relpath.norm }.mkString("\n ", "\n ", "")) @@ -79,7 +83,7 @@ class ClasspathTests: */ @Ignore @Test def unglobClasspathVerifyTest = { - val testScriptName = "unglobClasspath.sc" + val testScriptName = "unglobClasspath_scalacli.sc" val testScript = scripts("/scripting").find { _.name.matches(testScriptName) } match case None => sys.error(s"test script not found: ${testScriptName}") case Some(file) => file diff --git a/compiler/test/dotty/tools/scripting/ExpressionTest.scala b/compiler/test/dotty/tools/scripting/ExpressionTest.scala index 02963f50ee52..bc42860253b0 100755 --- a/compiler/test/dotty/tools/scripting/ExpressionTest.scala +++ b/compiler/test/dotty/tools/scripting/ExpressionTest.scala @@ -55,6 +55,10 @@ class ExpressionTest: object ExpressionTest: + @AfterClass def cleanup(): Unit = { + cleanupScalaCLIDirs() + } + def main(args: Array[String]): Unit = val tests = new 
ExpressionTest println("\n=== verifyCommandLineExpression ===") diff --git a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala index a52014f14704..dd1cc04bb58a 100644 --- a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala +++ b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala @@ -5,6 +5,7 @@ package scripting import scala.language.unsafeNulls import java.io.File +import java.util.Locale import java.nio.file.{Path, Paths, Files} import dotty.tools.dotc.config.Properties.* @@ -15,7 +16,7 @@ import scala.jdk.CollectionConverters.* /** * Common Code for supporting scripting tests. * To override the path to the bash executable, set TEST_BASH= - * To specify where `dist/target/pack/bin` resides, set TEST_CWD= + * To specify where `dist[*]/target/pack/bin` resides, set TEST_CWD= * Test scripts run in a bash env, so paths are converted to forward slash via .norm. */ object ScriptTestEnv { @@ -28,6 +29,44 @@ object ScriptTestEnv { def whichJava: String = whichExe("java") def whichBash: String = whichExe("bash") + def cleanupScalaCLIDirs(): Unit = { + val scriptingDir = io.Directory(scriptsDir("/scripting").getPath) + val dottyDir = io.Directory(workingDirectory) + + val residueDirs = Seq( + (scriptingDir / ".bsp"), + (scriptingDir / ".scala-build"), + (dottyDir / ".scala-build") + ) + + for f <- residueDirs do + f.deleteRecursively() + + val bspDir = dottyDir / ".bsp" + (bspDir / "scala.json").delete() + if bspDir.isEmpty then bspDir.delete() + } + + lazy val nativePackDir: Option[String] = { + def nativeDir(os: String, arch: String) = Some(s"dist/$os-$arch/target/pack") + def nativeOs(os: String) = archNorm match + case arch @ ("aarch64" | "x86_64") => nativeDir(os, arch) + case _ => None + + if winshell then nativeDir("win", "x86_64") // assume x86_64 for now + else if linux then nativeOs("linux") + else if mac then nativeOs("mac") + else None + } + + def jvmPackDir() = + println("warning: 
unknown OS architecture combination, defaulting to JVM launcher.") + "dist/target/pack" + + def packDir: String = nativePackDir.getOrElse(jvmPackDir()) + + def packBinDir: String = s"$packDir/bin" + lazy val workingDirectory: String = { val dirstr = if testCwd.nonEmpty then if verbose then printf("TEST_CWD set to [%s]\n", testCwd) @@ -36,7 +75,7 @@ object ScriptTestEnv { userDir // userDir, if TEST_CWD not set // issue warning if things don't look right - val test = Paths.get(s"$dirstr/dist/target/pack/bin").normalize + val test = Paths.get(s"$dirstr/$packBinDir").normalize if !test.isDirectory then printf("warning: not found below working directory: %s\n", test.norm) @@ -46,7 +85,7 @@ object ScriptTestEnv { def envPath: String = envOrElse("PATH", "") // remove duplicate entries in path - def supplementedPath: String = s"dist/target/pack/bin$psep$envJavaHome/bin$psep$envScalaHome/bin$psep$envPath".norm + def supplementedPath: String = s"$packBinDir$psep$envJavaHome/bin$psep$envScalaHome/bin$psep$envPath".norm def adjustedPathEntries: List[String] = supplementedPath.norm.split(psep).toList.distinct def adjustedPath: String = adjustedPathEntries.mkString(psep) def envPathEntries: List[String] = envPath.split(psep).toList.distinct @@ -55,11 +94,18 @@ object ScriptTestEnv { def unameExe = which("uname") def ostypeFull = if unameExe.nonEmpty then exec(unameExe).mkString else "" - def ostype = ostypeFull.toLowerCase.takeWhile{ cc => cc >= 'a' && cc <='z' || cc >= 'A' && cc <= 'Z' } + def ostype = ostypeFull.toLowerCase(Locale.ROOT).takeWhile{ cc => cc >= 'a' && cc <='z' || cc >= 'A' && cc <= 'Z' } + def archFull = if unameExe.nonEmpty then exec(unameExe, "-m").mkString else "" + def archNorm = archFull match + case "arm64" => "aarch64" + case "amd64" => "x86_64" + case id => id def cygwin = ostype == "cygwin" def mingw = ostype == "mingw" def msys = ostype == "msys" + def linux = ostype == "linux" + def mac = ostype == "darwin" def winshell: Boolean = cygwin || mingw || 
msys def which(str: String) = @@ -124,10 +170,9 @@ object ScriptTestEnv { } yield line - def packBinDir = "dist/target/pack/bin" - // def packLibDir = "dist/target/pack/lib" // replaced by packMavenDir - def packMavenDir = "dist/target/pack/maven2" - def packVersionFile = "dist/target/pack/VERSION" + // def packLibDir = s"$packDir/lib" // replaced by packMavenDir + def packMavenDir = s"$packDir/maven2" + def packVersionFile = s"$packDir/VERSION" def packBinScalaExists: Boolean = Files.exists(Paths.get(s"$packBinDir/scala")) def packScalaVersion: String = { @@ -248,8 +293,8 @@ object ScriptTestEnv { lazy val cwd: Path = Paths.get(".").toAbsolutePath.normalize lazy val (scalacPath: String, scalaPath: String) = { - val scalac = s"$workingDirectory/dist/target/pack/bin/scalac".toPath.normalize - val scala = s"$workingDirectory/dist/target/pack/bin/scala".toPath.normalize + val scalac = s"$workingDirectory/$packBinDir/scalac".toPath.normalize + val scala = s"$workingDirectory/$packBinDir/scala".toPath.normalize (scalac.norm, scala.norm) } @@ -257,7 +302,7 @@ object ScriptTestEnv { // use optional TEST_BASH if defined, otherwise, bash must be in PATH // envScalaHome is: - // dist/target/pack, if present + // dist[*]/target/pack, if present // else, SCALA_HOME if defined // else, not defined lazy val envScalaHome = diff --git a/compiler/test/dotty/tools/scripting/ScriptingTests.scala b/compiler/test/dotty/tools/scripting/ScriptingTests.scala index 713695b62f4a..4dc193f0efe4 100644 --- a/compiler/test/dotty/tools/scripting/ScriptingTests.scala +++ b/compiler/test/dotty/tools/scripting/ScriptingTests.scala @@ -17,7 +17,11 @@ import org.junit.Assume.assumeFalse /** Runs all tests contained in `compiler/test-resources/scripting/` */ class ScriptingTests: // classpath tests managed by scripting.ClasspathTests.scala - def testFiles = scripts("/scripting").filter { ! 
_.getName.toLowerCase.contains("classpath") } + def testFiles = scripts("/scripting").filter { sc => + val name = sc.getName.toLowerCase + !name.contains("classpath") + && !name.contains("_scalacli") + } /* * Call .scala scripts without -save option, verify no jar created diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala index a8c480088e08..d17edbaa855e 100644 --- a/compiler/test/dotty/tools/utils.scala +++ b/compiler/test/dotty/tools/utils.scala @@ -20,14 +20,19 @@ import dotc.config.CommandLineParser object Dummy def scripts(path: String): Array[File] = { - val dir = new File(Dummy.getClass.getResource(path).getPath) - assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir") + val dir = scriptsDir(path) dir.listFiles.filter { f => val path = if f.isDirectory then f.getPath + "/" else f.getPath Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains) } } +def scriptsDir(path: String): File = { + val dir = new File(Dummy.getClass.getResource(path).getPath) + assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir") + dir +} + extension (f: File) def absPath = f.getAbsolutePath.replace('\\', '/') @@ -101,10 +106,10 @@ def toolArgsParse(lines: List[String], filename: Option[String]): List[(String,S case toolArg(name, args) => List((name, args)) case _ => Nil } ++ - lines.flatMap { + lines.flatMap { case directiveOptionsArg(args) => List(("scalac", args)) case directiveJavacOptions(args) => List(("javac", args)) - case _ => Nil + case _ => Nil } import org.junit.Test diff --git a/dist/bin-native-overrides/cli-common-platform b/dist/bin-native-overrides/cli-common-platform new file mode 100644 index 000000000000..1a11c770f91a --- /dev/null +++ b/dist/bin-native-overrides/cli-common-platform @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + SCALA_CLI_VERSION="" + # iterate through lines in VERSION_SRC + while IFS= read -r line; do + # if line 
starts with "version:=" then extract the version + if [[ "$line" == cli_version:=* ]]; then + SCALA_CLI_VERSION="${line#cli_version:=}" + break + fi + done < "$PROG_HOME/EXTRA_PROPERTIES" + SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"" "--cli-version \"$SCALA_CLI_VERSION\"") +else + SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"") +fi diff --git a/dist/bin-native-overrides/cli-common-platform.bat b/dist/bin-native-overrides/cli-common-platform.bat new file mode 100644 index 000000000000..e0cfa40692b5 --- /dev/null +++ b/dist/bin-native-overrides/cli-common-platform.bat @@ -0,0 +1,18 @@ +@echo off + +setlocal enabledelayedexpansion + +set "_SCALA_CLI_VERSION=" +@rem read for cli_version:=_SCALA_CLI_VERSION in EXTRA_PROPERTIES file +FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\EXTRA_PROPERTIES") DO ( + SET "line=%%G" + IF "!line:~0,13!"=="cli_version:=" ( + SET "_SCALA_CLI_VERSION=!line:~13!" + GOTO :foundCliVersion + ) +) + +:foundCliVersion +endlocal & set "SCALA_CLI_VERSION=%_SCALA_CLI_VERSION%" + +set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" "--cli-version" "%SCALA_CLI_VERSION%" \ No newline at end of file diff --git a/dist/bin/cli-common-platform b/dist/bin/cli-common-platform new file mode 100644 index 000000000000..a5906e882bb4 --- /dev/null +++ b/dist/bin/cli-common-platform @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +SCALA_CLI_CMD_BASH=("\"$JAVACMD\"" "-jar \"$PROG_HOME/bin/scala-cli.jar\"") diff --git a/dist/bin/cli-common-platform.bat b/dist/bin/cli-common-platform.bat new file mode 100644 index 000000000000..99103266c1d9 --- /dev/null +++ b/dist/bin/cli-common-platform.bat @@ -0,0 +1,5 @@ +@echo off + +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
+set SCALA_CLI_CMD_WIN="%_JAVACMD%" "-jar" "%_PROG_HOME%\bin\scala-cli.jar" \ No newline at end of file diff --git a/dist/bin/common b/dist/bin/common index e3e4253938fb..4a0152fbc4cb 100755 --- a/dist/bin/common +++ b/dist/bin/common @@ -1,132 +1,6 @@ #!/usr/bin/env bash -#/*-------------------------------------------------------------------------- -# * Credits: This script is based on the script generated by sbt-pack. -# *--------------------------------------------------------------------------*/ - -# save terminal settings -saved_stty=$(stty -g 2>/dev/null) -# clear on error so we don't later try to restore them -if [[ ! $? ]]; then - saved_stty="" -fi - -# restore stty settings (echo in particular) -function restoreSttySettings() { - stty $saved_stty - saved_stty="" -} - -scala_exit_status=127 -function onExit() { - [[ "$saved_stty" != "" ]] && restoreSttySettings - exit $scala_exit_status -} - -# to reenable echo if we are interrupted before completing. -trap onExit INT TERM EXIT - -unset cygwin mingw msys darwin conemu - -# COLUMNS is used together with command line option '-pageWidth'. -if command -v tput >/dev/null 2>&1; then - export COLUMNS="$(tput -Tdumb cols)" -fi - -case "`uname`" in - CYGWIN*) cygwin=true - ;; - MINGW*) mingw=true - ;; - MSYS*) msys=true - ;; - Darwin*) darwin=true - if [ -z "$JAVA_VERSION" ] ; then - JAVA_VERSION="CurrentJDK" - else - echo "Using Java version: $JAVA_VERSION" 1>&2 - fi - if [ -z "$JAVA_HOME" ] ; then - JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home - fi - JAVACMD="`which java`" - ;; -esac - -unset CYGPATHCMD -if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then - # ConEmu terminal is incompatible with jna-5.*.jar - [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true - # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. 
- CYGPATHCMD=`which cygpath 2>/dev/null` - case "$TERM" in - rxvt* | xterm* | cygwin*) - stty -icanon min 1 -echo - JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" - ;; - esac -fi - -# Resolve JAVA_HOME from javac command path -if [ -z "$JAVA_HOME" ]; then - javaExecutable="`which javac`" - if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then - # readlink(1) is not available as standard on Solaris 10. - readLink=`which readlink` - if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then - javaExecutable="`readlink -f \"$javaExecutable\"`" - javaHome="`dirname \"$javaExecutable\"`" - javaHome=`expr "$javaHome" : '\(.*\)/bin'` - JAVA_HOME="$javaHome" - export JAVA_HOME - fi - fi -fi - -if [ -z "${JAVACMD-}" ] ; then - if [ -n "${JAVA_HOME-}" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - else - JAVACMD="`which java`" - fi -fi - -if [ ! -x "$JAVACMD" ] ; then - echo "Error: JAVA_HOME is not defined correctly." - echo " We cannot execute $JAVACMD" - exit 1 -fi - -if [ -z "$JAVA_HOME" ] ; then - echo "Warning: JAVA_HOME environment variable is not set." 
-fi - -CLASSPATH_SUFFIX="" -# Path separator used in EXTRA_CLASSPATH -PSEP=":" - -# translate paths to Windows-mixed format before running java -if [ -n "${CYGPATHCMD-}" ]; then - [ -n "${PROG_HOME-}" ] && - PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` - [ -n "$JAVA_HOME" ] && - JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` - CLASSPATH_SUFFIX=";" - PSEP=";" -elif [[ ${mingw-} || ${msys-} ]]; then - # For Mingw / Msys, convert paths from UNIX format before anything is touched - [ -n "$PROG_HOME" ] && - PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" - [ -n "$JAVA_HOME" ] && - JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" - CLASSPATH_SUFFIX=";" - PSEP=";" -fi +source "$PROG_HOME/bin/common-shared" #/*-------------------------------------------------- # * The code below is for Dotty @@ -205,16 +79,12 @@ ReplMain=dotty.tools.repl.Main ScriptingMain=dotty.tools.scripting.Main declare -a java_args -declare -a scala_args declare -a residual_args declare -a script_args addJava () { java_args+=("'$1'") } -addScala () { - scala_args+=("'$1'") -} addResidual () { residual_args+=("'$1'") } diff --git a/dist/bin/common-shared b/dist/bin/common-shared new file mode 100644 index 000000000000..8c85993a5283 --- /dev/null +++ b/dist/bin/common-shared @@ -0,0 +1,139 @@ +#!/usr/bin/env bash + +# Common options for both scala-cli and java based launchers + +#/*-------------------------------------------------------------------------- +# * Credits: This script is based on the script generated by sbt-pack. +# *--------------------------------------------------------------------------*/ + +# save terminal settings +saved_stty=$(stty -g 2>/dev/null) +# clear on error so we don't later try to restore them +if [[ ! $? 
]]; then + saved_stty="" +fi + +# restore stty settings (echo in particular) +function restoreSttySettings() { + stty $saved_stty + saved_stty="" +} + +scala_exit_status=127 +function onExit() { + [[ "$saved_stty" != "" ]] && restoreSttySettings + exit $scala_exit_status +} + +# to reenable echo if we are interrupted before completing. +trap onExit INT TERM EXIT + +unset cygwin mingw msys darwin conemu + +# COLUMNS is used together with command line option '-pageWidth'. +if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + if [ -z "$JAVA_VERSION" ] ; then + JAVA_VERSION="CurrentJDK" + else + echo "Using Java version: $JAVA_VERSION" 1>&2 + fi + if [ -z "$JAVA_HOME" ] ; then + JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home + fi + JAVACMD="`which java`" + ;; +esac + +unset CYGPATHCMD +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + # ConEmu terminal is incompatible with jna-5.*.jar + [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true + # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. + CYGPATHCMD=`which cygpath 2>/dev/null` + case "$TERM" in + rxvt* | xterm* | cygwin*) + stty -icanon min 1 -echo + JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" + ;; + esac +fi + +# Resolve JAVA_HOME from javac command path +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! 
`expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + javaExecutable="`readlink -f \"$javaExecutable\"`" + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "${JAVACMD-}" ] ; then + if [ -n "${JAVA_HOME-}" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." + echo " We cannot execute $JAVACMD" + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSPATH_SUFFIX="" +# Path separator used in EXTRA_CLASSPATH +PSEP=":" +PROG_HOME_URI="file://$PROG_HOME" + +# translate paths to Windows-mixed format before running java +if [ -n "${CYGPATHCMD-}" ]; then + [ -n "${PROG_HOME-}" ] && + PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` + PROG_HOME_URI="file:///$PROG_HOME" # Add extra root dir prefix + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` + CLASSPATH_SUFFIX=";" + PSEP=";" +elif [[ ${mingw-} || ${msys-} ]]; then + # For Mingw / Msys, convert paths from UNIX format before anything is touched + [ -n "$PROG_HOME" ] && + PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" + PROG_HOME_URI="file:///$PROG_HOME" # Add extra root dir prefix + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" + CLASSPATH_SUFFIX=";" + PSEP=";" +fi + +declare -a scala_args +addScala () { + scala_args+=("'$1'") +} diff --git a/dist/bin/scala b/dist/bin/scala index 3040c5a9a0f3..71747a8e9e20 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -26,7 +26,8 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/cli-common" +source "$PROG_HOME/bin/common-shared" +source 
"$PROG_HOME/bin/cli-common-platform" SCALA_VERSION="" # iterate through lines in VERSION_SRC @@ -44,7 +45,7 @@ if [ -z "$SCALA_VERSION" ]; then exit 1 fi -MVN_REPOSITORY="file://$PROG_HOME/maven2" +MVN_REPOSITORY="$PROG_HOME_URI/maven2" # escape all script arguments while [[ $# -gt 0 ]]; do @@ -54,8 +55,9 @@ done # exec here would prevent onExit from being called, leaving terminal in unusable state [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 -eval "\"$JAVACMD\"" \ - "-jar \"$SCALA_CLI_JAR\"" \ + +# SCALA_CLI_CMD_BASH is an array, set by cli-common-platform +eval "${SCALA_CLI_CMD_BASH[@]}" \ "--prog-name scala" \ "--cli-default-scala-version \"$SCALA_VERSION\"" \ "-r \"$MVN_REPOSITORY\"" \ diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index 78336272055b..d473facbbb1c 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -19,10 +19,11 @@ if not %_EXITCODE%==0 goto end call :setScalaOpts -@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat -set "_JAVACMD=!_JAVACMD:%%=%%%%!" +call "%_PROG_HOME%\bin\cli-common-platform.bat" + +@rem SCALA_CLI_CMD_WIN is an array, set in cli-common-platform.bat +call %SCALA_CLI_CMD_WIN% "--prog-name" "scala" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* -call "%_JAVACMD%" "-jar" "%SCALA_CLI_JAR%" "--prog-name" "scala" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) goto end @@ -42,19 +43,8 @@ if not "%char%"==":" ( goto :findColon ) -@REM set _PROG_HOME to the substring from the first colon to the end -set "_PROG_HOME_SUB=!_PROG_HOME:~%index%!" -@REM strip initial character -set "_PROG_HOME_SUB=!_PROG_HOME_SUB:~1!" - -@REM set drive to substring from 0 to the first colon -set "_PROG_HOME_DRIVE=!_PROG_HOME:~0,%index%!" 
- - - set "_SCALA_VERSION=" -set "MVN_REPOSITORY=file://%_PROG_HOME_DRIVE%\%_PROG_HOME_SUB:\=/%/maven2" -set "SCALA_CLI_JAR=%_PROG_HOME%\etc\scala-cli.jar" +set "MVN_REPOSITORY=file:///%_PROG_HOME:\=/%/maven2" @rem read for version:=_SCALA_VERSION in VERSION_FILE FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\VERSION") DO ( diff --git a/project/Build.scala b/project/Build.scala index 0876353a6a2f..99871c4c87e8 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -28,7 +28,6 @@ import sbttastymima.TastyMiMaPlugin import sbttastymima.TastyMiMaPlugin.autoImport._ import scala.util.Properties.isJavaAtLeast -import scala.collection.mutable import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ import org.scalajs.linker.interface.{ModuleInitializer, StandardConfig} @@ -119,7 +118,11 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.3.1" + val scalaCliLauncherVersion = "1.3.2" + /** Version of Scala CLI to download (on Windows - last known validated version) */ + val scalaCliLauncherVersionWindows = "1.3.2" + /** Version of Coursier to download for initializing the local maven repo of Scala command */ + val coursierJarVersion = "2.1.10" object CompatMode { final val BinaryCompatible = 0 @@ -2121,22 +2124,72 @@ object Build { packMain := Map(), publishArtifact := false, packGenerateMakefile := false, - packArchiveName := "scala3-" + dottyVersion, republishRepo := target.value / "republish", - republishLaunchers := { - val cliV = scalaCliLauncherVersion - Seq( - ("scala-cli.jar", cliV, url(s"https://github.com/VirtusLab/scala-cli/releases/download/v$cliV/scala-cli.jar")) - ) - }, + packResourceDir += (republishRepo.value / "bin" -> "bin"), + packResourceDir += (republishRepo.value / "maven2" -> "maven2"), Compile / pack := (Compile / pack).dependsOn(republish).value, ) lazy val dist = project.asDist(Bootstrapped) .settings( - packResourceDir += 
(baseDirectory.value / "bin" -> "bin"), - packResourceDir += (republishRepo.value / "maven2" -> "maven2"), - packResourceDir += (republishRepo.value / "etc" -> "etc"), + packArchiveName := "scala3-" + dottyVersion, + republishBinDir := baseDirectory.value / "bin", + republishCoursier += + ("coursier.jar" -> s"https://github.com/coursier/coursier/releases/download/v$coursierJarVersion/coursier.jar"), + republishLaunchers += + ("scala-cli.jar" -> s"https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli.jar"), + ) + + lazy val `dist-mac-x86_64` = project.in(file("dist/mac-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-apple-darwin", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-apple-darwin.gz") + ) + + lazy val `dist-mac-aarch64` = project.in(file("dist/mac-aarch64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-aarch64-apple-darwin", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-apple-darwin.gz") + ) + + lazy val `dist-win-x86_64` = project.in(file("dist/win-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-win32", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier 
:= (dist / republishFetchCoursier).value, + republishExtraProps += ("cli_version" -> scalaCliLauncherVersion), + mappings += (republishRepo.value / "etc" / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), + republishLaunchers += + ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersionWindows/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") + ) + + lazy val `dist-linux-x86_64` = project.in(file("dist/linux-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-linux", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-pc-linux.gz") + ) + + lazy val `dist-linux-aarch64` = project.in(file("dist/linux-aarch64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-aarch64-pc-linux", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-pc-linux.gz") ) private def customMimaReportBinaryIssues(issueFilterLocation: String) = mimaReportBinaryIssues := { @@ -2265,8 +2318,7 @@ object Build { settings(scala3PresentationCompilerBuildInfo) def asDist(implicit mode: Mode): Project = project. - enablePlugins(PackPlugin). - enablePlugins(RepublishPlugin). + enablePlugins(PackPlugin, RepublishPlugin). withCommonSettings. settings(commonDistSettings). 
dependsOn( diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index bd1190dfec88..537c82d62cce 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -12,9 +12,45 @@ import sbt.util.CacheImplicits._ import scala.collection.mutable import java.nio.file.Files +import java.nio.file.attribute.PosixFilePermission +import java.nio.file.{Files, Path} + +import scala.jdk.CollectionConverters._ + /** This local plugin provides ways of publishing a project classpath and library dependencies to * .a local repository */ object RepublishPlugin extends AutoPlugin { + + /** copied from github.com/coursier/coursier */ + private object FileUtil { + + def tryMakeExecutable(path: Path): Boolean = + try { + val perms = Files.getPosixFilePermissions(path).asScala.toSet + + var newPerms = perms + if (perms(PosixFilePermission.OWNER_READ)) + newPerms += PosixFilePermission.OWNER_EXECUTE + if (perms(PosixFilePermission.GROUP_READ)) + newPerms += PosixFilePermission.GROUP_EXECUTE + if (perms(PosixFilePermission.OTHERS_READ)) + newPerms += PosixFilePermission.OTHERS_EXECUTE + + if (newPerms != perms) + Files.setPosixFilePermissions( + path, + newPerms.asJava + ) + + true + } + catch { + case _: UnsupportedOperationException => + false + } + + } + override def trigger = allRequirements override def requires = super.requires && PublishBinPlugin && PackPlugin @@ -24,9 +60,17 @@ object RepublishPlugin extends AutoPlugin { val republishAllResolved = taskKey[Seq[ResolvedArtifacts]]("Resolve the dependencies for the distribution") val republishClasspath = taskKey[Set[File]]("cache the dependencies for the distribution") val republishFetchLaunchers = taskKey[Set[File]]("cache the launcher deps for the distribution") + val republishFetchCoursier = taskKey[File]("cache the coursier.jar for resolving the local maven repo.") + val republishPrepareBin = taskKey[File]("prepare the bin directory, including launchers and scripts.") + val 
republishWriteExtraProps = taskKey[Option[File]]("write extra properties for the launchers.") + val republishBinDir = settingKey[File]("where to find static files for the bin dir.") + val republishCoursierDir = settingKey[File]("where to download the coursier launcher jar.") + val republishBinOverrides = settingKey[Seq[File]]("files to override those in bin-dir.") val republish = taskKey[File]("cache the dependencies and download launchers for the distribution") val republishRepo = settingKey[File]("the location to store the republished artifacts.") - val republishLaunchers = settingKey[Seq[(String, String, URL)]]("launchers to download. Sequence of (name, version, URL).") + val republishLaunchers = settingKey[Seq[(String, String)]]("launchers to download. Sequence of (name, URL).") + val republishCoursier = settingKey[Seq[(String, String)]]("coursier launcher to download. Sequence of (name, URL).") + val republishExtraProps = settingKey[Seq[(String, String)]]("extra properties for launchers.") } import autoImport._ @@ -34,11 +78,207 @@ object RepublishPlugin extends AutoPlugin { case class SimpleModuleId(org: String, name: String, revision: String) { override def toString = s"$org:$name:$revision" } - case class ResolvedArtifacts(id: SimpleModuleId, jar: File, pom: File) + case class ResolvedArtifacts(id: SimpleModuleId, jar: Option[File], pom: Option[File]) - val isRelease = sys.env.get("RELEASEBUILD") == Some("yes") + private def republishResolvedArtifacts(resolved: Seq[ResolvedArtifacts], mavenRepo: File, logOpt: Option[Logger]): Set[File] = { + IO.createDirectory(mavenRepo) + resolved.map { ra => + for (log <- logOpt) + log.info(s"[republish] publishing ${ra.id} to $mavenRepo...") + val jarOpt = ra.jar + val pomOpt = ra.pom + + assert(jarOpt.nonEmpty || pomOpt.nonEmpty, s"Neither jar nor pom found for ${ra.id}") + + val pathElems = ra.id.org.split('.').toVector :+ ra.id.name :+ ra.id.revision + val artifactDir = pathElems.foldLeft(mavenRepo)(_ / _) + 
IO.createDirectory(artifactDir) + for (pom <- pomOpt) IO.copyFile(pom, artifactDir / pom.getName) + for (jar <- jarOpt) IO.copyFile(jar, artifactDir / jar.getName) + artifactDir + }.toSet + } + + private def coursierCmd(jar: File, cache: File, args: Seq[String]): Unit = { + val jar0 = jar.getAbsolutePath.toString + val javaHome = sys.props.get("java.home").getOrElse { + throw new MessageOnlyException("java.home property not set") + } + val javaCmd = { + val cmd = if (scala.util.Properties.isWin) "java.exe" else "java" + (file(javaHome) / "bin" / cmd).getAbsolutePath + } + val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString) + val cmdLine = Seq(javaCmd, "-jar", jar0) ++ args + // invoke cmdLine with env + val p = new ProcessBuilder(cmdLine: _*).inheritIO() + p.environment().putAll(env.asJava) + val proc = p.start() + proc.waitFor() + if (proc.exitValue() != 0) + throw new MessageOnlyException(s"Error running coursier.jar with args ${args.mkString(" ")}") + } + + private def coursierFetch(coursierJar: File, log: Logger, cacheDir: File, localRepo: File, libs: Seq[String]): Unit = { + val localRepoArg = { + val path = localRepo.getAbsolutePath + if (scala.util.Properties.isWin) { + val path0 = path.replace('\\', '/') + s"file:///$path0" // extra root slash for Windows paths + } + else + s"file://$path" + } + + IO.createDirectory(cacheDir) + for (lib <- libs) { + log.info(s"[republish] Fetching $lib with coursier.jar...") + coursierCmd(coursierJar, cacheDir, + Seq( + "fetch", + "--repository", localRepoArg, + lib + ) + ) + } + } + + /**Resolve the transitive library dependencies of `libs` to `csrCacheDir`. 
+ */ + private def resolveLibraryDeps( + coursierJar: File, + log: Logger, + csrCacheDir: File, + localRepo: File, + resolvedLocal: Seq[ResolvedArtifacts]): Seq[ResolvedArtifacts] = { + + // publish the local artifacts to the local repo, so coursier can resolve them + republishResolvedArtifacts(resolvedLocal, localRepo, logOpt = None) + + coursierFetch(coursierJar, log, csrCacheDir, localRepo, resolvedLocal.map(_.id.toString)) + + val maven2Root = java.nio.file.Files.walk(csrCacheDir.toPath) + .filter(_.getFileName.toString == "maven2") + .findFirst() + .orElseThrow(() => new MessageOnlyException(s"Could not find maven2 directory in $csrCacheDir")) + + def pathToArtifact(p: Path): ResolvedArtifacts = { + // relative path from maven2Root + val lastAsString = p.getFileName.toString + val relP = maven2Root.relativize(p) + val parts = relP.iterator().asScala.map(_.toString).toVector + val (orgParts :+ name :+ rev :+ _) = parts + val id = SimpleModuleId(orgParts.mkString("."), name, rev) + if (lastAsString.endsWith(".jar")) { + ResolvedArtifacts(id, Some(p.toFile), None) + } else { + ResolvedArtifacts(id, None, Some(p.toFile)) + } + } + + java.nio.file.Files.walk(maven2Root) + .filter(p => { + val lastAsString = p.getFileName.toString + lastAsString.endsWith(".pom") || lastAsString.endsWith(".jar") + }) + .map[ResolvedArtifacts](pathToArtifact(_)) + .iterator() + .asScala + .toSeq + } + + private def fetchFilesTask( + libexecT: Def.Initialize[Task[File]], + srcs: SettingKey[Seq[(String, String)]], + strict: Boolean) = Def.task[Set[File]] { + val s = streams.value + val log = s.log + val repoDir = republishRepo.value + val launcherVersions = srcs.value + val libexec = libexecT.value + + val dlCache = s.cacheDirectory / "republish-launchers" + + val store = s.cacheStoreFactory / "versions" + + def work(name: String, dest: File, launcher: String): File = { + val (launcherURL, workFile, prefix, subPart) = { + if (launcher.startsWith("gz+")) { + IO.createDirectory(dlCache) + 
val launcherURL = url(launcher.stripPrefix("gz+")) + (launcherURL, dlCache / s"$name.gz", "gz", "") + } else if (launcher.startsWith("zip+")) { + IO.createDirectory(dlCache) + val (urlPart, subPath) = launcher.split("!/") match { + case Array(urlPart, subPath) => (urlPart, subPath) + case _ => + throw new MessageOnlyException(s"[republish] Invalid zip+ URL, expected ! to mark subpath: $launcher") + } + val launcherURL = url(urlPart.stripPrefix("zip+")) + (launcherURL, dlCache / s"$name.zip", "zip", subPath) + } else { + IO.createDirectory(libexec) + (url(launcher), dest, "", "") + } + } + IO.delete(workFile) + Using.urlInputStream(launcherURL) { in => + log.info(s"[republish] Downloading $launcherURL to $workFile...") + IO.transfer(in, workFile) + log.info(s"[republish] Downloaded $launcherURL to $workFile...") + } + if (prefix == "gz") { + IO.delete(dest) + Using.fileInputStream(workFile) { in => + Using.gzipInputStream(in) { gzIn => + IO.transfer(gzIn, dest) + } + } + log.info(s"[republish] uncompressed gz file $workFile to $dest...") + IO.delete(workFile) + } else if (prefix == "zip") { + IO.delete(dest) + val files = IO.unzip(workFile, dlCache, new ExactFilter(subPart)) + val extracted = files.headOption.getOrElse(throw new MessageOnlyException(s"[republish] No files extracted from $workFile matching $subPart")) + log.info(s"[republish] unzipped $workFile to $extracted...") + IO.move(extracted, dest) + log.info(s"[republish] moved $extracted to $dest...") + IO.delete(workFile) + } + FileUtil.tryMakeExecutable(dest.toPath) + dest + } + + val allLaunchers = { + if (strict && launcherVersions.isEmpty) + throw new MessageOnlyException(s"[republish] No launchers to fetch, check the build configuration for ${srcs.key.label}.") + + for ((name, launcher) <- launcherVersions) yield { + val dest = libexec / name + + val id = name.replaceAll("[^a-zA-Z0-9]", "_") + + val fetchAction = Tracked.inputChanged[String, File](store.make(id)) { (inChanged, launcher) => + if 
(inChanged || !Files.exists(dest.toPath)) { + work(name, dest, launcher) + } else { + log.info(s"[republish] Using cached $name launcher ($launcher).") + dest + } + } + + fetchAction(launcher) + } + } + allLaunchers.toSet + } override val projectSettings: Seq[Def.Setting[_]] = Def.settings( + republishCoursierDir := republishRepo.value / "coursier", + republishLaunchers := Seq.empty, + republishCoursier := Seq.empty, + republishBinOverrides := Seq.empty, + republishExtraProps := Seq.empty, republishLocalResolved / republishProjectRefs := { val proj = thisProjectRef.value val deps = buildDependencies.value @@ -55,7 +295,6 @@ object RepublishPlugin extends AutoPlugin { ids.zip(published).map({ case (id, as) => val simpleId = { - val disabled = CrossVersion.disabled val name0 = id.crossVersion match { case cv: CrossVersion.Binary => // projectID does not add binary suffix @@ -76,122 +315,85 @@ object RepublishPlugin extends AutoPlugin { }) assert(jarOrNull != null, s"Could not find jar for ${id}") assert(pomOrNull != null, s"Could not find pom for ${id}") - ResolvedArtifacts(simpleId, jarOrNull, pomOrNull) + ResolvedArtifacts(simpleId, Some(jarOrNull), Some(pomOrNull)) }) } }.value, republishAllResolved := { - val localResolved = republishLocalResolved.value + val resolvedLocal = republishLocalResolved.value + val coursierJar = republishFetchCoursier.value val report = (thisProjectRef / updateFull).value + val s = streams.value + val lm = (republishAllResolved / dependencyResolution).value + val cacheDir = republishRepo.value - val found = mutable.Map.empty[SimpleModuleId, ResolvedArtifacts] - val evicted = mutable.Set.empty[SimpleModuleId] - - localResolved.foreach({ resolved => - val simpleId = resolved.id - if(isRelease) - evicted += simpleId.copy(revision = simpleId.revision + "-bin-nonbootstrapped") - else - evicted += simpleId.copy(revision = simpleId.revision + "-nonbootstrapped") - found(simpleId) = resolved - }) + val log = s.log + val csrCacheDir = 
s.cacheDirectory / "csr-cache" + val localRepo = s.cacheDirectory / "localRepo" / "maven2" - report.allModuleReports.foreach { mr => - val simpleId = { - val id = mr.module - SimpleModuleId(id.organization, id.name, id.revision) - } + // resolve the transitive dependencies of the local artifacts + val resolvedLibs = resolveLibraryDeps(coursierJar, log, csrCacheDir, localRepo, resolvedLocal) - if (!found.contains(simpleId) && !evicted(simpleId)) { - var jarOrNull: File = null - var pomOrNull: File = null - mr.artifacts.foreach({ case (a, f) => - if (a.`type` == "jar" || a.`type` == "bundle") { - jarOrNull = f - } else if (a.`type` == "pom") { - pomOrNull = f - } - }) - assert(jarOrNull != null, s"Could not find jar for ${simpleId}") - if (pomOrNull == null) { - val jarPath = jarOrNull.toPath - // we found the jar, so assume we can resolve a sibling pom file - val pomPath = jarPath.resolveSibling(jarPath.getFileName.toString.stripSuffix(".jar") + ".pom") - assert(Files.exists(pomPath), s"Could not find pom for ${simpleId}") - pomOrNull = pomPath.toFile - } - found(simpleId) = ResolvedArtifacts(simpleId, jarOrNull, pomOrNull) - } + // the combination of local artifacts and resolved transitive dependencies + val merged = + (resolvedLocal ++ resolvedLibs).groupBy(_.id).values.map(_.reduce { (ra1, ra2) => + val jar = ra1.jar.orElse(ra2.jar) + val pom = ra1.pom.orElse(ra2.pom) + ResolvedArtifacts(ra1.id, jar, pom) + }) - } - found.values.toSeq + merged.toSeq }, republishClasspath := { val s = streams.value val resolved = republishAllResolved.value val cacheDir = republishRepo.value - - val log = s.log - val mavenRepo = cacheDir / "maven2" - IO.createDirectory(mavenRepo) - resolved.map { ra => - log.info(s"[republish] publishing ${ra.id} to $mavenRepo...") - val jar = ra.jar - val pom = ra.pom - - val pathElems = ra.id.org.split('.').toVector :+ ra.id.name :+ ra.id.revision - val artifactDir = pathElems.foldLeft(mavenRepo)(_ / _) - IO.createDirectory(artifactDir) - 
IO.copyFile(jar, artifactDir / jar.getName) - IO.copyFile(pom, artifactDir / pom.getName) - artifactDir - }.toSet + republishResolvedArtifacts(resolved, cacheDir / "maven2", logOpt = Some(s.log)) }, republishFetchLaunchers := { - val s = streams.value - val log = s.log + fetchFilesTask(republishPrepareBin, republishLaunchers, strict = true).value + }, + republishFetchCoursier := { + fetchFilesTask(republishCoursierDir.toTask, republishCoursier, strict = true).value.head + }, + republishPrepareBin := { + val baseDir = baseDirectory.value + val srcBin = republishBinDir.value + val overrides = republishBinOverrides.value val repoDir = republishRepo.value - val launcherVersions = republishLaunchers.value - - val etc = repoDir / "etc" - val store = s.cacheStoreFactory / "versions" - - def work(dest: File, launcher: URL) = { - IO.delete(dest) - Using.urlInputStream(launcher) { in => - IO.createDirectory(etc) - log.info(s"[republish] Downloading $launcher to $dest...") - IO.transfer(in, dest) - log.info(s"[republish] Downloaded $launcher to $dest...") - } - dest + val targetBin = repoDir / "bin" + IO.copyDirectory(srcBin, targetBin) + overrides.foreach { dir => + IO.copyDirectory(dir, targetBin, overwrite = true) } - - val allLaunchers = { - for ((name, version, launcher) <- launcherVersions) yield { - val dest = etc / name - - val id = name.replaceAll("[^a-zA-Z0-9]", "_") - - val fetchAction = Tracked.inputChanged[String, File](store.make(id)) { (inChanged, version) => - if (inChanged || !Files.exists(dest.toPath)) { - work(dest, launcher) - } else { - log.info(s"[republish] Using cached $launcher at $dest...") - dest - } + targetBin + }, + republishWriteExtraProps := { + val s = streams.value + val log = s.log + val extraProps = republishExtraProps.value + if (extraProps.isEmpty) { + log.info("[republish] No extra properties to write.") + None + } + else { + val repoDir = republishRepo.value + val propsFile = repoDir / "etc" / "EXTRA_PROPERTIES" + log.info(s"[republish] 
Writing extra properties to $propsFile...") + Using.fileWriter()(propsFile) { writer => + extraProps.foreach { case (k, v) => + writer.write(s"$k:=$v\n") } - - fetchAction(version) } + Some(propsFile) } - allLaunchers.toSet }, republish := { val cacheDir = republishRepo.value val artifacts = republishClasspath.value val launchers = republishFetchLaunchers.value + val extraProps = republishWriteExtraProps.value cacheDir } ) diff --git a/project/scripts/bootstrappedOnlyCmdTests b/project/scripts/bootstrappedOnlyCmdTests index f3d730f8f494..11c35a7028cc 100755 --- a/project/scripts/bootstrappedOnlyCmdTests +++ b/project/scripts/bootstrappedOnlyCmdTests @@ -15,13 +15,13 @@ echo "testing scala.quoted.Expr.run from sbt scala" grep -qe "val a: scala.Int = 3" "$tmp" # setup for `scalac`/`scala` script tests -"$SBT" dist/pack +"$SBT" "$DIST_PROJECT/pack" -echo "capturing scala version from dist/target/pack/VERSION" -IFS=':=' read -ra versionProps < "$ROOT/dist/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps +echo "capturing scala version from $DIST_DIR/target/pack/VERSION" +IFS=':=' read -ra versionProps < "$ROOT/$DIST_DIR/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps [ ${#versionProps[@]} -eq 3 ] && \ [ ${versionProps[0]} = "version" ] && \ - [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/dist/target/pack/VERSION" + [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/$DIST_DIR/target/pack/VERSION" scala_version=${versionProps[2]} # check that `scalac` compiles and `scala` runs it @@ -77,7 +77,7 @@ echo "testing sbt scalac with suspension" clear_out "$OUT" "$SBT" "scala3-compiler-bootstrapped/scalac -d $OUT tests/pos-macros/macros-in-same-project-1/Bar.scala tests/pos-macros/macros-in-same-project-1/Foo.scala" > "$tmp" -# echo ":quit" | ./dist/target/pack/bin/scala # not supported by CI +# echo ":quit" | ./$DIST_DIR/target/pack/bin/scala # not supported by CI 
echo "testing ./bin/scaladoc" clear_out "$OUT1" @@ -101,6 +101,13 @@ grep -qe "See 'scala --help' to read about a specific subcommand." "$t ./bin/scala -d hello.jar tests/run/hello.scala ls hello.jar +clear_cli_dotfiles tests/run + +# check that `scala` runs scripts with args +echo "testing ./bin/scala with arguments" +./bin/scala run project/scripts/echoArgs.sc -- abc true 123 > "$tmp" +test "$EXPECTED_OUTPUT_ARGS" = "$(cat "$tmp")" +clear_cli_dotfiles project/scripts echo "testing i12973" clear_out "$OUT" diff --git a/project/scripts/buildScalaBinary b/project/scripts/buildScalaBinary new file mode 100755 index 000000000000..7fc5275e5d8d --- /dev/null +++ b/project/scripts/buildScalaBinary @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/../.." +SBT="$ROOT/project/scripts/sbt" # if run on CI + +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + +# build the scala/scalac/scaladoc binary, where scala is native for the current platform. 
+"$SBT" "$DIST_PROJECT/pack" diff --git a/project/scripts/cmdTestsCommon.inc.sh b/project/scripts/cmdTestsCommon.inc.sh index a37ab757c057..bccb4aa56ac1 100644 --- a/project/scripts/cmdTestsCommon.inc.sh +++ b/project/scripts/cmdTestsCommon.inc.sh @@ -9,11 +9,15 @@ SOURCE="tests/pos/HelloWorld.scala" MAIN="HelloWorld" TASTY="HelloWorld.tasty" EXPECTED_OUTPUT="hello world" +EXPECTED_OUTPUT_ARGS="[0:abc],[1:true],[2:123]" OUT=$(mktemp -d) OUT1=$(mktemp -d) tmp=$(mktemp) +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + die () { echo >&2 "$@" exit 1 @@ -24,3 +28,16 @@ clear_out() local out="$1" rm -rf "$out"/* } + +clear_cli_dotfiles() +{ + local out="$1" + rm -rf "$out"/.bsp + rm -rf "$out"/.scala-build + + rm -f "$ROOT"/.bsp/scala.json + if [ -z "$(ls -A "$ROOT"/.bsp)" ]; then + rm -rf "$ROOT"/.bsp + fi + rm -rf "$ROOT"/.scala-build +} diff --git a/project/scripts/echoArgs.sc b/project/scripts/echoArgs.sc new file mode 100644 index 000000000000..cb9acbb6ad2e --- /dev/null +++ b/project/scripts/echoArgs.sc @@ -0,0 +1,6 @@ +// This is a Scala CLI script + +val formatted = + (for (arg, i) <- args.zipWithIndex yield + s"[$i:$arg]").mkString(",") +println(formatted) diff --git a/project/scripts/native-integration/bashTests b/project/scripts/native-integration/bashTests new file mode 100755 index 000000000000..5fb77355238c --- /dev/null +++ b/project/scripts/native-integration/bashTests @@ -0,0 +1,84 @@ +#!/usr/bin/env bash + +set -eux + +#/*---------------*\ +# * SETUP VARS *# +# *---------------*/ + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/../../.." 
+ +SBT="$ROOT/project/scripts/sbt" # if run on CI +# SBT="sbt" # if run locally + +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + +die () { + echo >&2 "$@" + exit 1 +} + +PROG_HOME="$DIST_DIR/target/pack" + +SOURCE="$ROOT/tests/pos/HelloWorld.scala" +SOURCE_VERSION="$ROOT/project/scripts/native-integration/reportScalaVersion.scala" + +clear_cli_dotfiles() +{ + local out="$1" + rm -rf "$out"/.bsp + rm -rf "$out"/.scala-build + + rm -f "$ROOT"/.bsp/scala.json + if [ -z "$(ls -A "$ROOT"/.bsp)" ]; then + rm -rf "$ROOT"/.bsp + fi + rm -rf "$ROOT"/.scala-build +} + +#/*---------------*\ +# * INITIALIZE *# +# *---------------*/ + +# build the distribution +"$SBT" "$DIST_PROJECT/pack" + +SCALA_VERSION="" +# iterate through lines in VERSION_SRC +while IFS= read -r line; do + # if line starts with "version:=" then extract the version + if [[ "$line" == version:=* ]]; then + SCALA_VERSION="${line#version:=}" + break + fi +done < "$PROG_HOME/VERSION" + +if [ -z "$SCALA_VERSION" ]; then + die "Could not find scala version in $PROG_HOME/VERSION" +fi + +#/*-------------------*\ +# * TESTING BEGINS *# +# *-------------------*/ + +echo "assert native launcher matches expected version" +if [ -z "$LAUNCHER_EXPECTED_PROJECT" ]; then + die "LAUNCHER_EXPECTED_PROJECT is not set in the environment" +fi +test "$LAUNCHER_EXPECTED_PROJECT" = "$DIST_PROJECT" + +echo "testing version output (default)" +std_output=$("$PROG_HOME/bin/scala" version --scala-version) +test "$SCALA_VERSION" = "$std_output" + +echo "testing run command" +std_output=$("$PROG_HOME/bin/scala" run "$SOURCE" --power --offline --server=false) +test "hello world" = "$std_output" +clear_cli_dotfiles "$ROOT/tests/pos" + +echo "testing run command (-with-compiler)" +std_output=$("$PROG_HOME/bin/scala" run "$SOURCE_VERSION" -with-compiler --power --offline --server=false) +test "$SCALA_VERSION" = "$std_output" +clear_cli_dotfiles "$ROOT/project/scripts/native-integration" + diff --git 
a/project/scripts/native-integration/reportScalaVersion.scala b/project/scripts/native-integration/reportScalaVersion.scala new file mode 100644 index 000000000000..dc6e93708a48 --- /dev/null +++ b/project/scripts/native-integration/reportScalaVersion.scala @@ -0,0 +1,4 @@ +// To be ran by Scala CLI (requires -with-compiler command line option) + +@main def reportScalaVersion: Unit = + println(dotty.tools.dotc.config.Properties.versionNumberString) diff --git a/project/scripts/native-integration/winTests.bat b/project/scripts/native-integration/winTests.bat new file mode 100755 index 000000000000..a85b2c8c2531 --- /dev/null +++ b/project/scripts/native-integration/winTests.bat @@ -0,0 +1,19 @@ +@echo off +setlocal + +@rem paths are relative to the root project directory +set "_PREFIX=dist\win-x86_64\target\pack" +set "_SOURCE=tests\pos\HelloWorld.scala" +set "_OUT_DIR=out" + +@rem if-tests mimic the non-existing bash instruction 'set -e'. +call "%_PREFIX%\bin\scalac.bat" "@project\scripts\options" "%_SOURCE%" +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +call "%_PREFIX%\bin\scalac.bat" -d "%_OUT_DIR%" "%_SOURCE%" +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -M HelloWorld --offline --server=false +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +endlocal diff --git a/project/scripts/winCmdTests b/project/scripts/winCmdTests index 2dffff5b196a..fe6a43c7f68f 100644 --- a/project/scripts/winCmdTests +++ b/project/scripts/winCmdTests @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -e -PREFIX="dist/target/pack" +PREFIX="dist/win-x86_64/target/pack" SOURCE="tests/pos/HelloWorld.scala" $PREFIX/bin/scalac @project/scripts/options "$SOURCE" $PREFIX/bin/scalac -d out "$SOURCE" diff --git a/project/scripts/winCmdTests.bat b/project/scripts/winCmdTests.bat index d9b594d560ab..903f74d7ab98 100644 --- a/project/scripts/winCmdTests.bat +++ b/project/scripts/winCmdTests.bat @@ -2,7 +2,7 @@ setlocal @rem paths are relative to 
the root project directory -set "_PREFIX=dist\target\pack" +set "_PREFIX=dist\win-x86_64\target\pack" set "_SOURCE=tests\pos\HelloWorld.scala" set "_OUT_DIR=out" set "_SITE_DIR=_site" diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala new file mode 100644 index 000000000000..17a7488ccb1a --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala @@ -0,0 +1,6 @@ +package a + +object Foo: // note that `Foo` is defined in `zz.scala` + class Local + inline def foo(using Local): Nothing = + ??? From 3d18e9841da5fde3417b83014ae0c28e6b0478f2 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Fri, 21 Jun 2024 08:14:09 +0200 Subject: [PATCH 405/465] Revert "Disable windows tests for RC1" This reverts commit 95e53df0b360849efc49f724125094869eaf98b3. --- .github/workflows/ci.yaml | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index cad7caec490d..974866930c68 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -205,7 +205,16 @@ jobs: test_windows_fast: runs-on: [self-hosted, Windows] - if: false + if: "( + github.event_name == 'push' + && github.ref != 'refs/heads/main' + ) + || github.event_name == 'merge_group' + || ( + github.event_name == 'pull_request' + && !contains(github.event.pull_request.body, '[skip ci]') + && !contains(github.event.pull_request.body, '[skip test_windows_fast]') + )" steps: - name: Reset existing repo @@ -243,7 +252,13 @@ jobs: test_windows_full: runs-on: [self-hosted, Windows] - if: false + if: "github.event_name == 'schedule' && github.repository == 'scala/scala3' + || github.event_name == 'push' + || ( + github.event_name == 'pull_request' + && !contains(github.event.pull_request.body, '[skip ci]') + && contains(github.event.pull_request.body, '[test_windows_full]') + )" steps: - name: 
Reset existing repo From df91f071631c4521994a71f9f72a42714c6d3273 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Fri, 21 Jun 2024 12:07:30 +0200 Subject: [PATCH 406/465] Fix incorrect paths to sha256 check sum files in release workflow --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 974866930c68..2747830fb7d6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -951,8 +951,8 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/sha256sum-aarch64-pc-linux.txt - asset_name: sha256sum.txt + asset_path: ./dist/linux-aarch64/target/sha256sum.txt + asset_name: sha256sum-aarch64-pc-linux.txt asset_content_type: text/plain - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac x86-64) From 1520e88314bccf9bb42efd47ad3616c6e758548d Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Fri, 21 Jun 2024 09:58:22 +0200 Subject: [PATCH 407/465] Add changelog for 3.5.0-RC2 --- changelogs/3.5.0-RC2.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 changelogs/3.5.0-RC2.md diff --git a/changelogs/3.5.0-RC2.md b/changelogs/3.5.0-RC2.md new file mode 100644 index 000000000000..f3bb8b52c73c --- /dev/null +++ b/changelogs/3.5.0-RC2.md @@ -0,0 +1,25 @@ +# Backported fixes + +- Bundle scala-cli in scala command [#20351](https://github.com/scala/scala3/pull/20351) +- Avoid stacked thisCall contexts [#20488](https://github.com/scala/scala3/pull/20488) +- Adapt the workflow to release on SDKMAN! 
[#20535](https://github.com/scala/scala3/pull/20535) +- Adapt the release workflow to SIP-46 [#20565](https://github.com/scala/scala3/pull/20565) +- Disable ClasspathTests.unglobClasspathVerifyTest [#20551](https://github.com/scala/scala3/pull/20551) +- Set default source version to 3.5 [#20441](https://github.com/scala/scala3/pull/20441) +- Bring back ambiguity filter when we report an implicit not found error [#20368](https://github.com/scala/scala3/pull/20368) +- Treat 3.5-migration the same as 3.5 for a warning about implicit priority change [#20436](https://github.com/scala/scala3/pull/20436) +- Avoid forcing whole package when using -experimental [#20409](https://github.com/scala/scala3/pull/20409) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC1..3.5.0-RC2` these are: + +``` + 4 Hamza Remmal + 4 Wojciech Mazur + 3 Martin Odersky + 1 Jamie Thompson + 1 Guillaume Martres +``` From 828c03e236bfca6c3bd260eea5fabe6c9dddad5f Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Fri, 21 Jun 2024 09:59:05 +0200 Subject: [PATCH 408/465] Release 3.5.0-RC2 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 99871c4c87e8..6ff07701c06b 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,7 @@ object Build { val referenceVersion = "3.4.2" - val baseVersion = "3.5.0-RC1" + val baseVersion = "3.5.0-RC2" // LTS or Next val versionLine = "Next" From ecf5a2e365d367804fb9b840ea042cb0128020cf Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 19 Jun 2024 15:21:30 +0100 Subject: [PATCH 409/465] Release .zip instead of .tar.gz for windows in sdkman --- .github/workflows/publish-sdkman.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 2126a3237d83..02e00bcbf03d 100644 --- 
a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -44,7 +44,7 @@ jobs: - platform: MAC_ARM64 archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.tar.gz' - platform: WINDOWS_64 - archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.tar.gz' + archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' - platform: UNIVERSAL archive : 'scala3-${{ inputs.version }}.zip' steps: From 3ecd98200055c545f0adb68b92f7afd3bdc9f810 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 19 Jun 2024 16:49:31 +0100 Subject: [PATCH 410/465] Do not release to the UNIVERSAL platform in sdkman --- .github/workflows/publish-sdkman.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 02e00bcbf03d..d4238b9371e4 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -45,8 +45,6 @@ jobs: archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.tar.gz' - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' - - platform: UNIVERSAL - archive : 'scala3-${{ inputs.version }}.zip' steps: - uses: hamzaremmal/sdkman-release-action@7e437233a6bd79bc4cb0fa9071b685e94bdfdba6 with: From 0a7b7fe63efd4837b9aad66a0df77b6555cc15b2 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 21 Jun 2024 11:12:34 +0100 Subject: [PATCH 411/465] Upload zip files to sdkman instead of .tar.gz --- .github/workflows/publish-sdkman.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index d4238b9371e4..de12f81426b5 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -36,13 +36,13 @@ jobs: matrix: include: - platform: LINUX_64 - archive : 'scala3-${{ inputs.version }}-x86_64-pc-linux.tar.gz' + archive : 'scala3-${{ inputs.version }}-x86_64-pc-linux.zip' - platform: LINUX_ARM64 - 
archive : 'scala3-${{ inputs.version }}-aarch64-pc-linux.tar.gz' + archive : 'scala3-${{ inputs.version }}-aarch64-pc-linux.zip' - platform: MAC_OSX - archive : 'scala3-${{ inputs.version }}-x86_64-apple-darwin.tar.gz' + archive : 'scala3-${{ inputs.version }}-x86_64-apple-darwin.zip' - platform: MAC_ARM64 - archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.tar.gz' + archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.zip' - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: From a9af5ccbfd4441c29e0c2b2b5e818bf2b53d875e Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 19 Jun 2024 12:21:16 +0200 Subject: [PATCH 412/465] replace pack command, do not produce lib directory, write classpath to file --- dist/bin/common | 63 ++--------- dist/bin/common.bat | 16 +-- dist/bin/scalac | 0 dist/bin/scalac.bat | 43 ++++---- dist/bin/scaladoc | 57 +--------- dist/bin/scaladoc.bat | 66 +++--------- project/Build.scala | 7 +- project/RepublishPlugin.scala | 193 ++++++++++++++++++++++++++++------ 8 files changed, 217 insertions(+), 228 deletions(-) mode change 100755 => 100644 dist/bin/common mode change 100644 => 100755 dist/bin/scalac diff --git a/dist/bin/common b/dist/bin/common old mode 100755 new mode 100644 index 4a0152fbc4cb..1ff0ca66274c --- a/dist/bin/common +++ b/dist/bin/common @@ -6,62 +6,21 @@ source "$PROG_HOME/bin/common-shared" # * The code below is for Dotty # *-------------------------------------------------*/ -find_lib () { - for lib in "$PROG_HOME"/lib/$1 ; do - if [[ -f "$lib" ]]; then - if [ -n "$CYGPATHCMD" ]; then - "$CYGPATHCMD" -am "$lib" - elif [[ $mingw || $msys ]]; then - echo "$lib" | sed 's|/|\\\\|g' - else - echo "$lib" - fi - return +load_classpath () { + command="$1" + psep_pattern="$2" + __CLASS_PATH="" + while IFS= read -r line; do + if ! 
[[ ( -n ${conemu-} || -n ${msys-}) && "$line" == "*jna-5*" ]]; then + # jna-5 only appropriate for some combinations + __CLASS_PATH+="$PROG_HOME/maven2/$line$psep_pattern" fi - done + done < "$PROG_HOME/etc/$command.classpath" + echo "$__CLASS_PATH" } -DOTTY_COMP=$(find_lib "*scala3-compiler*") -DOTTY_INTF=$(find_lib "*scala3-interfaces*") -DOTTY_LIB=$(find_lib "*scala3-library*") -DOTTY_STAGING=$(find_lib "*scala3-staging*") -DOTTY_TASTY_INSPECTOR=$(find_lib "*scala3-tasty-inspector*") -TASTY_CORE=$(find_lib "*tasty-core*") -SCALA_ASM=$(find_lib "*scala-asm*") -SCALA_LIB=$(find_lib "*scala-library*") -SBT_INTF=$(find_lib "*compiler-interface*") -JLINE_READER=$(find_lib "*jline-reader-3*") -JLINE_TERMINAL=$(find_lib "*jline-terminal-3*") -JLINE_TERMINAL_JNA=$(find_lib "*jline-terminal-jna-3*") - -# jna-5 only appropriate for some combinations -[[ ${conemu-} && ${msys-} ]] || JNA=$(find_lib "*jna-5*") - compilerJavaClasspathArgs () { - # echo "dotty-compiler: $DOTTY_COMP" - # echo "dotty-interface: $DOTTY_INTF" - # echo "dotty-library: $DOTTY_LIB" - # echo "tasty-core: $TASTY_CORE" - # echo "scala-asm: $SCALA_ASM" - # echo "scala-lib: $SCALA_LIB" - # echo "sbt-intface: $SBT_INTF" - - toolchain="" - toolchain+="$SCALA_LIB$PSEP" - toolchain+="$DOTTY_LIB$PSEP" - toolchain+="$SCALA_ASM$PSEP" - toolchain+="$SBT_INTF$PSEP" - toolchain+="$DOTTY_INTF$PSEP" - toolchain+="$DOTTY_COMP$PSEP" - toolchain+="$TASTY_CORE$PSEP" - toolchain+="$DOTTY_STAGING$PSEP" - toolchain+="$DOTTY_TASTY_INSPECTOR$PSEP" - - # jine - toolchain+="$JLINE_READER$PSEP" - toolchain+="$JLINE_TERMINAL$PSEP" - toolchain+="$JLINE_TERMINAL_JNA$PSEP" - [ -n "${JNA-}" ] && toolchain+="$JNA$PSEP" + toolchain="$(load_classpath "scala" "$PSEP")" if [ -n "${jvm_cp_args-}" ]; then jvm_cp_args="$toolchain$jvm_cp_args" diff --git a/dist/bin/common.bat b/dist/bin/common.bat index 7aef606d5509..f9c35e432b36 100644 --- a/dist/bin/common.bat +++ b/dist/bin/common.bat @@ -38,20 +38,6 @@ if not defined _PROG_HOME ( set 
_EXITCODE=1 goto :eof ) -set "_LIB_DIR=%_PROG_HOME%\lib" +set "_ETC_DIR=%_PROG_HOME%\etc" set _PSEP=; - -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-compiler*"') do set "_SCALA3_COMP=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-interfaces*"') do set "_SCALA3_INTF=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-library*"') do set "_SCALA3_LIB=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-staging*"') do set "_SCALA3_STAGING=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-tasty-inspector*"') do set "_SCALA3_TASTY_INSPECTOR=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*tasty-core*"') do set "_TASTY_CORE=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala-asm*"') do set "_SCALA_ASM=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala-library*"') do set "_SCALA_LIB=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*compiler-interface*"') do set "_SBT_INTF=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-reader-3*"') do set "_JLINE_READER=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-terminal-3*"') do set "_JLINE_TERMINAL=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-terminal-jna-3*"') do set "_JLINE_TERMINAL_JNA=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jna-5*"') do set "_JNA=%_LIB_DIR%\%%f" diff --git a/dist/bin/scalac b/dist/bin/scalac old mode 100644 new mode 100755 diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index c8cd0babe60b..fe6d7e3fad4d 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -88,29 +88,10 @@ goto :eof @rem output parameter: _JVM_CP_ARGS :compilerJavaClasspathArgs -@rem echo scala3-compiler: %_SCALA3_COMP% -@rem echo scala3-interface: %_SCALA3_INTF% -@rem echo scala3-library: %_SCALA3_LIB% -@rem echo tasty-core: 
%_TASTY_CORE% -@rem echo scala-asm: %_SCALA_ASM% -@rem echo scala-lib: %_SCALA_LIB% -@rem echo sbt-intface: %_SBT_INTF% - -set "__TOOLCHAIN=%_SCALA_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_ASM%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SBT_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_COMP%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_TASTY_CORE%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_STAGING%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_TASTY_INSPECTOR%%_PSEP%" - -@rem # jline -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_READER%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL_JNA%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JNA%%_PSEP%" + +call :loadClasspathFromFile + +set "__TOOLCHAIN=%_CLASS_PATH%" if defined _SCALA_CPATH ( set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" @@ -119,6 +100,22 @@ if defined _SCALA_CPATH ( ) goto :eof +@REM concatentate every line in "%_ETC_DIR%\scala.classpath" with _PSEP +:loadClasspathFromFile +set _CLASS_PATH= +if exist "%_ETC_DIR%\scala.classpath" ( + for /f "usebackq delims=" %%i in ("%_ETC_DIR%\scala.classpath") do ( + set "_LIB=%_PROG_HOME%\maven2\%%i" + set "_LIB=!_LIB:/=\!" + if not defined _CLASS_PATH ( + set "_CLASS_PATH=!_LIB!" + ) else ( + set "_CLASS_PATH=!_CLASS_PATH!%_PSEP%!_LIB!" 
+ ) + ) +) +goto :eof + @rem ######################################################################### @rem ## Cleanups diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 8b9ec41a7f8c..15bc0813f93a 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -53,62 +53,7 @@ addScrip() { } classpathArgs () { - CLASS_PATH="" - CLASS_PATH+="$(find_lib "*scaladoc*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-compiler*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-interfaces*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-library*")$PSEP" - CLASS_PATH+="$(find_lib "*tasty-core*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-tasty-inspector*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-0*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-anchorlink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-autolink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-emoji*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-strikethrough*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-tasklist*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-wikilink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-yaml-front-matter*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-tables*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-ins*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-superscript*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-ast*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-data*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-dependency*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-misc*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-format*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-sequence*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-builder*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-collection*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-visitor*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-options*")$PSEP" - 
CLASS_PATH+="$(find_lib "*flexmark-util-html*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ast*")$PSEP" - CLASS_PATH+="$(find_lib "*liqp*")$PSEP" - CLASS_PATH+="$(find_lib "*jsoup*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-dataformat-yaml*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-datatype-jsr310*")$PSEP" - CLASS_PATH+="$(find_lib "*strftime4j*")$PSEP" - CLASS_PATH+="$(find_lib "*scala-asm*")$PSEP" - CLASS_PATH+="$(find_lib "*compiler-interface*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-reader*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-terminal-3*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-terminal-jna*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" - CLASS_PATH+="$(find_lib "*autolink-0.6*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-jira-converter*")$PSEP" - CLASS_PATH+="$(find_lib "*antlr4*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-annotations*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-core*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-databind*")$PSEP" - CLASS_PATH+="$(find_lib "*snakeyaml*")$PSEP" - CLASS_PATH+="$(find_lib "*scala-library*")$PSEP" - CLASS_PATH+="$(find_lib "*protobuf-java*")$PSEP" - CLASS_PATH+="$(find_lib "*util-interface*")$PSEP" - CLASS_PATH+="$(find_lib "*jna-5*")$PSEP" - CLASS_PATH+="$(find_lib "*antlr4-runtime*")$PSEP" + CLASS_PATH="$(load_classpath "scaladoc" "$PSEP")" jvm_cp_args="-classpath \"$CLASS_PATH\"" } diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index c30a4689244c..16433a83f501 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -105,60 +105,24 @@ goto :eof @rem output parameter: _CLASS_PATH :classpathArgs -set "_LIB_DIR=%_PROG_HOME%\lib" -set _CLASS_PATH= +set "_ETC_DIR=%_PROG_HOME%\etc" @rem keep list in sync with bash script `bin\scaladoc` ! 
-call :updateClasspath "scaladoc" -call :updateClasspath "scala3-compiler" -call :updateClasspath "scala3-interfaces" -call :updateClasspath "scala3-library" -call :updateClasspath "tasty-core" -call :updateClasspath "scala3-tasty-inspector" -call :updateClasspath "flexmark-0" -call :updateClasspath "flexmark-html-parser" -call :updateClasspath "flexmark-ext-anchorlink" -call :updateClasspath "flexmark-ext-autolink" -call :updateClasspath "flexmark-ext-emoji" -call :updateClasspath "flexmark-ext-gfm-strikethrough" -call :updateClasspath "flexmark-ext-gfm-tables" -call :updateClasspath "flexmark-ext-gfm-tasklist" -call :updateClasspath "flexmark-ext-wikilink" -call :updateClasspath "flexmark-ext-yaml-front-matter" -call :updateClasspath "liqp" -call :updateClasspath "jsoup" -call :updateClasspath "jackson-dataformat-yaml" -call :updateClasspath "jackson-datatype-jsr310" -call :updateClasspath "strftime4j" -call :updateClasspath "scala-asm" -call :updateClasspath "compiler-interface" -call :updateClasspath "jline-reader" -call :updateClasspath "jline-terminal-3" -call :updateClasspath "jline-terminal-jna" -call :updateClasspath "flexmark-util" -call :updateClasspath "flexmark-formatter" -call :updateClasspath "autolink-0.6" -call :updateClasspath "flexmark-jira-converter" -call :updateClasspath "antlr4" -call :updateClasspath "jackson-annotations" -call :updateClasspath "jackson-core" -call :updateClasspath "jackson-databind" -call :updateClasspath "snakeyaml" -call :updateClasspath "scala-library" -call :updateClasspath "protobuf-java" -call :updateClasspath "util-interface" -call :updateClasspath "jna-5" -call :updateClasspath "flexmark-ext-tables" -call :updateClasspath "flexmark-ext-ins" -call :updateClasspath "flexmark-ext-superscript" -call :updateClasspath "antlr4-runtime" +call :loadClasspathFromFile goto :eof -@rem input parameter: %1=pattern for library file -@rem output parameter: _CLASS_PATH -:updateClasspath -set "__PATTERN=%~1" -for /f "delims=" %%f in 
('dir /a-d /b "%_LIB_DIR%\*%__PATTERN%*" 2^>NUL') do ( - set "_CLASS_PATH=!_CLASS_PATH!%_LIB_DIR%\%%f%_PSEP%" +@REM concatentate every line in "%_ETC_DIR%\scaladoc.classpath" with _PSEP +:loadClasspathFromFile +set _CLASS_PATH= +if exist "%_ETC_DIR%\scaladoc.classpath" ( + for /f "usebackq delims=" %%i in ("%_ETC_DIR%\scaladoc.classpath") do ( + set "_LIB=%_PROG_HOME%\maven2\%%i" + set "_LIB=!_LIB:/=\!" + if not defined _CLASS_PATH ( + set "_CLASS_PATH=!_LIB!" + ) else ( + set "_CLASS_PATH=!_CLASS_PATH!%_PSEP%!_LIB!" + ) + ) ) goto :eof diff --git a/project/Build.scala b/project/Build.scala index 6ff07701c06b..3ce365fac9f1 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2127,7 +2127,12 @@ object Build { republishRepo := target.value / "republish", packResourceDir += (republishRepo.value / "bin" -> "bin"), packResourceDir += (republishRepo.value / "maven2" -> "maven2"), - Compile / pack := (Compile / pack).dependsOn(republish).value, + packResourceDir += (republishRepo.value / "etc" -> "etc"), + republishCommandLibs += + ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-staging", "scala3-tasty-inspector")), + republishCommandLibs += + ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-tasty-inspector", "scaladoc")), + Compile / pack := republishPack.value, ) lazy val dist = project.asDist(Bootstrapped) diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 537c82d62cce..6ce83c2f0abf 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -2,6 +2,7 @@ package dotty.tools.sbtplugin import sbt._ import xerial.sbt.pack.PackPlugin +import xerial.sbt.pack.PackPlugin.autoImport.{packResourceDir, packDir} import sbt.Keys._ import sbt.AutoPlugin import sbt.PublishBinPlugin @@ -66,7 +67,9 @@ object RepublishPlugin extends AutoPlugin { val republishBinDir = settingKey[File]("where to find static files for the bin 
dir.") val republishCoursierDir = settingKey[File]("where to download the coursier launcher jar.") val republishBinOverrides = settingKey[Seq[File]]("files to override those in bin-dir.") + val republishCommandLibs = settingKey[Seq[(String, List[String])]]("libraries needed for each command.") val republish = taskKey[File]("cache the dependencies and download launchers for the distribution") + val republishPack = taskKey[File]("do the pack command") val republishRepo = settingKey[File]("the location to store the republished artifacts.") val republishLaunchers = settingKey[Seq[(String, String)]]("launchers to download. Sequence of (name, URL).") val republishCoursier = settingKey[Seq[(String, String)]]("coursier launcher to download. Sequence of (name, URL).") @@ -99,7 +102,7 @@ object RepublishPlugin extends AutoPlugin { }.toSet } - private def coursierCmd(jar: File, cache: File, args: Seq[String]): Unit = { + private def coursierCmd(jar: File, cache: File): Seq[String] => List[String] = { val jar0 = jar.getAbsolutePath.toString val javaHome = sys.props.get("java.home").getOrElse { throw new MessageOnlyException("java.home property not set") @@ -108,38 +111,88 @@ object RepublishPlugin extends AutoPlugin { val cmd = if (scala.util.Properties.isWin) "java.exe" else "java" (file(javaHome) / "bin" / cmd).getAbsolutePath } - val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString) - val cmdLine = Seq(javaCmd, "-jar", jar0) ++ args - // invoke cmdLine with env - val p = new ProcessBuilder(cmdLine: _*).inheritIO() - p.environment().putAll(env.asJava) - val proc = p.start() - proc.waitFor() - if (proc.exitValue() != 0) - throw new MessageOnlyException(s"Error running coursier.jar with args ${args.mkString(" ")}") + val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString).asJava + val cmdLine0 = Seq(javaCmd, "-jar", jar0) + args => + val cmdLine = cmdLine0 ++ args + // invoke cmdLine with env, but also capture the output + val p = new 
ProcessBuilder(cmdLine: _*) + .directory(cache) + .inheritIO() + .redirectOutput(ProcessBuilder.Redirect.PIPE) + p.environment().putAll(env) + + val proc = p.start() + val in = proc.getInputStream + val output = { + try { + val src = scala.io.Source.fromInputStream(in) + try src.getLines().toList + finally src.close() + } finally { + in.close() + } + } + + proc.waitFor() + + if (proc.exitValue() != 0) + throw new MessageOnlyException(s"Error running coursier.jar with args ${args.mkString(" ")}") + + output + } + + private def resolveMaven2(repo: File): Path = { + java.nio.file.Files.walk(repo.toPath) + .filter(_.getFileName.toString == "maven2") + .findFirst() + .orElseThrow(() => new MessageOnlyException(s"Could not find maven2 directory in $repo")) + .toAbsolutePath() } - private def coursierFetch(coursierJar: File, log: Logger, cacheDir: File, localRepo: File, libs: Seq[String]): Unit = { + private def coursierFetch( + coursierJar: File, log: Logger, cacheDir: File, localRepo: File, libs: Seq[String]): Map[String, List[String]] = { + val localRepoPath = localRepo.getAbsolutePath val localRepoArg = { - val path = localRepo.getAbsolutePath - if (scala.util.Properties.isWin) { - val path0 = path.replace('\\', '/') - s"file:///$path0" // extra root slash for Windows paths + val uriPart = { + if (scala.util.Properties.isWin) { + s"/${localRepoPath.replace('\\', '/')}" // extra root slash for Windows paths + } + else { + localRepoPath // no change needed for Unix paths + } } - else - s"file://$path" + s"file://$uriPart" } - IO.createDirectory(cacheDir) - for (lib <- libs) { + val cacheDirPath = cacheDir.getAbsolutePath + lazy val maven2RootLocal = resolveMaven2(localRepo) + lazy val maven2RootCache = resolveMaven2(cacheDir) // lazy because cache dir isn't populated until after fetch + val cmd = coursierCmd(coursierJar, cacheDir) + val resolved = for (lib <- libs) yield { log.info(s"[republish] Fetching $lib with coursier.jar...") - coursierCmd(coursierJar, cacheDir, + 
val out = cmd( Seq( "fetch", + "--no-default", + "--repository", "central", "--repository", localRepoArg, lib ) ) + lib -> out.collect { + case s if s.startsWith(localRepoPath) => + maven2RootLocal.relativize(java.nio.file.Paths.get(s)).toString().replace('\\', '/') // format as uri + case s if s.startsWith(cacheDirPath) => + maven2RootCache.relativize(java.nio.file.Paths.get(s)).toString().replace('\\', '/') // format as uri + } + } + resolved.toMap + } + + private def fuzzyFind[V](map: Map[String, V], key: String): V = { + map.collectFirst({ case (k, v) if k.contains(key) => v }).getOrElse { + throw new MessageOnlyException(s"Could not find key $key in map $map") } } @@ -148,28 +201,34 @@ object RepublishPlugin extends AutoPlugin { private def resolveLibraryDeps( coursierJar: File, log: Logger, + republishDir: File, csrCacheDir: File, localRepo: File, - resolvedLocal: Seq[ResolvedArtifacts]): Seq[ResolvedArtifacts] = { + resolvedLocal: Seq[ResolvedArtifacts], + commandLibs: Seq[(String, List[String])]): Seq[ResolvedArtifacts] = { // publish the local artifacts to the local repo, so coursier can resolve them republishResolvedArtifacts(resolvedLocal, localRepo, logOpt = None) - coursierFetch(coursierJar, log, csrCacheDir, localRepo, resolvedLocal.map(_.id.toString)) + val classpaths = coursierFetch(coursierJar, log, csrCacheDir, localRepo, resolvedLocal.map(_.id.toString)) - val maven2Root = java.nio.file.Files.walk(csrCacheDir.toPath) - .filter(_.getFileName.toString == "maven2") - .findFirst() - .orElseThrow(() => new MessageOnlyException(s"Could not find maven2 directory in $csrCacheDir")) + if (commandLibs.nonEmpty) { + IO.createDirectory(republishDir / "etc") + for ((command, libs) <- commandLibs) { + val entries = libs.map(fuzzyFind(classpaths, _)).reduce(_ ++ _).distinct + IO.write(republishDir / "etc" / s"$command.classpath", entries.mkString("\n")) + } + } + + val maven2Root = resolveMaven2(csrCacheDir) def pathToArtifact(p: Path): ResolvedArtifacts = { // 
relative path from maven2Root - val lastAsString = p.getFileName.toString val relP = maven2Root.relativize(p) val parts = relP.iterator().asScala.map(_.toString).toVector - val (orgParts :+ name :+ rev :+ _) = parts + val (orgParts :+ name :+ rev :+ artifact) = parts val id = SimpleModuleId(orgParts.mkString("."), name, rev) - if (lastAsString.endsWith(".jar")) { + if (artifact.endsWith(".jar")) { ResolvedArtifacts(id, Some(p.toFile), None) } else { ResolvedArtifacts(id, None, Some(p.toFile)) @@ -279,6 +338,7 @@ object RepublishPlugin extends AutoPlugin { republishCoursier := Seq.empty, republishBinOverrides := Seq.empty, republishExtraProps := Seq.empty, + republishCommandLibs := Seq.empty, republishLocalResolved / republishProjectRefs := { val proj = thisProjectRef.value val deps = buildDependencies.value @@ -326,13 +386,15 @@ object RepublishPlugin extends AutoPlugin { val s = streams.value val lm = (republishAllResolved / dependencyResolution).value val cacheDir = republishRepo.value + val commandLibs = republishCommandLibs.value val log = s.log val csrCacheDir = s.cacheDirectory / "csr-cache" val localRepo = s.cacheDirectory / "localRepo" / "maven2" // resolve the transitive dependencies of the local artifacts - val resolvedLibs = resolveLibraryDeps(coursierJar, log, csrCacheDir, localRepo, resolvedLocal) + val resolvedLibs = resolveLibraryDeps( + coursierJar, log, cacheDir, csrCacheDir, localRepo, resolvedLocal, commandLibs) // the combination of local artifacts and resolved transitive dependencies val merged = @@ -395,6 +457,77 @@ object RepublishPlugin extends AutoPlugin { val launchers = republishFetchLaunchers.value val extraProps = republishWriteExtraProps.value cacheDir + }, + republishPack := { + val cacheDir = republish.value + val s = streams.value + val log = s.log + val distDir = target.value / packDir.value + val progVersion = version.value + + IO.createDirectory(distDir) + for ((path, dir) <- packResourceDir.value) { + val target = distDir / dir 
+ IO.copyDirectory(path, target) + } + + locally { + // everything in this block is copied from sbt-pack plugin + import scala.util.Try + import java.time.format.DateTimeFormatterBuilder + import java.time.format.SignStyle + import java.time.temporal.ChronoField.* + import java.time.ZoneId + import java.time.Instant + import java.time.ZonedDateTime + import java.time.ZonedDateTime + import java.util.Locale + import java.util.Date + val base: File = new File(".") // Using the working directory as base for readability + + def write(path: String, content: String) { + val p = distDir / path + IO.write(p, content) + } + + val humanReadableTimestampFormatter = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 2) + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2) + .appendLiteral(' ') + .appendValue(HOUR_OF_DAY, 2) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .appendOffset("+HHMM", "Z") + .toFormatter(Locale.US) + + // Retrieve build time + val systemZone = ZoneId.systemDefault().normalized() + val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(new Date().getTime), systemZone) + val buildTime = humanReadableTimestampFormatter.format(timestamp) + + // Check the current Git revision + val gitRevision: String = Try { + if ((base / ".git").exists()) { + log.info("[republish] Checking the git revision of the current project") + sys.process.Process("git rev-parse HEAD").!! 
+ } else { + "unknown" + } + }.getOrElse("unknown").trim + + + // Output the version number and Git revision + write("VERSION", s"version:=${progVersion}\nrevision:=${gitRevision}\nbuildTime:=${buildTime}\n") + } + + + distDir } ) } From f7e72afa143aba492a03ff10bd1666fed7da7e60 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 21 Jun 2024 14:13:57 +0200 Subject: [PATCH 413/465] add back in copy of mapped sequence --- dist/bin-native-overrides/cli-common-platform.bat | 6 +++++- project/Build.scala | 2 +- project/RepublishPlugin.scala | 14 ++++++++++++-- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/dist/bin-native-overrides/cli-common-platform.bat b/dist/bin-native-overrides/cli-common-platform.bat index e0cfa40692b5..d1c4f1c4716b 100644 --- a/dist/bin-native-overrides/cli-common-platform.bat +++ b/dist/bin-native-overrides/cli-common-platform.bat @@ -12,7 +12,11 @@ FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\EXTRA_PROPERTIES") DO ( ) ) +@REM we didn't find it, so we should fail +echo "ERROR: cli_version not found in EXTRA_PROPERTIES file" +exit /b 1 + :foundCliVersion endlocal & set "SCALA_CLI_VERSION=%_SCALA_CLI_VERSION%" -set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" "--cli-version" "%SCALA_CLI_VERSION%" \ No newline at end of file +set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" "--cli-version" "%SCALA_CLI_VERSION%" diff --git a/project/Build.scala b/project/Build.scala index 3ce365fac9f1..b72715970fb1 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2172,7 +2172,7 @@ object Build { republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishExtraProps += ("cli_version" -> scalaCliLauncherVersion), - mappings += (republishRepo.value / "etc" / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), + mappings += (republishRepo.value / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), republishLaunchers += ("scala-cli.exe" -> 
s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersionWindows/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") ) diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 6ce83c2f0abf..a0a8ce7dae74 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -114,7 +114,7 @@ object RepublishPlugin extends AutoPlugin { val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString).asJava val cmdLine0 = Seq(javaCmd, "-jar", jar0) args => - val cmdLine = cmdLine0 ++ args + val cmdLine = cmdLine0 ++ args // invoke cmdLine with env, but also capture the output val p = new ProcessBuilder(cmdLine: _*) .directory(cache) @@ -441,7 +441,7 @@ object RepublishPlugin extends AutoPlugin { } else { val repoDir = republishRepo.value - val propsFile = repoDir / "etc" / "EXTRA_PROPERTIES" + val propsFile = repoDir / "EXTRA_PROPERTIES" log.info(s"[republish] Writing extra properties to $propsFile...") Using.fileWriter()(propsFile) { writer => extraProps.foreach { case (k, v) => @@ -485,6 +485,16 @@ object RepublishPlugin extends AutoPlugin { import java.util.Date val base: File = new File(".") // Using the working directory as base for readability + // Copy explicitly added dependencies + val mapped: Seq[(File, String)] = mappings.value + log.info("[republish] Copying explicit dependencies:") + val explicitDepsJars = for ((file, path) <- mapped) yield { + log.info(file.getPath) + val dest = distDir / path + IO.copyFile(file, dest, true) + dest + } + def write(path: String, content: String) { val p = distDir / path IO.write(p, content) From 81e3cc4a3427a68586abd4a4b0bef87851f56938 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 21 Jun 2024 14:58:42 +0200 Subject: [PATCH 414/465] read last line, split-off with-compiler classpath --- dist/bin/common | 16 ++++++++++++---- dist/bin/scalac.bat | 27 ++++++++++++++++++--------- project/Build.scala | 4 +++- project/RepublishPlugin.scala | 8 +++++++- 4 
files changed, 40 insertions(+), 15 deletions(-) diff --git a/dist/bin/common b/dist/bin/common index 1ff0ca66274c..63e598d70d7e 100644 --- a/dist/bin/common +++ b/dist/bin/common @@ -10,10 +10,13 @@ load_classpath () { command="$1" psep_pattern="$2" __CLASS_PATH="" - while IFS= read -r line; do + while IFS= read -r line || [ -n "$line" ]; do + # jna-5 only appropriate for some combinations if ! [[ ( -n ${conemu-} || -n ${msys-}) && "$line" == "*jna-5*" ]]; then - # jna-5 only appropriate for some combinations - __CLASS_PATH+="$PROG_HOME/maven2/$line$psep_pattern" + if [ -n "$__CLASS_PATH" ]; then + __CLASS_PATH+="$psep_pattern" + fi + __CLASS_PATH+="$PROG_HOME/maven2/$line" fi done < "$PROG_HOME/etc/$command.classpath" echo "$__CLASS_PATH" @@ -21,11 +24,16 @@ load_classpath () { compilerJavaClasspathArgs () { toolchain="$(load_classpath "scala" "$PSEP")" + toolchain_extra="$(load_classpath "with_compiler" "$PSEP")" + + if [ -n "$toolchain_extra" ]; then + toolchain+="$PSEP$toolchain_extra" + fi if [ -n "${jvm_cp_args-}" ]; then jvm_cp_args="$toolchain$jvm_cp_args" else - jvm_cp_args="$toolchain$PSEP" + jvm_cp_args="$toolchain" fi } diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index fe6d7e3fad4d..dbcbaf11b8e2 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -89,9 +89,16 @@ goto :eof @rem output parameter: _JVM_CP_ARGS :compilerJavaClasspathArgs -call :loadClasspathFromFile +set "CP_FILE=%_ETC_DIR%\scala.classpath" +call :loadClasspathFromFile %CP_FILE% +set "__TOOLCHAIN=%_CLASS_PATH_RESULT%" -set "__TOOLCHAIN=%_CLASS_PATH%" +set "CP_FILE=%_ETC_DIR%\with_compiler.classpath" +call :loadClasspathFromFile %CP_FILE% + +if defined _CLASS_PATH_RESULT ( + set "__TOOLCHAIN=%__TOOLCHAIN%%_PSEP%%_CLASS_PATH_RESULT%" +) if defined _SCALA_CPATH ( set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" @@ -100,17 +107,19 @@ if defined _SCALA_CPATH ( ) goto :eof -@REM concatentate every line in "%_ETC_DIR%\scala.classpath" with _PSEP +@REM concatentate every line in 
"%_ARG_FILE%" with _PSEP +@REM arg 1 - file to read :loadClasspathFromFile -set _CLASS_PATH= -if exist "%_ETC_DIR%\scala.classpath" ( - for /f "usebackq delims=" %%i in ("%_ETC_DIR%\scala.classpath") do ( +set _ARG_FILE=%1 +set _CLASS_PATH_RESULT= +if exist "%_ARG_FILE%" ( + for /f "usebackq delims=" %%i in ("%_ARG_FILE%") do ( set "_LIB=%_PROG_HOME%\maven2\%%i" set "_LIB=!_LIB:/=\!" - if not defined _CLASS_PATH ( - set "_CLASS_PATH=!_LIB!" + if not defined _CLASS_PATH_RESULT ( + set "_CLASS_PATH_RESULT=!_LIB!" ) else ( - set "_CLASS_PATH=!_CLASS_PATH!%_PSEP%!_LIB!" + set "_CLASS_PATH_RESULT=!_CLASS_PATH_RESULT!%_PSEP%!_LIB!" ) ) ) diff --git a/project/Build.scala b/project/Build.scala index b72715970fb1..d8f10019c1d7 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2129,7 +2129,9 @@ object Build { packResourceDir += (republishRepo.value / "maven2" -> "maven2"), packResourceDir += (republishRepo.value / "etc" -> "etc"), republishCommandLibs += - ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-staging", "scala3-tasty-inspector")), + ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), + republishCommandLibs += + ("with_compiler" -> List("scala3-staging", "scala3-tasty-inspector", "^!scala3-interfaces", "^!scala3-compiler", "^!scala3-library", "^!tasty-core")), republishCommandLibs += ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-tasty-inspector", "scaladoc")), Compile / pack := republishPack.value, diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index a0a8ce7dae74..e4bf40545a6b 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -215,7 +215,13 @@ object RepublishPlugin extends AutoPlugin { if (commandLibs.nonEmpty) { IO.createDirectory(republishDir / "etc") for ((command, libs) <- commandLibs) { - val entries = libs.map(fuzzyFind(classpaths, _)).reduce(_ ++ 
_).distinct + val (negated, actual) = libs.partition(_.startsWith("^!")) + val subtractions = negated.map(_.stripPrefix("^!")) + + def compose(libs: List[String]): List[String] = + libs.map(fuzzyFind(classpaths, _)).reduceOption(_ ++ _).map(_.distinct).getOrElse(Nil) + + val entries = compose(actual).diff(compose(subtractions)) IO.write(republishDir / "etc" / s"$command.classpath", entries.mkString("\n")) } } From e74d681ca64c20eff33cf4e34dda6b7f97ebe1e9 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Mon, 1 Jul 2024 11:55:12 +0200 Subject: [PATCH 415/465] Bump scala-cli to 1.4.0 (#20859) --- project/Build.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 6ff07701c06b..3fa01ca4337d 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -118,9 +118,9 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.3.2" + val scalaCliLauncherVersion = "1.4.0" /** Version of Scala CLI to download (on Windows - last known validated version) */ - val scalaCliLauncherVersionWindows = "1.3.2" + val scalaCliLauncherVersionWindows = "1.4.0" /** Version of Coursier to download for initializing the local maven repo of Scala command */ val coursierJarVersion = "2.1.10" From d470b7782efa759b21c771ab71ab3c5d8941b9f1 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 1 Jul 2024 22:48:33 +0200 Subject: [PATCH 416/465] Fix failing CompletionScalaCliSuite tests due to circe releasing Scala Native 0.5 artifacts --- .../completion/CompletionScalaCliSuite.scala | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala index 0d86922d4e70..0a74aed35f48 100644 --- 
a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala @@ -28,7 +28,8 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |// //> using lib ??? |//> using lib io.circe::circe-core_native0.4 |package A - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) @Test def `version-sort` = @@ -51,6 +52,9 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|circe-core_native0.4_2.12 |circe-core_native0.4_2.13 |circe-core_native0.4_3 + |circe-core_native0.5_2.12 + |circe-core_native0.5_2.13 + |circe-core_native0.5_3 |""".stripMargin ) @@ -78,7 +82,9 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|//> using lib "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, - "circe-core_native0.4" + """circe-core_native0.4 + |circe-core_native0.5 + |""".stripMargin ) @Test def `script` = @@ -92,6 +98,9 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|circe-core_native0.4_2.12 |circe-core_native0.4_2.13 |circe-core_native0.4_3 + |circe-core_native0.5_2.12 + |circe-core_native0.5_2.13 + |circe-core_native0.5_3 |""".stripMargin, filename = "script.sc.scala", enablePackageWrap = false @@ -138,7 +147,8 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|//> using libs "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, - "circe-core_native0.4" + """circe-core_native0.4 + |circe-core_native0.5""".stripMargin ) private def scriptWrapper(code: String, filename: String): String = From edc8cbc552e61ef963375174686cae0407b4dd14 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Tue, 2 Jul 2024 12:35:01 +0200 Subject: [PATCH 417/465] Ignore failing tests instead of expecting for completions for both 0.4 and 0.5 SN versins - the outputs seems be non deterministic in the CI --- 
.../completion/CompletionScalaCliSuite.scala | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala index 0a74aed35f48..79d35944c84d 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala @@ -3,6 +3,7 @@ package dotty.tools.pc.tests.completion import dotty.tools.pc.base.BaseCompletionSuite import org.junit.Test +import org.junit.Ignore class CompletionScalaCliSuite extends BaseCompletionSuite: @@ -44,6 +45,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |""".stripMargin, ) + @Ignore @Test def `single-colon` = check( """|//> using lib "io.circe:circe-core_na@@ @@ -52,9 +54,6 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|circe-core_native0.4_2.12 |circe-core_native0.4_2.13 |circe-core_native0.4_3 - |circe-core_native0.5_2.12 - |circe-core_native0.5_2.13 - |circe-core_native0.5_3 |""".stripMargin ) @@ -77,16 +76,16 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |""".stripMargin, ) + @Ignore @Test def `multiple-libs` = check( """|//> using lib "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, - """circe-core_native0.4 - |circe-core_native0.5 - |""".stripMargin + "circe-core_native0.4" ) + @Ignore @Test def `script` = check( scriptWrapper( @@ -98,9 +97,6 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|circe-core_native0.4_2.12 |circe-core_native0.4_2.13 |circe-core_native0.4_3 - |circe-core_native0.5_2.12 - |circe-core_native0.5_2.13 - |circe-core_native0.5_3 |""".stripMargin, filename = "script.sc.scala", enablePackageWrap = false @@ -142,13 +138,13 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: 
|io.circul""".stripMargin ) + @Ignore @Test def `multiple-deps2` = check( """|//> using libs "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, - """circe-core_native0.4 - |circe-core_native0.5""".stripMargin + "circe-core_native0.4" ) private def scriptWrapper(code: String, filename: String): String = From 1591ac9efbfce9efba46b85f2a2385eff94bcdeb Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 1 Jul 2024 17:02:28 +0200 Subject: [PATCH 418/465] fix issue 20901: etaCollapse context bound type --- .../src/dotty/tools/dotc/typer/Typer.scala | 7 +- tests/pos/i20901/Foo.scala | 6 + tests/pos/i20901/Foo.tastycheck | 124 ++++++++++++++++++ 3 files changed, 134 insertions(+), 3 deletions(-) create mode 100644 tests/pos/i20901/Foo.scala create mode 100644 tests/pos/i20901/Foo.tastycheck diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ae62ebbc4a3f..4cb695a15966 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2366,13 +2366,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedContextBoundTypeTree(tree: untpd.ContextBoundTypeTree)(using Context): Tree = val tycon = typedType(tree.tycon) - val tyconSplice = untpd.TypedSplice(tycon) + def spliced(tree: Tree) = untpd.TypedSplice(tree) val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) if tycon.tpe.typeParams.nonEmpty then - typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) + val tycon0 = tycon.withType(tycon.tpe.etaCollapse) + typed(untpd.AppliedTypeTree(spliced(tycon0), tparam :: Nil)) else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) - typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) + typed(untpd.RefinedTypeTree(spliced(tycon), 
List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else def selfNote = if Feature.enabled(modularity) then diff --git a/tests/pos/i20901/Foo.scala b/tests/pos/i20901/Foo.scala new file mode 100644 index 000000000000..c1277781db38 --- /dev/null +++ b/tests/pos/i20901/Foo.scala @@ -0,0 +1,6 @@ +//> using options -Ytest-pickler-check + +import reflect.ClassTag + +class Foo: + def mkArray[T: ClassTag] = ??? diff --git a/tests/pos/i20901/Foo.tastycheck b/tests/pos/i20901/Foo.tastycheck new file mode 100644 index 000000000000..0201bfec2056 --- /dev/null +++ b/tests/pos/i20901/Foo.tastycheck @@ -0,0 +1,124 @@ +Header: + version: + tooling: + UUID: + +Names (276 bytes, starting from ): + 0: ASTs + 1: + 2: scala + 3: reflect + 4: scala[Qualified . reflect] + 5: ClassTag + 6: Foo + 7: + 8: java + 9: lang + 10: java[Qualified . lang] + 11: Object + 12: java[Qualified . lang][Qualified . Object] + 13: [Signed Signature(List(),java.lang.Object) @] + 14: Unit + 15: mkArray + 16: T + 17: Nothing + 18: Any + 19: evidence$ + 20: [Unique evidence$ 1] + 21: ??? + 22: Predef + 23: SourceFile + 24: annotation + 25: scala[Qualified . annotation] + 26: internal + 27: scala[Qualified . annotation][Qualified . internal] + 28: scala[Qualified . annotation][Qualified . internal][Qualified . SourceFile] + 29: String + 30: java[Qualified . lang][Qualified . String] + 31: [Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @] + 32: + 33: Positions + 34: Comments + 35: Attributes + +Trees (94 bytes, starting from ): + 0: PACKAGE(92) + 2: TERMREFpkg 1 [] + 4: IMPORT(4) + 6: TERMREFpkg 4 [scala[Qualified . reflect]] + 8: IMPORTED 5 [ClassTag] + 10: TYPEDEF(82) 6 [Foo] + 13: TEMPLATE(61) + 15: APPLY(10) + 17: SELECTin(8) 13 [[Signed Signature(List(),java.lang.Object) @]] + 20: NEW + 21: TYPEREF 11 [Object] + 23: TERMREFpkg 10 [java[Qualified . 
lang]] + 25: SHAREDtype 21 + 27: DEFDEF(7) 7 [] + 30: EMPTYCLAUSE + 31: TYPEREF 14 [Unit] + 33: TERMREFpkg 2 [scala] + 35: STABLE + 36: DEFDEF(38) 15 [mkArray] + 39: TYPEPARAM(11) 16 [T] + 42: TYPEBOUNDStpt(8) + 44: TYPEREF 17 [Nothing] + 46: SHAREDtype 33 + 48: TYPEREF 18 [Any] + 50: SHAREDtype 33 + 52: PARAM(14) 20 [[Unique evidence$ 1]] + 55: APPLIEDtpt(10) + 57: IDENTtpt 5 [ClassTag] + 59: TYPEREF 5 [ClassTag] + 61: SHAREDtype 6 + 63: IDENTtpt 16 [T] + 65: TYPEREFdirect 39 + 67: IMPLICIT + 68: SHAREDtype 44 + 70: TERMREF 21 [???] + 72: TERMREF 22 [Predef] + 74: SHAREDtype 33 + 76: ANNOTATION(16) + 78: TYPEREF 23 [SourceFile] + 80: TERMREFpkg 27 [scala[Qualified . annotation][Qualified . internal]] + 82: APPLY(10) + 84: SELECTin(6) 31 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] + 87: NEW + 88: SHAREDtype 78 + 90: SHAREDtype 78 + 92: STRINGconst 32 [] + 94: + +Positions (72 bytes, starting from ): + lines: 7 + line sizes: + 38, 0, 23, 0, 10, 32, 0 + positions: + 0: 40 .. 108 + 4: 40 .. 63 + 6: 47 .. 54 + 8: 55 .. 63 + 10: 65 .. 108 + 13: 78 .. 108 + 21: 71 .. 71 + 27: 78 .. 78 + 31: 78 .. 78 + 36: 78 .. 108 + 39: 90 .. 101 + 44: 93 .. 93 + 48: 93 .. 93 + 52: 93 .. 101 + 57: 93 .. 101 + 63: 93 .. 101 + 68: 102 .. 102 + 70: 105 .. 108 + 82: 65 .. 108 + 88: 65 .. 65 + 92: 65 .. 
65 + + source paths: + 0: 32 [] + +Attributes (2 bytes, starting from ): + SOURCEFILEattr 32 [] From f8a2e563159c85f312f7d2cb48909ee08ba25f24 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 1 Jul 2024 21:39:14 +0200 Subject: [PATCH 419/465] update semanticdb test (restore references) --- tests/semanticdb/expect/Methods.expect.scala | 2 +- tests/semanticdb/metac.expect | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/semanticdb/expect/Methods.expect.scala b/tests/semanticdb/expect/Methods.expect.scala index 4ec723ad584e..e1fcfa6880e1 100644 --- a/tests/semanticdb/expect/Methods.expect.scala +++ b/tests/semanticdb/expect/Methods.expect.scala @@ -15,7 +15,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] { def m6/*<-example::Methods#m6().*/(x/*<-example::Methods#m6().(x)*/: Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+1).*/(x/*<-example::Methods#m6(+1).(x)*/: List/*->example::Methods#List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+2).*/(x/*<-example::Methods#m6(+2).(x)*/: scala.List/*->scala::package.List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ - def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ + def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->scala::math::Ordering#*//*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) 
= ???/*->scala::Predef.`???`().*/ def `m8()./*<-example::Methods#`m8().`().*/`() = ???/*->scala::Predef.`???`().*/ class `m9()./*<-example::Methods#`m9().`#*/` def m9/*<-example::Methods#m9().*/(x/*<-example::Methods#m9().(x)*/: `m9().`/*->example::Methods#`m9().`#*/) = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 98657f122255..9dc2fd8a44c9 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2584,7 +2584,7 @@ Uri => Methods.scala Text => empty Language => Scala Symbols => 82 entries -Occurrences => 156 entries +Occurrences => 157 entries Symbols: example/Methods# => class Methods [typeparam T ] extends Object { self: Methods[T] => +44 decls } @@ -2728,6 +2728,7 @@ Occurrences: [16:29..16:32): ??? -> scala/Predef.`???`(). [17:6..17:8): m7 <- example/Methods#m7(). [17:9..17:10): U <- example/Methods#m7().[U] +[17:12..17:20): Ordering -> scala/math/Ordering# [17:12..17:20): Ordering -> example/Methods#m7().[U] [17:12..17:12): <- example/Methods#m7().(evidence$1) [17:22..17:23): c <- example/Methods#m7().(c) From a5c74e79e0b34e9597e4e6725b58196d7dcdb1f6 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 26 Feb 2024 14:32:18 -0800 Subject: [PATCH 420/465] use Scala 2.13.13 stdlib (was .12) --- community-build/community-projects/stdLib213 | 2 +- project/Build.scala | 8 ++++---- project/Scala2LibraryBootstrappedMiMaFilters.scala | 3 --- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/community-build/community-projects/stdLib213 b/community-build/community-projects/stdLib213 index 6243e902928c..fcc67cd56c67 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit 6243e902928c344fb0e82e21120bb257f08a2af2 +Subproject commit fcc67cd56c67851bf31019ec25ccb09d08b9561b diff --git a/project/Build.scala b/project/Build.scala index 8dbc691136d6..0f32d892e51a 100644 --- 
a/project/Build.scala +++ b/project/Build.scala @@ -144,8 +144,8 @@ object Build { * scala-library. */ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.12" - case Bootstrapped => "2.13.12" + case NonBootstrapped => "2.13.13" + case Bootstrapped => "2.13.13" } /** Version of the scala-library for which we will generate TASTy. @@ -155,7 +155,7 @@ object Build { * We can use nightly versions to tests the future compatibility in development. * Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang */ - val stdlibBootstrappedVersion = "2.13.12" + val stdlibBootstrappedVersion = "2.13.13" val dottyOrganization = "org.scala-lang" val dottyGithubUrl = "https://github.com/scala/scala3" @@ -1358,7 +1358,7 @@ object Build { "io.get-coursier" % "interface" % "1.0.18", "org.scalameta" % "mtags-interfaces" % mtagsVersion, ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.12" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.13" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings diff --git a/project/Scala2LibraryBootstrappedMiMaFilters.scala b/project/Scala2LibraryBootstrappedMiMaFilters.scala index bd149d5a910b..0d2b5a7fd945 100644 --- a/project/Scala2LibraryBootstrappedMiMaFilters.scala +++ b/project/Scala2LibraryBootstrappedMiMaFilters.scala @@ -78,9 +78,6 @@ object Scala2LibraryBootstrappedMiMaFilters { "scala.collection.IterableOnceOps#Maximized.this", // New in 2.13.11: private inner class "scala.util.Properties.", "scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5", - // New in 2.13.12 -- can be removed once scala/scala#10549 lands in 2.13.13 - // and we take the upgrade here - "scala.collection.immutable.MapNodeRemoveAllSetNodeIterator.next", 
).map(ProblemFilters.exclude[DirectMissingMethodProblem]) } ) From b357bc93e512c93e6b70ed0eed906b4daf7febf6 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 1 Jul 2024 13:03:41 +0200 Subject: [PATCH 421/465] Upgrade Scala 2 to 2.13.14 --- project/Build.scala | 8 ++++---- project/Scala2LibraryBootstrappedMiMaFilters.scala | 4 ++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 0f32d892e51a..9dc75838ba15 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -144,8 +144,8 @@ object Build { * scala-library. */ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.13" - case Bootstrapped => "2.13.13" + case NonBootstrapped => "2.13.14" + case Bootstrapped => "2.13.14" } /** Version of the scala-library for which we will generate TASTy. @@ -155,7 +155,7 @@ object Build { * We can use nightly versions to tests the future compatibility in development. * Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang */ - val stdlibBootstrappedVersion = "2.13.13" + val stdlibBootstrappedVersion = "2.13.14" val dottyOrganization = "org.scala-lang" val dottyGithubUrl = "https://github.com/scala/scala3" @@ -1358,7 +1358,7 @@ object Build { "io.get-coursier" % "interface" % "1.0.18", "org.scalameta" % "mtags-interfaces" % mtagsVersion, ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.13" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.14" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings diff --git a/project/Scala2LibraryBootstrappedMiMaFilters.scala b/project/Scala2LibraryBootstrappedMiMaFilters.scala index 0d2b5a7fd945..102a2a50e9d4 100644 --- a/project/Scala2LibraryBootstrappedMiMaFilters.scala +++ 
b/project/Scala2LibraryBootstrappedMiMaFilters.scala @@ -172,6 +172,10 @@ object Scala2LibraryBootstrappedMiMaFilters { "scala.collection.mutable.LinkedHashSet.defaultLoadFactor", // private[collection] final def "scala.collection.mutable.LinkedHashSet.defaultinitialSize", // private[collection] final def "scala.collection.mutable.OpenHashMap.nextPositivePowerOfTwo", // private[mutable] def + // New in 2.13.13 + "scala.collection.mutable.ArrayBuffer.resizeUp", // private[mutable] def + // New in 2.13.14 + "scala.util.Properties.consoleIsTerminal", // private[scala] lazy val ).map(ProblemFilters.exclude[DirectMissingMethodProblem]) ++ Seq( // MissingFieldProblem: static field ... in object ... does not have a correspondent in other version "scala.Array.UnapplySeqWrapper", From dcf708ca50511ff6dc7a1f4a6ef3dbd1e8b5fd9c Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 1 Jul 2024 13:05:24 +0200 Subject: [PATCH 422/465] Phiscally remove the ignored Scala 2 library-aux files instead of filtering them out in `Compile / sources` (not reliable, for some reasone the AnyRef.scala was still compiled) --- project/Build.scala | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 9dc75838ba15..41e5f3e082f5 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1124,19 +1124,23 @@ object Build { IO.createDirectory(trgDir) IO.unzip(scalaLibrarySourcesJar, trgDir) - ((trgDir ** "*.scala") +++ (trgDir ** "*.java")).get.toSet + val (ignoredSources, sources) = + ((trgDir ** "*.scala") +++ (trgDir ** "*.java")).get.toSet + .partition{file => + // sources from https://github.com/scala/scala/tree/2.13.x/src/library-aux + val path = file.getPath.replace('\\', '/') + path.endsWith("scala-library-src/scala/Any.scala") || + path.endsWith("scala-library-src/scala/AnyVal.scala") || + path.endsWith("scala-library-src/scala/AnyRef.scala") || + 
path.endsWith("scala-library-src/scala/Nothing.scala") || + path.endsWith("scala-library-src/scala/Null.scala") || + path.endsWith("scala-library-src/scala/Singleton.scala") + } + // These sources should be never compiled, filtering them out was not working correctly sometimes + ignoredSources.foreach(_.delete()) + sources } (Set(scalaLibrarySourcesJar)).toSeq }.taskValue, - (Compile / sources) ~= (_.filterNot { file => - // sources from https://github.com/scala/scala/tree/2.13.x/src/library-aux - val path = file.getPath.replace('\\', '/') - path.endsWith("scala-library-src/scala/Any.scala") || - path.endsWith("scala-library-src/scala/AnyVal.scala") || - path.endsWith("scala-library-src/scala/AnyRef.scala") || - path.endsWith("scala-library-src/scala/Nothing.scala") || - path.endsWith("scala-library-src/scala/Null.scala") || - path.endsWith("scala-library-src/scala/Singleton.scala") - }), (Compile / sources) := { val files = (Compile / sources).value val overwrittenSourcesDir = (Compile / scalaSource).value From c042e57d2238e87cc8f91f1aef36270d659f5be8 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 1 Jul 2024 10:29:16 +0200 Subject: [PATCH 423/465] Add --skip-cli-updates by default to the scala command --- dist/bin/scala | 1 + dist/bin/scala.bat | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/dist/bin/scala b/dist/bin/scala index 71747a8e9e20..35efdfc38d96 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -59,6 +59,7 @@ done # SCALA_CLI_CMD_BASH is an array, set by cli-common-platform eval "${SCALA_CLI_CMD_BASH[@]}" \ "--prog-name scala" \ + "--skip-cli-updates" \ "--cli-default-scala-version \"$SCALA_VERSION\"" \ "-r \"$MVN_REPOSITORY\"" \ "${scala_args[@]}" diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index d473facbbb1c..7418909da263 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -21,8 +21,9 @@ call :setScalaOpts call "%_PROG_HOME%\bin\cli-common-platform.bat" -@rem SCALA_CLI_CMD_WIN is an array, set in 
cli-common-platform.bat -call %SCALA_CLI_CMD_WIN% "--prog-name" "scala" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* +@rem SCALA_CLI_CMD_WIN is an array, set in cli-common-platform.bat. +@rem WE NEED TO PASS '--skip-cli-updates' for JVM launchers but we actually don't need it for native launchers +call %SCALA_CLI_CMD_WIN% "--prog-name" "scala" "--skip-cli-updates" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) From 91b8abde7bc555aad0d6c01bb99135771c8b6f86 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 25 May 2024 15:34:59 +0200 Subject: [PATCH 424/465] Avoid useless warnings about priority change in implicit search Warn about priority change in implicit search only if one of the participating candidates appears in the final result. It could be that we have an priority change between two ranked candidates that both are superseded by the result of the implicit search. In this case, no warning needs to be reported. 
--- .../dotty/tools/dotc/typer/Implicits.scala | 32 +++++++++++++++---- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 74bd59d4992f..e3615ce40592 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -419,6 +419,12 @@ object Implicits: sealed abstract class SearchResult extends Showable { def tree: Tree def toText(printer: Printer): Text = printer.toText(this) + + /** The references that were found, there can be two of them in the case + * of an AmbiguousImplicits failure + */ + def found: List[TermRef] + def recoverWith(other: SearchFailure => SearchResult): SearchResult = this match { case _: SearchSuccess => this case fail: SearchFailure => other(fail) @@ -434,13 +440,17 @@ object Implicits: * @param tstate The typer state to be committed if this alternative is chosen */ case class SearchSuccess(tree: Tree, ref: TermRef, level: Int, isExtension: Boolean = false)(val tstate: TyperState, val gstate: GadtConstraint) - extends SearchResult with RefAndLevel with Showable + extends SearchResult with RefAndLevel with Showable: + final def found = ref :: Nil /** A failed search */ case class SearchFailure(tree: Tree) extends SearchResult { require(tree.tpe.isInstanceOf[SearchFailureType], s"unexpected type for ${tree}") final def isAmbiguous: Boolean = tree.tpe.isInstanceOf[AmbiguousImplicits | TooUnspecific] final def reason: SearchFailureType = tree.tpe.asInstanceOf[SearchFailureType] + final def found = tree.tpe match + case tpe: AmbiguousImplicits => tpe.alt1.ref :: tpe.alt2.ref :: Nil + case _ => Nil } object SearchFailure { @@ -1290,6 +1300,11 @@ trait Implicits: /** Search a list of eligible implicit references */ private def searchImplicit(eligible: List[Candidate], contextual: Boolean): SearchResult = + // A map that associates a priority change warning (between 
-source 3.4 and 3.6) + // with a candidate ref mentioned in the warning. We report the associated + // message if the candidate ref is part of the result of the implicit search + var priorityChangeWarnings = mutable.ListBuffer[(TermRef, Message)]() + /** Compare `alt1` with `alt2` to determine which one should be chosen. * * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1306,6 +1321,8 @@ trait Implicits: */ def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) + def warn(msg: Message) = + priorityChangeWarnings += (alt1.ref -> msg) += (alt2.ref -> msg) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else @@ -1319,16 +1336,16 @@ trait Implicits: case 1 => "the first alternative" case _ => "none - it's ambiguous" if sv.stable == SourceVersion.`3.5` then - report.warning( + warn( em"""Given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} will change |Current choice : ${choice(prev)} - |New choice from Scala 3.6: ${choice(cmp)}""", srcPos) + |New choice from Scala 3.6: ${choice(cmp)}""") prev else - report.warning( + warn( em"""Change in given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} |Previous choice : ${choice(prev)} - |New choice from Scala 3.6: ${choice(cmp)}""", srcPos) + |New choice from Scala 3.6: ${choice(cmp)}""") cmp else cmp else cmp @@ -1578,7 +1595,10 @@ trait Implicits: validateOrdering(ord) throw ex - rank(sort(eligible), NoMatchingImplicitsFailure, Nil) + val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) + for (ref, msg) <- priorityChangeWarnings do + if result.found.contains(ref) then report.warning(msg, srcPos) + result end searchImplicit def isUnderSpecifiedArgument(tp: Type): Boolean = From 9354ad5297d8b20bca82159536564d0b88e1820d Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 27 May 2024 17:57:03 +0200 Subject: 
[PATCH 425/465] Re-enable semanticdb test --- tests/semanticdb/expect/InventedNames.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/semanticdb/expect/InventedNames.scala b/tests/semanticdb/expect/InventedNames.scala index 61baae46c832..42c14c90e370 100644 --- a/tests/semanticdb/expect/InventedNames.scala +++ b/tests/semanticdb/expect/InventedNames.scala @@ -32,7 +32,7 @@ given [T]: Z[T] with val a = intValue val b = given_String -//val c = given_Double +val c = given_Double val d = given_List_T[Int] val e = given_Char val f = given_Float From 7ac54178c4ac0478808d1cf3fc95da209601d652 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 27 May 2024 23:38:03 +0200 Subject: [PATCH 426/465] Update semanticDB expect files --- tests/semanticdb/expect/InventedNames.expect.scala | 2 +- tests/semanticdb/metac.expect | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/tests/semanticdb/expect/InventedNames.expect.scala b/tests/semanticdb/expect/InventedNames.expect.scala index b92e9aa940a7..7c5b008209c2 100644 --- a/tests/semanticdb/expect/InventedNames.expect.scala +++ b/tests/semanticdb/expect/InventedNames.expect.scala @@ -32,7 +32,7 @@ given [T/*<-givens::InventedNames$package.given_Z_T#[T]*/]: Z/*->givens::Z#*/[T/ val a/*<-givens::InventedNames$package.a.*/ = intValue/*->givens::InventedNames$package.intValue.*/ val b/*<-givens::InventedNames$package.b.*/ = given_String/*->givens::InventedNames$package.given_String.*/ -//val c = given_Double +val c/*<-givens::InventedNames$package.c.*/ = given_Double/*->givens::InventedNames$package.given_Double().*/ val d/*<-givens::InventedNames$package.d.*/ = given_List_T/*->givens::InventedNames$package.given_List_T().*/[Int/*->scala::Int#*/] val e/*<-givens::InventedNames$package.e.*/ = given_Char/*->givens::InventedNames$package.given_Char.*/ val f/*<-givens::InventedNames$package.f.*/ = given_Float/*->givens::InventedNames$package.given_Float.*/ diff --git 
a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 98657f122255..84c3e7c6a110 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2093,15 +2093,16 @@ Schema => SemanticDB v4 Uri => InventedNames.scala Text => empty Language => Scala -Symbols => 44 entries -Occurrences => 64 entries -Synthetics => 2 entries +Symbols => 45 entries +Occurrences => 66 entries +Synthetics => 3 entries Symbols: -givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +23 decls } +givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +24 decls } givens/InventedNames$package.`* *`. => final implicit lazy val given method * * Long givens/InventedNames$package.a. => val method a Int givens/InventedNames$package.b. => val method b String +givens/InventedNames$package.c. => val method c Double givens/InventedNames$package.d. => val method d List[Int] givens/InventedNames$package.e. => val method e Char givens/InventedNames$package.f. => val method f Float @@ -2192,6 +2193,8 @@ Occurrences: [32:8..32:16): intValue -> givens/InventedNames$package.intValue. [33:4..33:5): b <- givens/InventedNames$package.b. [33:8..33:20): given_String -> givens/InventedNames$package.given_String. +[34:4..34:5): c <- givens/InventedNames$package.c. +[34:8..34:20): given_Double -> givens/InventedNames$package.given_Double(). [35:4..35:5): d <- givens/InventedNames$package.d. [35:8..35:20): given_List_T -> givens/InventedNames$package.given_List_T(). 
[35:21..35:24): Int -> scala/Int# @@ -2211,6 +2214,7 @@ Occurrences: Synthetics: [24:0..24:0): => *(x$1) +[34:8..34:20):given_Double => *(intValue) [40:8..40:15):given_Y => *(given_X) expect/Issue1749.scala From 0b812bde5d8ff18badf4db6e19d950d68c9ecac7 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 18:00:27 +0200 Subject: [PATCH 427/465] Drop priority change warnings that don't qualify Drop priority change warnings if one the mentioned references does not succeed via tryImplicit. --- .../dotty/tools/dotc/typer/Implicits.scala | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index e3615ce40592..9c23036fa865 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1301,9 +1301,10 @@ trait Implicits: private def searchImplicit(eligible: List[Candidate], contextual: Boolean): SearchResult = // A map that associates a priority change warning (between -source 3.4 and 3.6) - // with a candidate ref mentioned in the warning. We report the associated - // message if the candidate ref is part of the result of the implicit search - var priorityChangeWarnings = mutable.ListBuffer[(TermRef, Message)]() + // with the candidate refs mentioned in the warning. We report the associated + // message if both candidates qualify in tryImplicit and at least one of the candidates + // is part of the result of the implicit search. + val priorityChangeWarnings = mutable.ListBuffer[(TermRef, TermRef, Message)]() /** Compare `alt1` with `alt2` to determine which one should be chosen. 
* @@ -1322,7 +1323,7 @@ trait Implicits: def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) def warn(msg: Message) = - priorityChangeWarnings += (alt1.ref -> msg) += (alt2.ref -> msg) + priorityChangeWarnings += ((alt1.ref, alt2.ref, msg)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else @@ -1440,7 +1441,11 @@ trait Implicits: // need a candidate better than `cand` healAmbiguous(fail, newCand => compareAlternatives(newCand, cand) > 0) - else rank(remaining, found, fail :: rfailures) + else + // keep only warnings that don't involve the failed candidate reference + priorityChangeWarnings.filterInPlace: (ref1, ref2, _) => + ref1 != cand.ref && ref2 != cand.ref + rank(remaining, found, fail :: rfailures) case best: SearchSuccess => if (ctx.mode.is(Mode.ImplicitExploration) || isCoherent) best @@ -1596,8 +1601,9 @@ trait Implicits: throw ex val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) - for (ref, msg) <- priorityChangeWarnings do - if result.found.contains(ref) then report.warning(msg, srcPos) + for (ref1, ref2, msg) <- priorityChangeWarnings do + if result.found.exists(ref => ref == ref1 || ref == ref2) then + report.warning(msg, srcPos) result end searchImplicit From 1d993a7099e102233125b210f54da4c33854f2e7 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 18:40:55 +0200 Subject: [PATCH 428/465] Add test for #20484 --- tests/pos/i20484.scala | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 tests/pos/i20484.scala diff --git a/tests/pos/i20484.scala b/tests/pos/i20484.scala new file mode 100644 index 000000000000..2f02e6206101 --- /dev/null +++ b/tests/pos/i20484.scala @@ -0,0 +1,3 @@ +given Int = ??? +given Char = ??? 
+val a = summon[Int] \ No newline at end of file From 3677eaf8d24ecc1b0b95aac63471e42c025bea71 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 3 Jul 2024 04:42:33 -0700 Subject: [PATCH 429/465] Use final result type to check selector bound --- compiler/src/dotty/tools/dotc/transform/CheckUnused.scala | 2 +- tests/pos/i20860.scala | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i20860.scala diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index bd4ef73d6eea..ba77167de736 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -728,7 +728,7 @@ object CheckUnused: if selector.isGiven then // Further check that the symbol is a given or implicit and conforms to the bound sym.isOneOf(Given | Implicit) - && (selector.bound.isEmpty || sym.info <:< selector.boundTpe) + && (selector.bound.isEmpty || sym.info.finalResultType <:< selector.boundTpe) else // Normal wildcard, check that the symbol is not a given (but can be implicit) !sym.is(Given) diff --git a/tests/pos/i20860.scala b/tests/pos/i20860.scala new file mode 100644 index 000000000000..1e1ddea11b75 --- /dev/null +++ b/tests/pos/i20860.scala @@ -0,0 +1,3 @@ +def `i20860 use result to check selector bound`: Unit = + import Ordering.Implicits.given Ordering[?] 
+ summon[Ordering[Seq[Int]]] From 876b64810cca7b3282643c6bafe1e0ef05b07e46 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 3 Jul 2024 12:06:30 +0200 Subject: [PATCH 430/465] Add changelog for 3.5.0-RC3 --- changelogs/3.5.0-RC3.md | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 changelogs/3.5.0-RC3.md diff --git a/changelogs/3.5.0-RC3.md b/changelogs/3.5.0-RC3.md new file mode 100644 index 000000000000..a7a2d164d5a7 --- /dev/null +++ b/changelogs/3.5.0-RC3.md @@ -0,0 +1,26 @@ +# Backported fixes + +- Release .zip instead of .tar.gz for windows in sdkman [#20630](https://github.com/scala/scala3/pull/20630) +- SIP 46 - read classpath from file, remove lib directory in distribution [#20631](https://github.com/scala/scala3/pull/20631) +- Bump scala-cli to 1.4.0 [#20859](https://github.com/scala/scala3/pull/20859) +- Priority warning fix alternative [#20487](https://github.com/scala/scala3/pull/20487) +- Add --skip-cli-updates by default to the scala command [#20900](https://github.com/scala/scala3/pull/20900) +- Upgrade Scala 2 to 2.13.14 (was 2.13.12) [#20902](https://github.com/scala/scala3/pull/20902) +- fix issue 20901: etaCollapse context bound type [#20910](https://github.com/scala/scala3/pull/20910) +- Use final result type to check selector bound [#20989](https://github.com/scala/scala3/pull/20989) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC2..3.5.0-RC3` these are: + +``` + 6 Wojciech Mazur + 5 Jamie Thompson + 5 Martin Odersky + 4 Hamza Remmal + 1 Hamza REMMAL + 1 Seth Tisue + 1 Som Snytt +``` From 6abb51aca2c028c4e523e7b5a11ce082acf87bd2 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 4 Jul 2024 12:29:47 +0200 Subject: [PATCH 431/465] Release 3.5.0-RC3 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 
41e5f3e082f5..f994ae74cb95 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,7 @@ object Build { val referenceVersion = "3.4.2" - val baseVersion = "3.5.0-RC2" + val baseVersion = "3.5.0-RC3" // LTS or Next val versionLine = "Next" From 7a19b325da0983019e45a46d41203e73e71452c0 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 29 May 2024 18:26:08 +0200 Subject: [PATCH 432/465] Fix symbol reference retrivial of `scala.caps.Caps` - it was changed from opaque type to class in #18463 --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 11a4a8473e79..6a1332e91afb 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -991,7 +991,7 @@ class Definitions { @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") - @tu lazy val Caps_Cap: TypeSymbol = CapsModule.requiredType("Cap") + @tu lazy val Caps_Cap: TypeSymbol = requiredClass("scala.caps.Cap") @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") From ec87e7deebeae2f95ce003aba66a23ef15ed962b Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Jul 2024 14:13:45 +0200 Subject: [PATCH 433/465] Refine implicit priority change warnings Fixes #21036 Fixes #20572 --- .../dotty/tools/dotc/typer/Implicits.scala | 32 +++++++++++++------ tests/neg/given-triangle.check | 4 +++ tests/{warn => neg}/given-triangle.scala | 4 +-- tests/{warn => pos}/bson/Test.scala | 0 tests/{warn => pos}/bson/bson.scala | 0 tests/pos/i20572.scala | 7 ++++ tests/pos/i21036.scala | 16 
++++++++++ tests/run/given-triangle.scala | 2 +- tests/warn/bson.check | 10 ------ tests/warn/given-triangle.check | 6 ---- 10 files changed, 51 insertions(+), 30 deletions(-) create mode 100644 tests/neg/given-triangle.check rename tests/{warn => neg}/given-triangle.scala (73%) rename tests/{warn => pos}/bson/Test.scala (100%) rename tests/{warn => pos}/bson/bson.scala (100%) create mode 100644 tests/pos/i20572.scala create mode 100644 tests/pos/i21036.scala delete mode 100644 tests/warn/bson.check delete mode 100644 tests/warn/given-triangle.check diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 9c23036fa865..f997ab52fa64 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1302,9 +1302,8 @@ trait Implicits: // A map that associates a priority change warning (between -source 3.4 and 3.6) // with the candidate refs mentioned in the warning. We report the associated - // message if both candidates qualify in tryImplicit and at least one of the candidates - // is part of the result of the implicit search. - val priorityChangeWarnings = mutable.ListBuffer[(TermRef, TermRef, Message)]() + // message if one of the critical candidates is part of the result of the implicit search. + val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() /** Compare `alt1` with `alt2` to determine which one should be chosen. * @@ -1319,11 +1318,16 @@ trait Implicits: * return new result with preferGeneral = true * 3.6 and higher: compare with preferGeneral = true * + * @param only2ndCritical If true only the second alternative is critical in case + * of a priority change. 
*/ - def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = + def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, only2ndCritical: Boolean = false): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) def warn(msg: Message) = - priorityChangeWarnings += ((alt1.ref, alt2.ref, msg)) + val critical = + if only2ndCritical then alt2.ref :: Nil + else alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += ((critical, msg)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else @@ -1443,8 +1447,8 @@ trait Implicits: compareAlternatives(newCand, cand) > 0) else // keep only warnings that don't involve the failed candidate reference - priorityChangeWarnings.filterInPlace: (ref1, ref2, _) => - ref1 != cand.ref && ref2 != cand.ref + priorityChangeWarnings.filterInPlace: (critical, _) => + !critical.contains(cand.ref) rank(remaining, found, fail :: rfailures) case best: SearchSuccess => if (ctx.mode.is(Mode.ImplicitExploration) || isCoherent) @@ -1454,7 +1458,15 @@ trait Implicits: val newPending = if (retained eq found) || remaining.isEmpty then remaining else remaining.filterConserve(cand => - compareAlternatives(retained, cand) <= 0) + compareAlternatives(retained, cand, only2ndCritical = true) <= 0) + // Here we drop some pending alternatives but retain in each case + // `retained`. Therefore, it's a priorty change only if the + // second alternative appears in the final search result. Otherwise + // we have the following scenario: + // - 1st alternative, bit not snd appears in final result + // - Hence, snd was eliminated either here, or otherwise by a direct + // comparison later. + // - Hence, no change in resolution. 
rank(newPending, retained, rfailures) case fail: SearchFailure => // The ambiguity happened in the current search: to recover we @@ -1601,8 +1613,8 @@ trait Implicits: throw ex val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) - for (ref1, ref2, msg) <- priorityChangeWarnings do - if result.found.exists(ref => ref == ref1 || ref == ref2) then + for (critical, msg) <- priorityChangeWarnings do + if result.found.exists(critical.contains(_)) then report.warning(msg, srcPos) result end searchImplicit diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check new file mode 100644 index 000000000000..bf92efac17fd --- /dev/null +++ b/tests/neg/given-triangle.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/given-triangle.scala:14:18 ------------------------------------------------------------- +14 |@main def Test = f // error + | ^ + |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f diff --git a/tests/warn/given-triangle.scala b/tests/neg/given-triangle.scala similarity index 73% rename from tests/warn/given-triangle.scala rename to tests/neg/given-triangle.scala index ee4888ed1e06..9cc23104fcce 100644 --- a/tests/warn/given-triangle.scala +++ b/tests/neg/given-triangle.scala @@ -1,5 +1,3 @@ -//> using options -source 3.6-migration - class A class B extends A class C extends A @@ -13,4 +11,4 @@ def f(using a: A, b: B, c: C) = println(b.getClass) println(c.getClass) -@main def Test = f // warn +@main def Test = f // error diff --git a/tests/warn/bson/Test.scala b/tests/pos/bson/Test.scala similarity index 100% rename from tests/warn/bson/Test.scala rename to tests/pos/bson/Test.scala diff --git a/tests/warn/bson/bson.scala b/tests/pos/bson/bson.scala similarity index 100% rename from tests/warn/bson/bson.scala rename to tests/pos/bson/bson.scala diff --git a/tests/pos/i20572.scala b/tests/pos/i20572.scala new file mode 100644 index 000000000000..4ee4490c839c 
--- /dev/null +++ b/tests/pos/i20572.scala @@ -0,0 +1,7 @@ +//> using options -Werror +trait Writes[T] +trait Format[T] extends Writes[T] +given [T: List]: Writes[T] = null +given [T]: Format[T] = null + +val _ = summon[Writes[Int]] diff --git a/tests/pos/i21036.scala b/tests/pos/i21036.scala new file mode 100644 index 000000000000..1c98346e4ef3 --- /dev/null +++ b/tests/pos/i21036.scala @@ -0,0 +1,16 @@ +//> using options -source 3.5 -Werror +trait SameRuntime[A, B] +trait BSONWriter[T] +trait BSONHandler[T] extends BSONWriter[T] + +opaque type Id = String +object Id: + given SameRuntime[Id, String] = ??? + +given BSONHandler[String] = ??? +given [T: BSONHandler]: BSONHandler[List[T]] = ??? + +given opaqueWriter[T, A](using rs: SameRuntime[T, A], writer: BSONWriter[A]): BSONWriter[T] = ??? + +val x = summon[BSONHandler[List[Id]]] // this doesn't emit warning +val y = summon[BSONWriter[List[Id]]] // this did emit warning diff --git a/tests/run/given-triangle.scala b/tests/run/given-triangle.scala index 5ddba8df8b7b..0b483e87f28c 100644 --- a/tests/run/given-triangle.scala +++ b/tests/run/given-triangle.scala @@ -1,4 +1,4 @@ -import language.future +import language.`3.6` class A class B extends A diff --git a/tests/warn/bson.check b/tests/warn/bson.check deleted file mode 100644 index 258ac4b4ff2c..000000000000 --- a/tests/warn/bson.check +++ /dev/null @@ -1,10 +0,0 @@ --- Warning: tests/warn/bson/Test.scala:5:60 ---------------------------------------------------------------------------- -5 |def typedMapHandler[K, V: BSONHandler] = stringMapHandler[V] // warn - | ^ - |Given search preference for bson.BSONWriter[Map[String, V]] between alternatives (bson.BSONWriter.mapWriter : [V²](using x$1: bson.BSONWriter[V²]): bson.BSONDocumentWriter[Map[String, V²]]) and (bson.BSONWriter.collectionWriter : - | [T, Repr <: Iterable[T]](using x$1: bson.BSONWriter[T], x$2: Repr ¬ Option[T]): bson.BSONWriter[Repr]) will change - |Current choice : the first alternative - |New choice 
from Scala 3.6: none - it's ambiguous - | - |where: V is a type in method typedMapHandler - | V² is a type variable diff --git a/tests/warn/given-triangle.check b/tests/warn/given-triangle.check deleted file mode 100644 index e849f9d4d642..000000000000 --- a/tests/warn/given-triangle.check +++ /dev/null @@ -1,6 +0,0 @@ --- Warning: tests/warn/given-triangle.scala:16:18 ---------------------------------------------------------------------- -16 |@main def Test = f // warn - | ^ - | Change in given search preference for A between alternatives (given_A : A) and (given_B : B) - | Previous choice : the second alternative - | New choice from Scala 3.6: the first alternative From 3e1ed72f299a6713da26a601d152dd26e671470f Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Jul 2024 16:21:22 +0200 Subject: [PATCH 434/465] Fix -source for neg test --- tests/neg/given-triangle.check | 4 ++-- tests/neg/given-triangle.scala | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index bf92efac17fd..f548df0078de 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -1,4 +1,4 @@ --- [E172] Type Error: tests/neg/given-triangle.scala:14:18 ------------------------------------------------------------- -14 |@main def Test = f // error +-- [E172] Type Error: tests/neg/given-triangle.scala:15:18 ------------------------------------------------------------- +15 |@main def Test = f // error | ^ |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f diff --git a/tests/neg/given-triangle.scala b/tests/neg/given-triangle.scala index 9cc23104fcce..61273ef93925 100644 --- a/tests/neg/given-triangle.scala +++ b/tests/neg/given-triangle.scala @@ -1,3 +1,4 @@ +//> using -source 3.5 class A class B extends A class C extends A From 450d233997354986dde7a627a237f3a13edcfb61 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Jul 
2024 16:53:27 +0200 Subject: [PATCH 435/465] Filter out more false positives in priority change warnings --- .../dotty/tools/dotc/typer/Implicits.scala | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index f997ab52fa64..86be195fae43 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1318,16 +1318,14 @@ trait Implicits: * return new result with preferGeneral = true * 3.6 and higher: compare with preferGeneral = true * + * @param disambiguate The call is used to disambiguate two successes, not for ranking. + * When ranking, we are always filtering out either > 0 or <= 0 results. + * In each case a priority change from 0 to -1 or vice versa makes no difference. * @param only2ndCritical If true only the second alternative is critical in case * of a priority change. */ - def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, only2ndCritical: Boolean = false): Int = + def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, disambiguate: Boolean = false, only2ndCritical: Boolean = false): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) - def warn(msg: Message) = - val critical = - if only2ndCritical then alt2.ref :: Nil - else alt1.ref :: alt2.ref :: Nil - priorityChangeWarnings += ((critical, msg)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else @@ -1336,6 +1334,12 @@ trait Implicits: if sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if cmp != prev then + def warn(msg: Message) = + if disambiguate || cmp > 0 || prev > 0 then + val critical = + if only2ndCritical then alt2.ref :: Nil + else alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += 
((critical, msg)) def choice(c: Int) = c match case -1 => "the second alternative" case 1 => "the first alternative" @@ -1362,7 +1366,7 @@ trait Implicits: */ def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => - var diff = compareAlternatives(alt1, alt2) + var diff = compareAlternatives(alt1, alt2, disambiguate = true) assert(diff <= 0) // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens @@ -1463,7 +1467,7 @@ trait Implicits: // `retained`. Therefore, it's a priorty change only if the // second alternative appears in the final search result. Otherwise // we have the following scenario: - // - 1st alternative, bit not snd appears in final result + // - 1st alternative, but not snd appears in final result // - Hence, snd was eliminated either here, or otherwise by a direct // comparison later. // - Hence, no change in resolution. From acffad65f474deab08cf32b90e99fd87e3d2c18c Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 8 Jul 2024 19:36:27 +0200 Subject: [PATCH 436/465] Fix priority change logic for ranking As worked out in collaboration with @EugeneFlesselle --- .../dotty/tools/dotc/typer/Implicits.scala | 37 ++++++++----------- tests/warn/i21036a.check | 6 +++ tests/warn/i21036a.scala | 7 ++++ tests/warn/i21036b.check | 6 +++ tests/warn/i21036b.scala | 7 ++++ 5 files changed, 41 insertions(+), 22 deletions(-) create mode 100644 tests/warn/i21036a.check create mode 100644 tests/warn/i21036a.scala create mode 100644 tests/warn/i21036b.check create mode 100644 tests/warn/i21036b.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 86be195fae43..45c8731c553e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1305,6 +1305,9 @@ trait Implicits: // message if 
one of the critical candidates is part of the result of the implicit search. val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() + def isWarnPriorityChangeVersion(sv: SourceVersion): Boolean = + sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` + /** Compare `alt1` with `alt2` to determine which one should be chosen. * * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1321,25 +1324,21 @@ trait Implicits: * @param disambiguate The call is used to disambiguate two successes, not for ranking. * When ranking, we are always filtering out either > 0 or <= 0 results. * In each case a priority change from 0 to -1 or vice versa makes no difference. - * @param only2ndCritical If true only the second alternative is critical in case - * of a priority change. */ - def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, disambiguate: Boolean = false, only2ndCritical: Boolean = false): Int = + def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, disambiguate: Boolean = false): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else var cmp = comp(using searchContext()) val sv = Feature.sourceVersion - if sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then + if isWarnPriorityChangeVersion(sv) then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) - if cmp != prev then + if disambiguate && cmp != prev then def warn(msg: Message) = - if disambiguate || cmp > 0 || prev > 0 then - val critical = - if only2ndCritical then alt2.ref :: Nil - else alt1.ref :: alt2.ref :: Nil - priorityChangeWarnings += ((critical, msg)) + val critical = alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += ((critical, msg)) + implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}, $disambiguate") def choice(c: Int) = c match case -1 
=> "the second alternative" case 1 => "the first alternative" @@ -1356,7 +1355,9 @@ trait Implicits: |Previous choice : ${choice(prev)} |New choice from Scala 3.6: ${choice(cmp)}""") cmp - else cmp + else cmp max prev + // When ranking, we keep the better of cmp and prev, which ends up retaining a candidate + // if it is retained in either version. else cmp end compareAlternatives @@ -1367,7 +1368,8 @@ trait Implicits: def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2, disambiguate = true) - assert(diff <= 0) // diff > 0 candidates should already have been eliminated in `rank` + assert(diff <= 0 || isWarnPriorityChangeVersion(Feature.sourceVersion)) + // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens else if diff == 0 && alt2.isExtension then @@ -1461,16 +1463,7 @@ trait Implicits: case retained: SearchSuccess => val newPending = if (retained eq found) || remaining.isEmpty then remaining - else remaining.filterConserve(cand => - compareAlternatives(retained, cand, only2ndCritical = true) <= 0) - // Here we drop some pending alternatives but retain in each case - // `retained`. Therefore, it's a priorty change only if the - // second alternative appears in the final search result. Otherwise - // we have the following scenario: - // - 1st alternative, but not snd appears in final result - // - Hence, snd was eliminated either here, or otherwise by a direct - // comparison later. - // - Hence, no change in resolution. 
+ else remaining.filterConserve(newCand => compareAlternatives(newCand, retained) >= 0) rank(newPending, retained, rfailures) case fail: SearchFailure => // The ambiguity happened in the current search: to recover we diff --git a/tests/warn/i21036a.check b/tests/warn/i21036a.check new file mode 100644 index 000000000000..673c01374ef3 --- /dev/null +++ b/tests/warn/i21036a.check @@ -0,0 +1,6 @@ +-- Warning: tests/warn/i21036a.scala:7:17 ------------------------------------------------------------------------------ +7 |val y = summon[A] // warn + | ^ + | Given search preference for A between alternatives (b : B) and (a : A) will change + | Current choice : the first alternative + | New choice from Scala 3.6: the second alternative diff --git a/tests/warn/i21036a.scala b/tests/warn/i21036a.scala new file mode 100644 index 000000000000..ab97429852d6 --- /dev/null +++ b/tests/warn/i21036a.scala @@ -0,0 +1,7 @@ +//> using options -source 3.5 +trait A +trait B extends A +given b: B = ??? +given a: A = ??? + +val y = summon[A] // warn \ No newline at end of file diff --git a/tests/warn/i21036b.check b/tests/warn/i21036b.check new file mode 100644 index 000000000000..ff7fdfd7a87c --- /dev/null +++ b/tests/warn/i21036b.check @@ -0,0 +1,6 @@ +-- Warning: tests/warn/i21036b.scala:7:17 ------------------------------------------------------------------------------ +7 |val y = summon[A] // warn + | ^ + | Change in given search preference for A between alternatives (b : B) and (a : A) + | Previous choice : the first alternative + | New choice from Scala 3.6: the second alternative diff --git a/tests/warn/i21036b.scala b/tests/warn/i21036b.scala new file mode 100644 index 000000000000..16dd72266613 --- /dev/null +++ b/tests/warn/i21036b.scala @@ -0,0 +1,7 @@ +//> using options -source 3.6-migration +trait A +trait B extends A +given b: B = ??? +given a: A = ??? 
+ +val y = summon[A] // warn \ No newline at end of file From dc9246aa12bd317fb678eabdf6c6c4df859ecf83 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Jul 2024 18:22:45 +0200 Subject: [PATCH 437/465] Fix -source for neg test (2) --- tests/neg/given-triangle.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/neg/given-triangle.scala b/tests/neg/given-triangle.scala index 61273ef93925..16aca7c44dee 100644 --- a/tests/neg/given-triangle.scala +++ b/tests/neg/given-triangle.scala @@ -1,4 +1,4 @@ -//> using -source 3.5 +//> using options -source 3.5 class A class B extends A class C extends A From 22d9df094ca5d385289d11225ac6183900f20bdd Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 8 Jul 2024 15:22:57 +0200 Subject: [PATCH 438/465] Use pathing jars in cli commands --- dist/bin/common | 20 ++------------------ dist/bin/common.bat | 2 +- dist/bin/scalac.bat | 31 ++----------------------------- dist/bin/scaladoc | 11 ++--------- dist/bin/scaladoc.bat | 27 +-------------------------- project/Build.scala | 2 +- project/RepublishPlugin.scala | 14 +++++++++++--- 7 files changed, 20 insertions(+), 87 deletions(-) diff --git a/dist/bin/common b/dist/bin/common index 63e598d70d7e..2de8bdf9f99a 100644 --- a/dist/bin/common +++ b/dist/bin/common @@ -6,25 +6,9 @@ source "$PROG_HOME/bin/common-shared" # * The code below is for Dotty # *-------------------------------------------------*/ -load_classpath () { - command="$1" - psep_pattern="$2" - __CLASS_PATH="" - while IFS= read -r line || [ -n "$line" ]; do - # jna-5 only appropriate for some combinations - if ! 
[[ ( -n ${conemu-} || -n ${msys-}) && "$line" == "*jna-5*" ]]; then - if [ -n "$__CLASS_PATH" ]; then - __CLASS_PATH+="$psep_pattern" - fi - __CLASS_PATH+="$PROG_HOME/maven2/$line" - fi - done < "$PROG_HOME/etc/$command.classpath" - echo "$__CLASS_PATH" -} - compilerJavaClasspathArgs () { - toolchain="$(load_classpath "scala" "$PSEP")" - toolchain_extra="$(load_classpath "with_compiler" "$PSEP")" + toolchain="$PROG_HOME/lib/scala.jar" + toolchain_extra="$PROG_HOME/lib/with_compiler.jar" if [ -n "$toolchain_extra" ]; then toolchain+="$PSEP$toolchain_extra" diff --git a/dist/bin/common.bat b/dist/bin/common.bat index f9c35e432b36..510771d43b6e 100644 --- a/dist/bin/common.bat +++ b/dist/bin/common.bat @@ -38,6 +38,6 @@ if not defined _PROG_HOME ( set _EXITCODE=1 goto :eof ) -set "_ETC_DIR=%_PROG_HOME%\etc" +set "_LIB_DIR=%_PROG_HOME%\lib" set _PSEP=; diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index dbcbaf11b8e2..7ad368582127 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -88,17 +88,8 @@ goto :eof @rem output parameter: _JVM_CP_ARGS :compilerJavaClasspathArgs - -set "CP_FILE=%_ETC_DIR%\scala.classpath" -call :loadClasspathFromFile %CP_FILE% -set "__TOOLCHAIN=%_CLASS_PATH_RESULT%" - -set "CP_FILE=%_ETC_DIR%\with_compiler.classpath" -call :loadClasspathFromFile %CP_FILE% - -if defined _CLASS_PATH_RESULT ( - set "__TOOLCHAIN=%__TOOLCHAIN%%_PSEP%%_CLASS_PATH_RESULT%" -) +set "__TOOLCHAIN=%_LIB_DIR%\scala.jar" +set "__TOOLCHAIN=%__TOOLCHAIN%%_PSEP%%_LIB_DIR%\with_compiler.jar%" if defined _SCALA_CPATH ( set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" @@ -107,24 +98,6 @@ if defined _SCALA_CPATH ( ) goto :eof -@REM concatentate every line in "%_ARG_FILE%" with _PSEP -@REM arg 1 - file to read -:loadClasspathFromFile -set _ARG_FILE=%1 -set _CLASS_PATH_RESULT= -if exist "%_ARG_FILE%" ( - for /f "usebackq delims=" %%i in ("%_ARG_FILE%") do ( - set "_LIB=%_PROG_HOME%\maven2\%%i" - set "_LIB=!_LIB:/=\!" 
- if not defined _CLASS_PATH_RESULT ( - set "_CLASS_PATH_RESULT=!_LIB!" - ) else ( - set "_CLASS_PATH_RESULT=!_CLASS_PATH_RESULT!%_PSEP%!_LIB!" - ) - ) -) -goto :eof - @rem ######################################################################### @rem ## Cleanups diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 15bc0813f93a..f4ef37af00ee 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -36,6 +36,7 @@ CompilerMain=dotty.tools.dotc.Main DecompilerMain=dotty.tools.dotc.decompiler.Main ReplMain=dotty.tools.repl.Main ScriptingMain=dotty.tools.scripting.Main +JVM_CP_ARGS="$PROG_HOME/lib/scaladoc.jar" PROG_NAME=$CompilerMain @@ -52,12 +53,6 @@ addScrip() { script_args+=("'$1'") } -classpathArgs () { - CLASS_PATH="$(load_classpath "scaladoc" "$PSEP")" - - jvm_cp_args="-classpath \"$CLASS_PATH\"" -} - #for A in "$@" ; do echo "A[$A]" ; done ; exit 2 while [[ $# -gt 0 ]]; do @@ -79,12 +74,10 @@ case "$1" in esac done -classpathArgs - eval "\"$JAVACMD\"" \ ${JAVA_OPTS:-$default_java_opts} \ "${java_args[@]}" \ - "${jvm_cp_args-}" \ + -classpath "${JVM_CP_ARGS}" \ -Dscala.usejavacp=true \ "dotty.tools.scaladoc.Main" \ "${scala_args[@]}" \ diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index 16433a83f501..fe4055633e02 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -21,8 +21,6 @@ call :args %* @rem ######################################################################### @rem ## Main -call :classpathArgs - if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS% ) else ( set _JAVA_OPTS=%_DEFAULT_JAVA_OPTS% ) @@ -31,7 +29,7 @@ if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS% set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% ^ --classpath "%_CLASS_PATH%" ^ +-classpath "%_LIB_DIR%\scaladoc.jar" ^ -Dscala.usejavacp=true ^ dotty.tools.scaladoc.Main %_SCALA_ARGS% %_RESIDUAL_ARGS% if not %ERRORLEVEL%==0 ( @@ -103,29 +101,6 @@ goto :eof set _RESIDUAL_ARGS=%_RESIDUAL_ARGS% %~1 goto :eof -@rem output parameter: _CLASS_PATH -:classpathArgs -set "_ETC_DIR=%_PROG_HOME%\etc" -@rem keep list in sync with bash script `bin\scaladoc` ! -call :loadClasspathFromFile -goto :eof - -@REM concatentate every line in "%_ETC_DIR%\scaladoc.classpath" with _PSEP -:loadClasspathFromFile -set _CLASS_PATH= -if exist "%_ETC_DIR%\scaladoc.classpath" ( - for /f "usebackq delims=" %%i in ("%_ETC_DIR%\scaladoc.classpath") do ( - set "_LIB=%_PROG_HOME%\maven2\%%i" - set "_LIB=!_LIB:/=\!" - if not defined _CLASS_PATH ( - set "_CLASS_PATH=!_LIB!" - ) else ( - set "_CLASS_PATH=!_CLASS_PATH!%_PSEP%!_LIB!" - ) - ) -) -goto :eof - @rem ######################################################################### @rem ## Cleanups diff --git a/project/Build.scala b/project/Build.scala index f994ae74cb95..7a3154477f21 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2131,7 +2131,7 @@ object Build { republishRepo := target.value / "republish", packResourceDir += (republishRepo.value / "bin" -> "bin"), packResourceDir += (republishRepo.value / "maven2" -> "maven2"), - packResourceDir += (republishRepo.value / "etc" -> "etc"), + packResourceDir += (republishRepo.value / "lib" -> "lib"), republishCommandLibs += ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), republishCommandLibs += diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index e4bf40545a6b..8b95c6423e68 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -213,16 +213,24 @@ object RepublishPlugin extends AutoPlugin { val classpaths = coursierFetch(coursierJar, log, csrCacheDir, localRepo, 
resolvedLocal.map(_.id.toString)) if (commandLibs.nonEmpty) { - IO.createDirectory(republishDir / "etc") + IO.createDirectory(republishDir / "lib") for ((command, libs) <- commandLibs) { val (negated, actual) = libs.partition(_.startsWith("^!")) val subtractions = negated.map(_.stripPrefix("^!")) def compose(libs: List[String]): List[String] = libs.map(fuzzyFind(classpaths, _)).reduceOption(_ ++ _).map(_.distinct).getOrElse(Nil) - + + // Compute the classpath entries val entries = compose(actual).diff(compose(subtractions)) - IO.write(republishDir / "etc" / s"$command.classpath", entries.mkString("\n")) + // Generate the MANIFEST for the pathing jar + val manifest = new java.util.jar.Manifest(); + manifest.getMainAttributes().put(java.util.jar.Attributes.Name.MANIFEST_VERSION, "1.0"); + manifest.getMainAttributes().put(java.util.jar.Attributes.Name.CLASS_PATH, entries.map(e => s"../maven2/$e").mkString(" ")) + // Write the pathing jar to the Disk + val file = republishDir / "lib" / s"$command.jar" + val jar = new java.util.jar.JarOutputStream(new java.io.FileOutputStream(file), manifest) + jar.close() } } From 1910ea91d0dc8af5047f5a845ce09476cfa86183 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 8 Jul 2024 19:18:51 +0200 Subject: [PATCH 439/465] Add support for Class-Path entries in Manifest --- .../dotc/classpath/ClassPathFactory.scala | 21 +++++++++++++++++-- compiler/src/dotty/tools/io/ClassPath.scala | 13 ++++++++---- dist/bin/scalac | 1 + dist/bin/scalac.bat | 2 +- dist/bin/scaladoc | 1 + dist/bin/scaladoc.bat | 1 + project/RepublishPlugin.scala | 2 +- 7 files changed, 33 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala index 0b66f339bf53..080f8d4e63d2 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala @@ -7,6 +7,7 @@ import 
dotty.tools.io.{AbstractFile, VirtualDirectory} import FileUtils.* import dotty.tools.io.ClassPath import dotty.tools.dotc.core.Contexts.* +import java.nio.file.Files /** * Provides factory methods for classpath. When creating classpath instances for a given path, @@ -52,14 +53,30 @@ class ClassPathFactory { // Internal protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = - for { + val files = for { file <- expandPath(path, expand) dir <- { def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None Option(AbstractFile.getDirectory(file)).orElse(asImage) } } - yield newClassPath(dir) + yield dir + + val expanded = + if scala.util.Properties.propOrFalse("scala.expandjavacp") then + for + file <- files + a <- ClassPath.expandManifestPath(file.absolutePath) + path = java.nio.file.Paths.get(a.toURI()).nn + if Files.exists(path) + yield + newClassPath(AbstractFile.getFile(path)) + else + Seq.empty + + files.map(newClassPath) ++ expanded + + end classesInPathImpl private def createSourcePath(file: AbstractFile)(using Context): ClassPath = if (file.isJarOrZip) diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index f77bc1efca91..01a3f2cc1870 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -152,13 +152,18 @@ object ClassPath { val baseDir = file.parent new Jar(file).classPathElements map (elem => - specToURL(elem) getOrElse (baseDir / elem).toURL + specToURL(elem, baseDir) getOrElse (baseDir / elem).toURL ) } - def specToURL(spec: String): Option[URL] = - try Some(new URI(spec).toURL) - catch case _: MalformedURLException | _: URISyntaxException => None + def specToURL(spec: String, basedir: Directory): Option[URL] = + try + val uri = new URI(spec) + if uri.isAbsolute() then Some(uri.toURL()) + else + Some(basedir.resolve(Path(spec)).toURL) + catch + case _: MalformedURLException | _: 
URISyntaxException => None def manifests: List[java.net.URL] = { import scala.jdk.CollectionConverters.EnumerationHasAsScala diff --git a/dist/bin/scalac b/dist/bin/scalac index d9bd21ca425b..a527d9767749 100755 --- a/dist/bin/scalac +++ b/dist/bin/scalac @@ -86,6 +86,7 @@ eval "\"$JAVACMD\"" \ ${JAVA_OPTS:-$default_java_opts} \ "${java_args[@]}" \ "-classpath \"$jvm_cp_args\"" \ + "-Dscala.expandjavacp=true" \ "-Dscala.usejavacp=true" \ "-Dscala.home=\"$PROG_HOME\"" \ "dotty.tools.MainGenericCompiler" \ diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index 7ad368582127..e2898bdc2890 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -24,7 +24,7 @@ call :compilerJavaClasspathArgs @rem we need to escape % in the java command path, for some reason this doesnt work in common.bat set "_JAVACMD=!_JAVACMD:%%=%%%%!" -call "%_JAVACMD%" %_JAVA_ARGS% -classpath "%_JVM_CP_ARGS%" "-Dscala.usejavacp=true" "-Dscala.home=%_PROG_HOME%" dotty.tools.MainGenericCompiler %_SCALA_ARGS% +call "%_JAVACMD%" %_JAVA_ARGS% -classpath "%_JVM_CP_ARGS%" "-Dscala.usejavacp=true" "-Dscala.expandjavacp=true" "-Dscala.home=%_PROG_HOME%" dotty.tools.MainGenericCompiler %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1 goto end diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index f4ef37af00ee..0af5a2b55acb 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -78,6 +78,7 @@ eval "\"$JAVACMD\"" \ ${JAVA_OPTS:-$default_java_opts} \ "${java_args[@]}" \ -classpath "${JVM_CP_ARGS}" \ + -Dscala.expandjavacp=true \ -Dscala.usejavacp=true \ "dotty.tools.scaladoc.Main" \ "${scala_args[@]}" \ diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index fe4055633e02..b9e4820b006d 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -30,6 +30,7 @@ set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% ^ -classpath "%_LIB_DIR%\scaladoc.jar" ^ +-Dscala.expandjavacp=true ^ -Dscala.usejavacp=true ^ dotty.tools.scaladoc.Main %_SCALA_ARGS% %_RESIDUAL_ARGS% if not %ERRORLEVEL%==0 ( diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 8b95c6423e68..5611af798b33 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -220,7 +220,7 @@ object RepublishPlugin extends AutoPlugin { def compose(libs: List[String]): List[String] = libs.map(fuzzyFind(classpaths, _)).reduceOption(_ ++ _).map(_.distinct).getOrElse(Nil) - + // Compute the classpath entries val entries = compose(actual).diff(compose(subtractions)) // Generate the MANIFEST for the pathing jar From 1a1a77fcf925a82e098a854e0668b3f75eef048c Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 10 Jul 2024 22:10:51 +0200 Subject: [PATCH 440/465] expand classpath in scala_legacy --- dist/bin/scala_legacy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dist/bin/scala_legacy b/dist/bin/scala_legacy index bd69d40c2b97..18fc6d874e34 100755 --- a/dist/bin/scala_legacy +++ b/dist/bin/scala_legacy @@ -65,7 +65,7 @@ done # exec here would prevent onExit from being called, leaving terminal in unusable state compilerJavaClasspathArgs [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 -eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" +eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "-Dscala.expandjavacp=true" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" scala_exit_status=$? 
From fad86e392dc45f72c326aa57461d272d95dd59f9 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 11 Jul 2024 10:23:09 +0200 Subject: [PATCH 441/465] Add changelog for 3.5.0-RC4 --- changelogs/3.5.0-RC4.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 changelogs/3.5.0-RC4.md diff --git a/changelogs/3.5.0-RC4.md b/changelogs/3.5.0-RC4.md new file mode 100644 index 000000000000..75e72870d6f4 --- /dev/null +++ b/changelogs/3.5.0-RC4.md @@ -0,0 +1,19 @@ +# Backported fixes + +- Refine implicit priority change warnings [#21045](https://github.com/scala/scala3/pull/21045) +- Use pathing jars in cli commands [#21121](https://github.com/scala/scala3/pull/21121) +- expand classpath of pathing jars in scala_legacy command [#21160](https://github.com/scala/scala3/pull/21160) +- Fix symbol reference retrivial of `scala.caps.Caps` [#20493](https://github.com/scala/scala3/pull/20493) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC3..3.5.0-RC4` these are: + +``` + 5 Martin Odersky + 3 Wojciech Mazur + 2 Hamza REMMAL + 1 Jamie Thompson +``` From 97fc22c3331a8cd1aca0cd563e99328815a2a9e6 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 11 Jul 2024 10:24:08 +0200 Subject: [PATCH 442/465] Release 3.5.0-RC4 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 7a3154477f21..45402aebc9c4 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,7 @@ object Build { val referenceVersion = "3.4.2" - val baseVersion = "3.5.0-RC3" + val baseVersion = "3.5.0-RC4" // LTS or Next val versionLine = "Next" From a5514c58c830a79fc8e7c62f8a18299bf3fe119e Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 12 Jul 2024 17:44:11 +0200 Subject: [PATCH 443/465] emit generatedNonLocalClass in backend when callback is not enabled --- 
compiler/src/dotty/tools/backend/jvm/CodeGen.scala | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index 2286ad6c2c25..c5b0ec0929b8 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -133,8 +133,15 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( if (ctx.compilerCallback != null) ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - if isLocal then - ctx.withIncCallback(_.generatedLocalClass(sourceFile, clsFile.jpath)) + ctx.withIncCallback: cb => + if isLocal then + cb.generatedLocalClass(sourceFile, clsFile.jpath) + else if !cb.enabled() then + // callback is not enabled, so nonLocalClasses were not reported in ExtractAPI + val fullClassName = atPhase(sbtExtractDependenciesPhase) { + ExtractDependencies.classNameAsString(claszSymbol) + } + cb.generatedNonLocalClass(sourceFile, clsFile.jpath, className, fullClassName) } } From 73428165d3c525e3ea3aca039cf1a75eeea99c76 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 16 Jul 2024 19:46:43 +0200 Subject: [PATCH 444/465] add test to assert classes are still reported --- .../xsbt/ExtractUsedNamesSpecification.scala | 5 +- .../test/xsbt/ProductsSpecification.scala | 41 ++++++++ .../xsbt/ScalaCompilerForUnitTesting.scala | 95 +++++++++++-------- sbt-bridge/test/xsbti/TestCallback.scala | 4 + 4 files changed, 103 insertions(+), 42 deletions(-) create mode 100644 sbt-bridge/test/xsbt/ProductsSpecification.scala diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala index e47371175de6..0abefe2985c3 100644 --- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala @@ -1,7 +1,6 @@ package xsbt import 
xsbti.UseScope -import ScalaCompilerForUnitTesting.Callbacks import org.junit.{ Test, Ignore } import org.junit.Assert._ @@ -227,9 +226,9 @@ class ExtractUsedNamesSpecification { def findPatMatUsages(in: String): Set[String] = { val compilerForTesting = new ScalaCompilerForUnitTesting - val (_, Callbacks(callback, _)) = + val output = compilerForTesting.compileSrcs(List(List(sealedClass, in))) - val clientNames = callback.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) + val clientNames = output.analysis.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) val names: Set[String] = clientNames.flatMap { case (_, usages) => diff --git a/sbt-bridge/test/xsbt/ProductsSpecification.scala b/sbt-bridge/test/xsbt/ProductsSpecification.scala new file mode 100644 index 000000000000..adee351b5289 --- /dev/null +++ b/sbt-bridge/test/xsbt/ProductsSpecification.scala @@ -0,0 +1,41 @@ +package xsbt + +import org.junit.Assert.* +import org.junit.Ignore +import org.junit.Test + +import java.io.File +import java.nio.file.Path +import java.nio.file.Paths + +class ProductsSpecification { + + @Test + def extractNonLocalClassesNoInc = { + val src = + """package example + | + |class A { + | class B + | def foo = + | class C + |}""".stripMargin + val output = compiler.compileSrcsNoInc(src) + val srcFile = output.srcFiles.head + val (srcNames, binaryNames) = output.analysis.classNames(srcFile).unzip // non local class names + + assertFalse(output.analysis.enabled()) // inc phases are disabled + assertTrue(output.analysis.apis.isEmpty) // extract-api did not run + assertTrue(output.analysis.usedNamesAndScopes.isEmpty) // extract-dependencies did not run + + // note that local class C is not included, classNames only records non local classes + val expectedBinary = Set("example.A", "example.A$B") + assertEquals(expectedBinary, binaryNames.toSet) + + // note that local class C is not included, classNames only records non local classes + val expectedSrc = Set("example.A", 
"example.A.B") + assertEquals(expectedSrc, srcNames.toSet) + } + + private def compiler = new ScalaCompilerForUnitTesting +} diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index f17be692ee50..a5a969ee48b9 100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -3,6 +3,7 @@ package xsbt import xsbti.compile.{CompileProgress, SingleOutput} import java.io.File +import java.nio.file.Path import xsbti._ import sbt.io.IO import xsbti.api.{ ClassLike, Def, DependencyContext } @@ -15,6 +16,8 @@ import dotty.tools.xsbt.CompilerBridge import TestCallback.ExtractedClassDependencies import ScalaCompilerForUnitTesting.Callbacks +case class CompileOutput(srcFiles: Seq[VirtualFileRef], classesOutput: Path, analysis: TestCallback, progress: TestCompileProgress) + object ScalaCompilerForUnitTesting: case class Callbacks(analysis: TestCallback, progress: TestCompileProgress) @@ -25,29 +28,24 @@ object ScalaCompilerForUnitTesting: class ScalaCompilerForUnitTesting { def extractEnteredPhases(srcs: String*): Seq[List[String]] = { - val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(srcs*) - val run = testProgress.runs.head - tempSrcFiles.map(src => run.unitPhases(src.id)) + val output = compileSrcs(srcs*) + val run = output.progress.runs.head + output.srcFiles.map(src => run.unitPhases(src.id)) } - def extractTotal(srcs: String*)(extraSourcePath: String*): Int = { - val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(List(srcs.toList), extraSourcePath.toList) - val run = testProgress.runs.head - run.total - } + def extractTotal(srcs: String*)(extraSourcePath: String*): Int = + compileSrcs(List(srcs.toList), extraSourcePath.toList).progress.runs.head.total - def extractProgressPhases(srcs: String*): List[String] = { - val (_, Callbacks(_, testProgress)) = compileSrcs(srcs*) - testProgress.runs.head.phases - } + def 
extractProgressPhases(srcs: String*): List[String] = + compileSrcs(srcs*).progress.runs.head.phases /** * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. */ def extractApiFromSrc(src: String): Seq[ClassLike] = { - val (Seq(tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(src) - analysisCallback.apis(tempSrcFile) + val output = compileSrcs(src) + output.analysis.apis(output.srcFiles.head) } /** @@ -55,8 +53,8 @@ class ScalaCompilerForUnitTesting { * extracted by ExtractAPI class. */ def extractApisFromSrcs(srcs: List[String]*): Seq[Seq[ClassLike]] = { - val (tempSrcFiles, Callbacks(analysisCallback, _)) = compileSrcs(srcs.toList) - tempSrcFiles.map(analysisCallback.apis) + val output = compileSrcs(srcs.toList) + output.srcFiles.map(output.analysis.apis) } /** @@ -73,15 +71,16 @@ class ScalaCompilerForUnitTesting { assertDefaultScope: Boolean = true ): Map[String, Set[String]] = { // we drop temp src file corresponding to the definition src file - val (Seq(_, tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(definitionSrc, actualSrc) + val output = compileSrcs(definitionSrc, actualSrc) + val analysis = output.analysis if (assertDefaultScope) for { - (className, used) <- analysisCallback.usedNamesAndScopes - analysisCallback.TestUsedName(name, scopes) <- used + (className, used) <- analysis.usedNamesAndScopes + analysis.TestUsedName(name, scopes) <- used } assert(scopes.size() == 1 && scopes.contains(UseScope.Default), s"$className uses $name in $scopes") - val classesInActualSrc = analysisCallback.classNames(tempSrcFile).map(_._1) - classesInActualSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + val classesInActualSrc = analysis.classNames(output.srcFiles.head).map(_._1) + classesInActualSrc.map(className => className -> analysis.usedNames(className)).toMap } /** @@ -91,11 +90,11 @@ class ScalaCompilerForUnitTesting { * Only the names used in the last 
src file are returned. */ def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = { - val (srcFiles, Callbacks(analysisCallback, _)) = compileSrcs(sources*) - srcFiles + val output = compileSrcs(sources*) + output.srcFiles .map { srcFile => - val classesInSrc = analysisCallback.classNames(srcFile).map(_._1) - classesInSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + val classesInSrc = output.analysis.classNames(srcFile).map(_._1) + classesInSrc.map(className => className -> output.analysis.usedNames(className)).toMap } .reduce(_ ++ _) } @@ -113,15 +112,15 @@ class ScalaCompilerForUnitTesting { * file system-independent way of testing dependencies between source code "files". */ def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = { - val (_, Callbacks(testCallback, _)) = compileSrcs(srcs) + val analysis = compileSrcs(srcs).analysis - val memberRefDeps = testCallback.classDependencies collect { + val memberRefDeps = analysis.classDependencies collect { case (target, src, DependencyByMemberRef) => (src, target) } - val inheritanceDeps = testCallback.classDependencies collect { + val inheritanceDeps = analysis.classDependencies collect { case (target, src, DependencyByInheritance) => (src, target) } - val localInheritanceDeps = testCallback.classDependencies collect { + val localInheritanceDeps = analysis.classDependencies collect { case (target, src, LocalDependencyByInheritance) => (src, target) } ExtractedClassDependencies.fromPairs(memberRefDeps, inheritanceDeps, localInheritanceDeps) @@ -142,12 +141,24 @@ class ScalaCompilerForUnitTesting { * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. 
*/ - def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil): (Seq[VirtualFile], Callbacks) = { + def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil, compileToJar: Boolean = false, incEnabled: Boolean = true): CompileOutput = { val temp = IO.createTemporaryDirectory - val analysisCallback = new TestCallback + val (forceSbtArgs, analysisCallback) = + if (incEnabled) + (Seq("-Yforce-sbt-phases"), new TestCallback) + else + (Seq.empty, new TestCallbackNoInc) val testProgress = new TestCompileProgress - val classesDir = new File(temp, "classes") - classesDir.mkdir() + val classesOutput = + if (compileToJar) { + val jar = new File(temp, "classes.jar") + jar.createNewFile() + jar + } else { + val dir = new File(temp, "classes") + dir.mkdir() + dir + } val bridge = new CompilerBridge @@ -164,16 +175,16 @@ class ScalaCompilerForUnitTesting { } val virtualSrcFiles = srcFiles.toArray - val classesDirPath = classesDir.getAbsolutePath.toString + val classesOutputPath = classesOutput.getAbsolutePath() val output = new SingleOutput: - def getOutputDirectory() = classesDir + def getOutputDirectory() = classesOutput val maybeSourcePath = if extraFiles.isEmpty then Nil else List("-sourcepath", temp.getAbsolutePath.toString) bridge.run( virtualSrcFiles, new TestDependencyChanges, - Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath) ++ maybeSourcePath, + (forceSbtArgs ++: Array("-classpath", classesOutputPath, "-usejavacp", "-d", classesOutputPath)) ++ maybeSourcePath, output, analysisCallback, new TestReporter, @@ -185,17 +196,23 @@ class ScalaCompilerForUnitTesting { srcFiles } - (files.flatten.toSeq, Callbacks(analysisCallback, testProgress)) + CompileOutput(files.flatten.toSeq, classesOutput.toPath, analysisCallback, testProgress) } - def compileSrcs(srcs: String*): (Seq[VirtualFile], Callbacks) = { + def compileSrcs(srcs: String*): CompileOutput = { compileSrcs(List(srcs.toList)) } + 
def compileSrcsNoInc(srcs: String*): CompileOutput = { + compileSrcs(List(srcs.toList), incEnabled = false) + } + + def compileSrcsToJar(srcs: String*): CompileOutput = + compileSrcs(List(srcs.toList), compileToJar = true) + private def prepareSrcFile(baseDir: File, fileName: String, src: String): VirtualFile = { val srcFile = new File(baseDir, fileName) IO.write(srcFile, src) new TestVirtualFile(srcFile.toPath) } } - diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala index 3398590b169a..9f6df75d84f0 100644 --- a/sbt-bridge/test/xsbti/TestCallback.scala +++ b/sbt-bridge/test/xsbti/TestCallback.scala @@ -11,6 +11,10 @@ import DependencyContext._ import java.{util => ju} import ju.Optional +class TestCallbackNoInc extends TestCallback { + override def enabled(): Boolean = false +} + class TestCallback extends AnalysisCallback2 { case class TestUsedName(name: String, scopes: ju.EnumSet[UseScope]) From 1e20d47d09a3b8e2f0e045a3428d89170632da35 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 17 Jul 2024 14:33:14 +0200 Subject: [PATCH 445/465] Add changelog for 3.5.0-RC5 --- changelogs/3.5.0-RC5.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 changelogs/3.5.0-RC5.md diff --git a/changelogs/3.5.0-RC5.md b/changelogs/3.5.0-RC5.md new file mode 100644 index 000000000000..405396223eb7 --- /dev/null +++ b/changelogs/3.5.0-RC5.md @@ -0,0 +1,14 @@ +# Backported fixes + +- emit generatedNonLocalClass in backend when callback is not enabled [#21186](https://github.com/scala/scala3/pull/21186) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC4..3.5.0-RC5` these are: + +``` + 2 Jamie Thompson + 2 Wojciech Mazur +``` From 8e6b582e17c428452eabbaa649695a07c1f541cc Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 17 Jul 2024 14:33:49 +0200 Subject: [PATCH 446/465] Release 3.5.0-RC5 --- project/Build.scala | 
2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 45402aebc9c4..cbf1b354b073 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,7 @@ object Build { val referenceVersion = "3.4.2" - val baseVersion = "3.5.0-RC4" + val baseVersion = "3.5.0-RC5" // LTS or Next val versionLine = "Next" From 318054e614f02d2d70ac3e3ec6bf6c99db39b4ba Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 24 Jul 2024 17:58:29 +0200 Subject: [PATCH 447/465] Revert "Approximate MatchTypes with lub of case bodies, if non-recursive" --- compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 7 ------- compiler/src/dotty/tools/dotc/typer/Typer.scala | 10 +--------- tests/pos/13633.scala | 2 +- tests/pos/Tuple.Drop.scala | 7 ------- tests/pos/Tuple.Elem.scala | 7 ------- tests/pos/i19710.scala | 11 ----------- 6 files changed, 2 insertions(+), 42 deletions(-) delete mode 100644 tests/pos/Tuple.Drop.scala delete mode 100644 tests/pos/Tuple.Elem.scala delete mode 100644 tests/pos/i19710.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index c2c502a984c4..93ed6e7d03a5 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2904,13 +2904,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp case tp: HKTypeLambda => tp - case tp: ParamRef => - val st = tp.superTypeNormalized - if st.exists then - disjointnessBoundary(st) - else - // workaround for when ParamRef#underlying returns NoType - defn.AnyType case tp: TypeProxy => disjointnessBoundary(tp.superTypeNormalized) case tp: WildcardType => diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 4cb695a15966..2a877a45b550 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ 
b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2569,15 +2569,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer report.error(MatchTypeScrutineeCannotBeHigherKinded(sel1Tpe), sel1.srcPos) val pt1 = if (bound1.isEmpty) pt else bound1.tpe val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1Tpe, pt1)) - val bound2 = if tree.bound.isEmpty then - val lub = cases1.foldLeft(defn.NothingType: Type): (acc, case1) => - if !acc.exists then NoType - else if case1.body.tpe.isProvisional then NoType - else acc | case1.body.tpe - if lub.exists then TypeTree(lub, inferred = true) - else bound1 - else bound1 - assignType(cpy.MatchTypeTree(tree)(bound2, sel1, cases1), bound2, sel1, cases1) + assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) } def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = tree.result match diff --git a/tests/pos/13633.scala b/tests/pos/13633.scala index 8883ef98d0be..ca0f7e68e81e 100644 --- a/tests/pos/13633.scala +++ b/tests/pos/13633.scala @@ -21,7 +21,7 @@ object Sums extends App: type Reverse[A] = ReverseLoop[A, EmptyTuple] - type PlusTri[A, B, C] <: Tuple = (A, B, C) match + type PlusTri[A, B, C] = (A, B, C) match case (false, false, false) => (false, false) case (true, false, false) | (false, true, false) | (false, false, true) => (false, true) case (true, true, false) | (true, false, true) | (false, true, true) => (true, false) diff --git a/tests/pos/Tuple.Drop.scala b/tests/pos/Tuple.Drop.scala deleted file mode 100644 index 9b88cc227966..000000000000 --- a/tests/pos/Tuple.Drop.scala +++ /dev/null @@ -1,7 +0,0 @@ -import compiletime.ops.int.* - -type Drop[T <: Tuple, N <: Int] <: Tuple = N match - case 0 => T - case S[n1] => T match - case EmptyTuple => EmptyTuple - case x *: xs => Drop[xs, n1] diff --git a/tests/pos/Tuple.Elem.scala b/tests/pos/Tuple.Elem.scala deleted file mode 100644 index 81494485c321..000000000000 --- a/tests/pos/Tuple.Elem.scala +++ 
/dev/null @@ -1,7 +0,0 @@ -import compiletime.ops.int.* - -type Elem[T <: Tuple, I <: Int] = T match - case h *: tail => - I match - case 0 => h - case S[j] => Elem[tail, j] diff --git a/tests/pos/i19710.scala b/tests/pos/i19710.scala deleted file mode 100644 index 03fd1e2d80b3..000000000000 --- a/tests/pos/i19710.scala +++ /dev/null @@ -1,11 +0,0 @@ -import scala.util.NotGiven - -type HasName1 = [n] =>> [x] =>> x match { - case n => true - case _ => false - } -@main def Test = { - summon[HasName1["foo"]["foo"] =:= true] - summon[NotGiven[HasName1["foo"]["bar"] =:= true]] - summon[Tuple.Filter[(1, "foo", 2, "bar"), HasName1["foo"]] =:= Tuple1["foo"]] // error -} From 51629a24ba5f59738600f46a45a9455dff9946e0 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 24 Jul 2024 19:53:08 +0200 Subject: [PATCH 448/465] Fix failing run-macros/type-show test --- tests/run-macros/type-show/Test_2.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/run-macros/type-show/Test_2.scala b/tests/run-macros/type-show/Test_2.scala index de845f3e84dd..3bc9da043885 100644 --- a/tests/run-macros/type-show/Test_2.scala +++ b/tests/run-macros/type-show/Test_2.scala @@ -23,7 +23,7 @@ object Test { """TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Nothing"), """+ """TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Any"))), """+ """MatchType("""+ - """TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "Int"), """+ // match type bound + """TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Any"), """+ // match type bound """ParamRef(binder, 0), """+ """List("""+ """MatchCase("""+ From 6a5e6e67ae639e905cae4480a5a0ec114ca07c3e Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 29 Jul 2024 13:42:07 +0200 Subject: [PATCH 449/465] Add changelog for 3.5.0-RC6 --- changelogs/3.5.0-RC6.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 changelogs/3.5.0-RC6.md diff --git a/changelogs/3.5.0-RC6.md b/changelogs/3.5.0-RC6.md new file mode 
100644 index 000000000000..77731f346750 --- /dev/null +++ b/changelogs/3.5.0-RC6.md @@ -0,0 +1,13 @@ +# Backported fixes + +- Revert "Approximate MatchTypes with lub of case bodies, if non-recursive" in 3.5.0 [#21266](https://github.com/scala/scala3/pull/21266) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC5..3.5.0-RC6` these are: + +``` + 4 Wojciech Mazur +``` From 1fb613f9ecb938d7cdc9270393cb2d0a48a3a81e Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 29 Jul 2024 13:42:28 +0200 Subject: [PATCH 450/465] Release 3.5.0-RC6 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index cbf1b354b073..e1a61d82aca7 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,7 @@ object Build { val referenceVersion = "3.4.2" - val baseVersion = "3.5.0-RC5" + val baseVersion = "3.5.0-RC6" // LTS or Next val versionLine = "Next" From b079b115cd7161850c01b93a6ac0d07b0c4bf0d7 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 18 Jul 2024 18:35:43 +0200 Subject: [PATCH 451/465] Prefer extensions over conversions and implicits for member selection Before the changes, if `isAsGoodValueType` was called with an extension and a given conversion, it would prefer the conversion over the extension, because only the former yielded true in `isGiven`. Which contradicted the logic from searchImplicit which preferred extension over conversions for member selection. 
--- .../src/dotty/tools/dotc/typer/Applications.scala | 14 ++++++-------- tests/pos/i19715.scala | 3 ++- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index c3369ac58e31..114372f1fa59 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1816,10 +1816,8 @@ trait Applications extends Compatibility { isAsGood(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) } case _ => // (3) - def isGiven(alt: TermRef) = - alt1.symbol.is(Given) && alt.symbol != defn.NotGivenClass def compareValues(tp1: Type, tp2: Type)(using Context) = - isAsGoodValueType(tp1, tp2, isGiven(alt1), isGiven(alt2)) + isAsGoodValueType(tp1, tp2, alt1.symbol.is(Implicit), alt2.symbol.is(Implicit)) tp2 match case tp2: MethodType => true // (3a) case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a) @@ -1856,7 +1854,7 @@ trait Applications extends Compatibility { * for overloading resolution (when `preferGeneral is false), and the opposite relation * `U <: T` or `U convertible to `T` for implicit disambiguation between givens * (when `preferGeneral` is true). For old-style implicit values, the 3.4 behavior is kept. - * If one of the alternatives is a given and the other is an implicit, the given wins. + * If one of the alternatives is an implicit and the other is a given (or an extension), the implicit loses. * * - In Scala 3.5 and Scala 3.6-migration, we issue a warning if the result under * Scala 3.6 differ wrt to the old behavior up to 3.5. @@ -1864,7 +1862,7 @@ trait Applications extends Compatibility { * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. 
*/ - def isAsGoodValueType(tp1: Type, tp2: Type, alt1isGiven: Boolean, alt2isGiven: Boolean)(using Context): Boolean = + def isAsGoodValueType(tp1: Type, tp2: Type, alt1IsImplicit: Boolean, alt2IsImplicit: Boolean)(using Context): Boolean = val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) if !preferGeneral || Feature.migrateTo3 && oldResolution then // Normal specificity test for overloading resolution (where `preferGeneral` is false) @@ -1882,7 +1880,7 @@ trait Applications extends Compatibility { if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) || oldResolution - || !alt1isGiven && !alt2isGiven + || alt1IsImplicit && alt2IsImplicit then // Intermediate rules: better means specialize, but map all type arguments downwards // These are enabled for 3.0-3.5, and for all comparisons between old-style implicits, @@ -1897,8 +1895,8 @@ trait Applications extends Compatibility { case _ => mapOver(t) (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) else - // New rules: better means generalize, givens always beat implicits - if alt1isGiven != alt2isGiven then alt1isGiven + // New rules: better means generalize, givens (and extensions) always beat implicits + if alt1IsImplicit != alt2IsImplicit then alt2IsImplicit else (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) end isAsGoodValueType diff --git a/tests/pos/i19715.scala b/tests/pos/i19715.scala index 91aeda5c1698..be5471ffa9b3 100644 --- a/tests/pos/i19715.scala +++ b/tests/pos/i19715.scala @@ -6,7 +6,8 @@ class NT(t: Tup): object NT: extension (x: NT) def app(n: Int): Boolean = true - given Conversion[NT, Tup] = _.toTup + given c1: Conversion[NT, Tup] = _.toTup + implicit def c2(t: NT): Tup = c1(t) def test = val nt = new NT(Tup()) From 07ccc8d9582183e0fd058e3860a7c1b8315b37ca Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Aug 2024 11:28:18 +0200 Subject: [PATCH 452/465] A left-biased variant for implicit/given pairs We now use a left-biased scheme, as follows. 
From 3.6 on: - A given x: X is better than a given or implicit y: Y if y can be instantiated/widened to X. - An implicit x: X is better than a given or implicit y: Y if y can be instantiated to a supertype of X. - Use owner score for givens as a tie breaker if after all other tests we still have an ambiguity. This is not transitive, so we need a separate scheme to work around that. Other change: - Drop special handling of NotGiven in prioritization. The previous logic pretended to do so, but was ineffective. --- .../tools/dotc/printing/Formatting.scala | 17 ++--- .../dotty/tools/dotc/reporting/messages.scala | 2 +- .../dotty/tools/dotc/typer/Applications.scala | 65 +++++++++++------ .../dotty/tools/dotc/typer/Implicits.scala | 39 ++++++++-- .../tools/dotc/StringFormatterTest.scala | 1 + tests/neg/given-triangle.check | 8 ++ tests/neg/i21212.check | 4 + tests/neg/i21212.scala | 11 +++ tests/neg/i21303/JavaEnum.java | 1 + tests/neg/i21303/Test.scala | 33 +++++++++ tests/neg/i2974.scala | 16 ++++ tests/neg/scala-uri.check | 14 ++++ tests/neg/scala-uri.scala | 30 ++++++++ tests/pos/given-priority.scala | 24 ++++++ tests/pos/i21212.scala | 22 ++++++ tests/pos/i21303/JavaEnum.java | 1 + tests/pos/i21303/Test.scala | 32 ++++++++ tests/pos/i21303a/JavaEnum.java | 1 + tests/pos/i21303a/Test.scala | 35 +++++++++ tests/pos/i21320.scala | 73 +++++++++++++++++++ tests/pos/i2974.scala | 3 +- tests/pos/scala-uri.scala | 22 ++++++ tests/pos/slick-migration-api-example.scala | 23 ++++++ tests/warn/i21036a.check | 6 +- tests/warn/i21036b.check | 6 +- 25 files changed, 445 insertions(+), 44 deletions(-) create mode 100644 tests/neg/i21212.check create mode 100644 tests/neg/i21212.scala create mode 100644 tests/neg/i21303/JavaEnum.java create mode 100644 tests/neg/i21303/Test.scala create mode 100644 tests/neg/i2974.scala create mode 100644 tests/neg/scala-uri.check create mode 100644 tests/neg/scala-uri.scala create mode 100644 tests/pos/given-priority.scala create mode 100644 
tests/pos/i21212.scala create mode 100644 tests/pos/i21303/JavaEnum.java create mode 100644 tests/pos/i21303/Test.scala create mode 100644 tests/pos/i21303a/JavaEnum.java create mode 100644 tests/pos/i21303a/Test.scala create mode 100644 tests/pos/i21320.scala create mode 100644 tests/pos/scala-uri.scala create mode 100644 tests/pos/slick-migration-api-example.scala diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 6f1c32beb822..43cac17e6318 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -2,8 +2,6 @@ package dotty.tools package dotc package printing -import scala.language.unsafeNulls - import scala.collection.mutable import core.* @@ -52,7 +50,11 @@ object Formatting { object ShowAny extends Show[Any]: def show(x: Any): Shown = x - class ShowImplicits3: + class ShowImplicits4: + given [X: Show]: Show[X | Null] with + def show(x: X | Null) = if x == null then "null" else CtxShow(toStr(x.nn)) + + class ShowImplicits3 extends ShowImplicits4: given Show[Product] = ShowAny class ShowImplicits2 extends ShowImplicits3: @@ -77,15 +79,10 @@ object Formatting { given [K: Show, V: Show]: Show[Map[K, V]] with def show(x: Map[K, V]) = CtxShow(x.map((k, v) => s"${toStr(k)} => ${toStr(v)}")) - end given given [H: Show, T <: Tuple: Show]: Show[H *: T] with def show(x: H *: T) = CtxShow(toStr(x.head) *: toShown(x.tail).asInstanceOf[Tuple]) - end given - - given [X: Show]: Show[X | Null] with - def show(x: X | Null) = if x == null then "null" else CtxShow(toStr(x.nn)) given Show[FlagSet] with def show(x: FlagSet) = x.flagsString @@ -148,8 +145,8 @@ object Formatting { private def treatArg(arg: Shown, suffix: String)(using Context): (String, String) = arg.runCtxShow match { case arg: Seq[?] 
if suffix.indexOf('%') == 0 && suffix.indexOf('%', 1) != -1 => val end = suffix.indexOf('%', 1) - val sep = StringContext.processEscapes(suffix.substring(1, end)) - (arg.mkString(sep), suffix.substring(end + 1)) + val sep = StringContext.processEscapes(suffix.substring(1, end).nn) + (arg.mkString(sep), suffix.substring(end + 1).nn) case arg: Seq[?] => (arg.map(showArg).mkString("[", ", ", "]"), suffix) case arg => diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index ceb8ecbc8e03..9a20f149a6d1 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -2955,7 +2955,7 @@ class MissingImplicitArgument( /** Default error message for non-nested ambiguous implicits. */ def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = - s"Ambiguous given instances: ${ambi.explanation}${location("of")}" + s"Ambiguous given instances: ${ambi.explanation}${location("of")}${ambi.priorityChangeWarningNote}" /** Default error messages for non-ambiguous implicits, or nested ambiguous * implicits. diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 114372f1fa59..2efe5282f025 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1748,6 +1748,17 @@ trait Applications extends Compatibility { else if sym2.is(Module) then compareOwner(sym1, cls2) else 0 + enum CompareScheme: + case Old // Normal specificity test for overloading resolution (where `preferGeneral` is false) + // and in mode Scala3-migration when we compare with the old Scala 2 rules. 
+ + case Intermediate // Intermediate rules: better means specialize, but map all type arguments downwards + // These are enabled for 3.0-3.4, or if OldImplicitResolution + // is specified, and also for all comparisons between old-style implicits, + + case New // New rules: better means generalize, givens (and extensions) always beat implicits + end CompareScheme + /** Compare two alternatives of an overloaded call or an implicit search. * * @param alt1, alt2 Non-overloaded references indicating the two choices @@ -1774,6 +1785,15 @@ trait Applications extends Compatibility { */ def compare(alt1: TermRef, alt2: TermRef, preferGeneral: Boolean = false)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { record("resolveOverloaded.compare") + val scheme = + val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) + if !preferGeneral || Feature.migrateTo3 && oldResolution then + CompareScheme.Old + else if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) + || oldResolution + || alt1.symbol.is(Implicit) && alt2.symbol.is(Implicit) + then CompareScheme.Intermediate + else CompareScheme.New /** Is alternative `alt1` with type `tp1` as good as alternative * `alt2` with type `tp2` ? 
@@ -1816,15 +1836,15 @@ trait Applications extends Compatibility { isAsGood(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) } case _ => // (3) - def compareValues(tp1: Type, tp2: Type)(using Context) = - isAsGoodValueType(tp1, tp2, alt1.symbol.is(Implicit), alt2.symbol.is(Implicit)) + def compareValues(tp2: Type)(using Context) = + isAsGoodValueType(tp1, tp2, alt1.symbol.is(Implicit)) tp2 match case tp2: MethodType => true // (3a) case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a) case tp2: PolyType => // (3b) - explore(compareValues(tp1, instantiateWithTypeVars(tp2))) + explore(compareValues(instantiateWithTypeVars(tp2))) case _ => // 3b) - compareValues(tp1, tp2) + compareValues(tp2) } /** Test whether value type `tp1` is as good as value type `tp2`. @@ -1862,9 +1882,8 @@ trait Applications extends Compatibility { * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. */ - def isAsGoodValueType(tp1: Type, tp2: Type, alt1IsImplicit: Boolean, alt2IsImplicit: Boolean)(using Context): Boolean = - val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) - if !preferGeneral || Feature.migrateTo3 && oldResolution then + def isAsGoodValueType(tp1: Type, tp2: Type, alt1IsImplicit: Boolean)(using Context): Boolean = + if scheme == CompareScheme.Old then // Normal specificity test for overloading resolution (where `preferGeneral` is false) // and in mode Scala3-migration when we compare with the old Scala 2 rules. 
isCompatible(tp1, tp2) @@ -1878,13 +1897,7 @@ trait Applications extends Compatibility { val tp1p = prepare(tp1) val tp2p = prepare(tp2) - if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) - || oldResolution - || alt1IsImplicit && alt2IsImplicit - then - // Intermediate rules: better means specialize, but map all type arguments downwards - // These are enabled for 3.0-3.5, and for all comparisons between old-style implicits, - // and in 3.5 and 3.6-migration when we compare with previous rules. + if scheme == CompareScheme.Intermediate || alt1IsImplicit then val flip = new TypeMap: def apply(t: Type) = t match case t @ AppliedType(tycon, args) => @@ -1895,9 +1908,7 @@ trait Applications extends Compatibility { case _ => mapOver(t) (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) else - // New rules: better means generalize, givens (and extensions) always beat implicits - if alt1IsImplicit != alt2IsImplicit then alt2IsImplicit - else (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) + (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) end isAsGoodValueType /** Widen the result type of synthetic given methods from the implementation class to the @@ -1968,13 +1979,19 @@ trait Applications extends Compatibility { // alternatives are the same after following ExprTypes, pick one of them // (prefer the one that is not a method, but that's arbitrary). if alt1.widenExpr =:= alt2 then -1 else 1 - else ownerScore match - case 1 => if winsType1 || !winsType2 then 1 else 0 - case -1 => if winsType2 || !winsType1 then -1 else 0 - case 0 => - if winsType1 != winsType2 then if winsType1 then 1 else -1 - else if alt1.symbol == alt2.symbol then comparePrefixes - else 0 + else + // For new implicit resolution, take ownerscore as more significant than type resolution + // Reason: People use owner hierarchies to explicitly prioritize, we should not + // break that by changing implicit priority of types. 
+ def drawOrOwner = + if scheme == CompareScheme.New then ownerScore else 0 + ownerScore match + case 1 => if winsType1 || !winsType2 then 1 else drawOrOwner + case -1 => if winsType2 || !winsType1 then -1 else drawOrOwner + case 0 => + if winsType1 != winsType2 then if winsType1 then 1 else -1 + else if alt1.symbol == alt2.symbol then comparePrefixes + else 0 end compareWithTypes if alt1.symbol.is(ConstructorProxy) && !alt2.symbol.is(ConstructorProxy) then -1 diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 45c8731c553e..d98fc87655bf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -549,6 +549,11 @@ object Implicits: /** An ambiguous implicits failure */ class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree, val nested: Boolean = false) extends SearchFailureType: + private[Implicits] var priorityChangeWarnings: List[Message] = Nil + + def priorityChangeWarningNote(using Context): String = + priorityChangeWarnings.map(msg => s"\n\nNote: $msg").mkString + def msg(using Context): Message = var str1 = err.refStr(alt1.ref) var str2 = err.refStr(alt2.ref) @@ -1330,7 +1335,7 @@ trait Implicits: if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else - var cmp = comp(using searchContext()) + val cmp = comp(using searchContext()) val sv = Feature.sourceVersion if isWarnPriorityChangeVersion(sv) then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) @@ -1345,13 +1350,21 @@ trait Implicits: case _ => "none - it's ambiguous" if sv.stable == SourceVersion.`3.5` then warn( - em"""Given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} will change + em"""Given search preference for $pt between alternatives + | ${alt1.ref} + |and + | ${alt2.ref} + |will change. 
|Current choice : ${choice(prev)} |New choice from Scala 3.6: ${choice(cmp)}""") prev else warn( - em"""Change in given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} + em"""Given search preference for $pt between alternatives + | ${alt1.ref} + |and + | ${alt2.ref} + |has changed. |Previous choice : ${choice(prev)} |New choice from Scala 3.6: ${choice(cmp)}""") cmp @@ -1610,9 +1623,23 @@ trait Implicits: throw ex val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) - for (critical, msg) <- priorityChangeWarnings do - if result.found.exists(critical.contains(_)) then - report.warning(msg, srcPos) + + // Issue all priority change warnings that can affect the result + val shownWarnings = priorityChangeWarnings.toList.collect: + case (critical, msg) if result.found.exists(critical.contains(_)) => + msg + result match + case result: SearchFailure => + result.reason match + case ambi: AmbiguousImplicits => + // Make warnings part of error message because otherwise they are suppressed when + // the error is emitted. 
+ ambi.priorityChangeWarnings = shownWarnings + case _ => + case _ => + for msg <- shownWarnings do + report.warning(msg, srcPos) + result end searchImplicit diff --git a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala index 4dfc08cc7e9b..b0ff8b8fc03e 100644 --- a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala +++ b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala @@ -23,6 +23,7 @@ class StringFormatterTest extends AbstractStringFormatterTest: @Test def flagsTup = check("(,final)", i"${(JavaStatic, Final)}") @Test def seqOfTup2 = check("(final,given), (private,lazy)", i"${Seq((Final, Given), (Private, Lazy))}%, %") @Test def seqOfTup3 = check("(Foo,given, (right is approximated))", i"${Seq((Foo, Given, TypeComparer.ApproxState.None.addHigh))}%, %") + @Test def tupleNull = check("(1,null)", i"${(1, null: String | Null)}") class StorePrinter extends Printer: var string: String = "" diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index f548df0078de..73d5aea12dc4 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -2,3 +2,11 @@ 15 |@main def Test = f // error | ^ |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f + | + |Note: Given search preference for A between alternatives + | (given_A : A) + |and + | (given_B : B) + |will change. 
+ |Current choice : the second alternative + |New choice from Scala 3.6: the first alternative diff --git a/tests/neg/i21212.check b/tests/neg/i21212.check new file mode 100644 index 000000000000..5d9fe7728cbc --- /dev/null +++ b/tests/neg/i21212.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/i21212.scala:8:52 ---------------------------------------------------------------------- +8 | def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous + | ^ + |Ambiguous given instances: both parameter b2 and parameter a2 match type Minimization.A of parameter x of method summon in object Predef diff --git a/tests/neg/i21212.scala b/tests/neg/i21212.scala new file mode 100644 index 000000000000..389a82b19f1f --- /dev/null +++ b/tests/neg/i21212.scala @@ -0,0 +1,11 @@ +//> using options -source:3.6 +object Minimization: + + trait A + trait B extends A + + def test1(using a1: A)(using b1: B) = summon[A] // picks (most general) a1 + def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous + def test3(implicit a3: A, b3: B) = summon[A] // picks (most specific) b3 + +end Minimization diff --git a/tests/neg/i21303/JavaEnum.java b/tests/neg/i21303/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/neg/i21303/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/neg/i21303/Test.scala b/tests/neg/i21303/Test.scala new file mode 100644 index 000000000000..fa8058140067 --- /dev/null +++ b/tests/neg/i21303/Test.scala @@ -0,0 +1,33 @@ +//> using options -source 3.6-migration +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSNamedType[E] = ??? 
+} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? + } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = summon[TSType[JavaEnum]] // error \ No newline at end of file diff --git a/tests/neg/i2974.scala b/tests/neg/i2974.scala new file mode 100644 index 000000000000..0bff2da1f3ba --- /dev/null +++ b/tests/neg/i2974.scala @@ -0,0 +1,16 @@ + +trait Foo[-T] +trait Bar[-T] extends Foo[T] + +object Test { + + locally: + implicit val fa: Foo[Int] = ??? + implicit val ba: Bar[Int] = ??? + summon[Foo[Int]] // ok + + locally: + implicit val fa: Foo[Int] = ??? + implicit val ba: Bar[Any] = ??? + summon[Foo[Int]] // error: ambiguous +} diff --git a/tests/neg/scala-uri.check b/tests/neg/scala-uri.check new file mode 100644 index 000000000000..91bcd7ab6a6c --- /dev/null +++ b/tests/neg/scala-uri.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg/scala-uri.scala:30:59 ------------------------------------------------------------------ +30 |@main def Test = summon[QueryKeyValue[(String, None.type)]] // error + | ^ + |No best given instance of type QueryKeyValue[(String, None.type)] was found for parameter x of method summon in object Predef. 
+ |I found: + | + | QueryKeyValue.tuple2QueryKeyValue[String, None.type](QueryKey.stringQueryKey, + | QueryValue.optionQueryValue[A]( + | /* ambiguous: both given instance stringQueryValue in trait QueryValueInstances1 and given instance noneQueryValue in trait QueryValueInstances1 match type QueryValue[A] */ + | summon[QueryValue[A]] + | ) + | ) + | + |But both given instance stringQueryValue in trait QueryValueInstances1 and given instance noneQueryValue in trait QueryValueInstances1 match type QueryValue[A]. diff --git a/tests/neg/scala-uri.scala b/tests/neg/scala-uri.scala new file mode 100644 index 000000000000..3820f8cf5613 --- /dev/null +++ b/tests/neg/scala-uri.scala @@ -0,0 +1,30 @@ +import scala.language.implicitConversions + +trait QueryKey[A] +object QueryKey extends QueryKeyInstances +sealed trait QueryKeyInstances: + given stringQueryKey: QueryKey[String] = ??? + +trait QueryValue[-A] +object QueryValue extends QueryValueInstances +sealed trait QueryValueInstances1: + given stringQueryValue: QueryValue[String] = ??? + given noneQueryValue: QueryValue[None.type] = ??? + // The noneQueryValue makes no sense at this priority. Since QueryValue + // is contravariant, QueryValue[None.type] is always better than QueryValue[Option[A]] + // no matter whether it's old or new resolution. So taking both owner and type + // score into account, it's always a draw. With the new disambiguation, we prefer + // the optionQueryValue[A], which gives an ambiguity down the road, because we don't + // know what the wrapped type A is. Previously, we preferred QueryValue[None.type] + // because it is unconditional. The solution is to put QueryValue[None.type] in the + // same trait as QueryValue[Option[A]], as is shown in pos/scala-uri.scala. + +sealed trait QueryValueInstances extends QueryValueInstances1: + given optionQueryValue[A: QueryValue]: QueryValue[Option[A]] = ??? 
+ +trait QueryKeyValue[A] +object QueryKeyValue: + given tuple2QueryKeyValue[K: QueryKey, V: QueryValue]: QueryKeyValue[(K, V)] = ??? + + +@main def Test = summon[QueryKeyValue[(String, None.type)]] // error diff --git a/tests/pos/given-priority.scala b/tests/pos/given-priority.scala new file mode 100644 index 000000000000..048e063eff35 --- /dev/null +++ b/tests/pos/given-priority.scala @@ -0,0 +1,24 @@ +/* These tests show various mechanisms available for implicit prioritization. + */ +import language.`3.6` + +class A // The type for which we infer terms below +class B extends A + +/* First, two schemes that require a pre-planned architecture for how and + * where given instances are defined. + * + * Traditional scheme: prioritize with location in class hierarchy + */ +class LowPriorityImplicits: + given g1: A() + +object NormalImplicits extends LowPriorityImplicits: + given g2: B() + +def test1 = + import NormalImplicits.given + val x = summon[A] + val _: B = x + val y = summon[B] + val _: B = y diff --git a/tests/pos/i21212.scala b/tests/pos/i21212.scala new file mode 100644 index 000000000000..1a1f2e35819a --- /dev/null +++ b/tests/pos/i21212.scala @@ -0,0 +1,22 @@ + +trait Functor[F[_]]: + def map[A, B](fa: F[A])(f: A => B): F[B] = ??? +trait Monad[F[_]] extends Functor[F] +trait MonadError[F[_], E] extends Monad[F]: + def raiseError[A](e: E): F[A] +trait Temporal[F[_]] extends MonadError[F, Throwable] + +trait FunctorOps[F[_], A]: + def map[B](f: A => B): F[B] = ??? +implicit def toFunctorOps[F[_], A](target: F[A])(implicit tc: Functor[F]): FunctorOps[F, A] = ??? + +class ContextBounds[F[_]: Temporal](using err: MonadError[F, Throwable]): + def useCase = err.raiseError(new RuntimeException()) + val bool: F[Boolean] = ??? 
+ def fails = toFunctorOps(bool).map(_ => ()) // warns under -source:3.5, // error under -source:3.6 + +class UsingArguments[F[_]](using Temporal[F])(using err: MonadError[F, Throwable]): + def useCase = err.raiseError(new RuntimeException()) + val bool: F[Boolean] = ??? + def works = toFunctorOps(bool).map(_ => ()) // warns under -source:3.5 + diff --git a/tests/pos/i21303/JavaEnum.java b/tests/pos/i21303/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/pos/i21303/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/pos/i21303/Test.scala b/tests/pos/i21303/Test.scala new file mode 100644 index 000000000000..fe3efa6e38f3 --- /dev/null +++ b/tests/pos/i21303/Test.scala @@ -0,0 +1,32 @@ +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSType[E] = ??? +} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? 
+ } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = summon[TSType[JavaEnum]] \ No newline at end of file diff --git a/tests/pos/i21303a/JavaEnum.java b/tests/pos/i21303a/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/pos/i21303a/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/pos/i21303a/Test.scala b/tests/pos/i21303a/Test.scala new file mode 100644 index 000000000000..83a598b5f17f --- /dev/null +++ b/tests/pos/i21303a/Test.scala @@ -0,0 +1,35 @@ +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSType[E] = ??? + given javaEnumTSNamedType[E <: java.lang.Enum[E]: ClassTag]: TSNamedType[E] = ??? +} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? 
+ } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = + summon[TSType[JavaEnum]] + summon[TSNamedType[JavaEnum]] diff --git a/tests/pos/i21320.scala b/tests/pos/i21320.scala new file mode 100644 index 000000000000..0a7e0d1941d1 --- /dev/null +++ b/tests/pos/i21320.scala @@ -0,0 +1,73 @@ +import scala.deriving.* +import scala.compiletime.* + +trait ConfigMonoid[T]: + def zero: T + def orElse(main: T, defaults: T): T + +object ConfigMonoid: + given option[T]: ConfigMonoid[Option[T]] = ??? + + inline def zeroTuple[C <: Tuple]: Tuple = + inline erasedValue[C] match + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => + summonInline[ConfigMonoid[t]].zero *: zeroTuple[ts] + + inline def valueTuple[C <: Tuple, T](index: Int, main: T, defaults: T): Tuple = + inline erasedValue[C] match + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => + def get(v: T) = v.asInstanceOf[Product].productElement(index).asInstanceOf[t] + summonInline[ConfigMonoid[t]].orElse(get(main), get(defaults)) *: valueTuple[ts, T]( + index + 1, + main, + defaults + ) + + inline given derive[T](using m: Mirror.ProductOf[T]): ConfigMonoid[T] = + new ConfigMonoid[T]: + def zero: T = m.fromProduct(zeroTuple[m.MirroredElemTypes]) + def orElse(main: T, defaults: T): T = m.fromProduct(valueTuple[m.MirroredElemTypes, T](0, main, defaults)) + + + +final case class PublishOptions( + v1: Option[String] = None, + v2: Option[String] = None, + v3: Option[String] = None, + v4: Option[String] = None, + v5: Option[String] = None, + v6: Option[String] = None, + v7: Option[String] = None, + v8: Option[String] = None, + v9: Option[String] = None, + ci: PublishContextualOptions = PublishContextualOptions(), +) +object PublishOptions: + implicit val monoid: ConfigMonoid[PublishOptions] = ConfigMonoid.derive + +final case class 
PublishContextualOptions( + v1: Option[String] = None, + v2: Option[String] = None, + v3: Option[String] = None, + v4: Option[String] = None, + v5: Option[String] = None, + v6: Option[String] = None, + v7: Option[String] = None, + v8: Option[String] = None, + v9: Option[String] = None, + v10: Option[String] = None, + v11: Option[String] = None, + v12: Option[String] = None, + v13: Option[String] = None, + v14: Option[String] = None, + v15: Option[String] = None, + v16: Option[String] = None, + v17: Option[String] = None, + v18: Option[String] = None, + v19: Option[String] = None, + v20: Option[String] = None +) +object PublishContextualOptions: + given monoid: ConfigMonoid[PublishContextualOptions] = ConfigMonoid.derive \ No newline at end of file diff --git a/tests/pos/i2974.scala b/tests/pos/i2974.scala index 75c6a24a41bb..8f1c2e2d6d2f 100644 --- a/tests/pos/i2974.scala +++ b/tests/pos/i2974.scala @@ -7,6 +7,7 @@ object Test { implicit val ba: Bar[Int] = ??? def test: Unit = { - implicitly[Foo[Int]] + val x = summon[Foo[Int]] + val _: Bar[Int] = x } } diff --git a/tests/pos/scala-uri.scala b/tests/pos/scala-uri.scala new file mode 100644 index 000000000000..75ea2fc70d8a --- /dev/null +++ b/tests/pos/scala-uri.scala @@ -0,0 +1,22 @@ +// This works for implicit/implicit pairs but not for givens, see neg version. +import scala.language.implicitConversions + +trait QueryKey[A] +object QueryKey extends QueryKeyInstances +sealed trait QueryKeyInstances: + implicit val stringQueryKey: QueryKey[String] = ??? + +trait QueryValue[-A] +object QueryValue extends QueryValueInstances +sealed trait QueryValueInstances1: + implicit final val stringQueryValue: QueryValue[String] = ??? + implicit final val noneQueryValue: QueryValue[None.type] = ??? + +sealed trait QueryValueInstances extends QueryValueInstances1: + implicit final def optionQueryValue[A: QueryValue]: QueryValue[Option[A]] = ??? 
+ +trait QueryKeyValue[A] +object QueryKeyValue: + implicit def tuple2QueryKeyValue[K: QueryKey, V: QueryValue]: QueryKeyValue[(K, V)] = ??? + +@main def Test = summon[QueryKeyValue[(String, None.type)]] diff --git a/tests/pos/slick-migration-api-example.scala b/tests/pos/slick-migration-api-example.scala new file mode 100644 index 000000000000..3b6f1b4a82f4 --- /dev/null +++ b/tests/pos/slick-migration-api-example.scala @@ -0,0 +1,23 @@ +trait Migration +object Migration: + implicit class MigrationConcat[M <: Migration](m: M): + def &[N <: Migration, O](n: N)(implicit ccm: CanConcatMigrations[M, N, O]): O = ??? + +trait ReversibleMigration extends Migration +trait MigrationSeq extends Migration +trait ReversibleMigrationSeq extends MigrationSeq with ReversibleMigration + +trait ToReversible[-A <: Migration] +object ToReversible: + implicit val reversible: ToReversible[ReversibleMigration] = ??? +class CanConcatMigrations[-A, -B, +C] +trait CanConcatMigrationsLow: + implicit def default[A <: Migration, B <: Migration]: CanConcatMigrations[A, B, MigrationSeq] = ??? +object CanConcatMigrations extends CanConcatMigrationsLow: + implicit def reversible[A <: Migration, B <: Migration](implicit reverseA: ToReversible[A], + reverseB: ToReversible[B]): CanConcatMigrations[A, B, ReversibleMigrationSeq] = ??? + +@main def Test = + val rm: ReversibleMigration = ??? + val rms = rm & rm & rm + summon[rms.type <:< ReversibleMigrationSeq] // error Cannot prove that (rms : slick.migration.api.MigrationSeq) <:< slick.migration.api.ReversibleMigrationSeq. 
\ No newline at end of file diff --git a/tests/warn/i21036a.check b/tests/warn/i21036a.check index 673c01374ef3..876a81ad8a83 100644 --- a/tests/warn/i21036a.check +++ b/tests/warn/i21036a.check @@ -1,6 +1,10 @@ -- Warning: tests/warn/i21036a.scala:7:17 ------------------------------------------------------------------------------ 7 |val y = summon[A] // warn | ^ - | Given search preference for A between alternatives (b : B) and (a : A) will change + | Given search preference for A between alternatives + | (b : B) + | and + | (a : A) + | will change. | Current choice : the first alternative | New choice from Scala 3.6: the second alternative diff --git a/tests/warn/i21036b.check b/tests/warn/i21036b.check index ff7fdfd7a87c..11bb38727d77 100644 --- a/tests/warn/i21036b.check +++ b/tests/warn/i21036b.check @@ -1,6 +1,10 @@ -- Warning: tests/warn/i21036b.scala:7:17 ------------------------------------------------------------------------------ 7 |val y = summon[A] // warn | ^ - | Change in given search preference for A between alternatives (b : B) and (a : A) + | Given search preference for A between alternatives + | (b : B) + | and + | (a : A) + | has changed. | Previous choice : the first alternative | New choice from Scala 3.6: the second alternative From 8a41389dd4ea6c15f2089519ac5883c4c72a0c56 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Aug 2024 13:48:25 +0200 Subject: [PATCH 453/465] Compensate loss of transitivity We only have transitivity between givens or between implicits. To cope with that - We tank first all implicits, giving a best implicit search result. - Then we rank all givens startign with the implicit result. If there is a given that is better than the best implicit, the best given will be chosen. Otherwise we will stick with the best implicit. 
--- .../src/dotty/tools/dotc/typer/Implicits.scala | 18 +++++++++++++++--- tests/pos/given-owner-disambiguate.scala | 13 +++++++++++++ 2 files changed, 28 insertions(+), 3 deletions(-) create mode 100644 tests/pos/given-owner-disambiguate.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index d98fc87655bf..8a4ec986e23a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1381,8 +1381,6 @@ trait Implicits: def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2, disambiguate = true) - assert(diff <= 0 || isWarnPriorityChangeVersion(Feature.sourceVersion)) - // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens else if diff == 0 && alt2.isExtension then @@ -1622,7 +1620,21 @@ trait Implicits: validateOrdering(ord) throw ex - val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) + val sorted = sort(eligible) + val result = sorted match + case first :: rest => + val firstIsImplicit = first.ref.symbol.is(Implicit) + if rest.exists(_.ref.symbol.is(Implicit) != firstIsImplicit) then + // Mixture of implicits and givens + // Rank implicits first, then, if there is a given that it better than the best implicit(s) + // switch over to givens. 
+ val (sortedImplicits, sortedGivens) = sorted.partition(_.ref.symbol.is(Implicit)) + val implicitResult = rank(sortedImplicits, NoMatchingImplicitsFailure, Nil) + rank(sortedGivens, implicitResult, Nil) + else + rank(sorted, NoMatchingImplicitsFailure, Nil) + case _ => + NoMatchingImplicitsFailure // Issue all priority change warnings that can affect the result val shownWarnings = priorityChangeWarnings.toList.collect: diff --git a/tests/pos/given-owner-disambiguate.scala b/tests/pos/given-owner-disambiguate.scala new file mode 100644 index 000000000000..f0a44ecc441a --- /dev/null +++ b/tests/pos/given-owner-disambiguate.scala @@ -0,0 +1,13 @@ +class General +class Specific extends General + +class LowPriority: + given a:General() + +object NormalPriority extends LowPriority: + given b:Specific() + +def run = + import NormalPriority.given + val x = summon[General] + val _: Specific = x // <- b was picked \ No newline at end of file From 0f0c20d759d008769e5210876c113aeb4569a2c9 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Aug 2024 18:14:37 +0200 Subject: [PATCH 454/465] Delay priority change until 3.7 Warnings from 3.6, change in 3.7. This is one version later than originally planned. 
--- .../tools/dotc/config/SourceVersion.scala | 1 + .../dotty/tools/dotc/typer/Applications.scala | 14 +++++++------- .../src/dotty/tools/dotc/typer/Implicits.scala | 18 +++++++++--------- .../scala/runtime/stdLibPatches/language.scala | 15 +++++++++++++++ tests/neg/given-triangle.check | 2 +- tests/neg/given-triangle.scala | 2 +- tests/neg/i15264.scala | 2 +- tests/neg/i21212.check | 4 ++-- tests/neg/i21212.scala | 3 ++- tests/neg/i21303/Test.scala | 2 +- tests/pos/source-import-3-7-migration.scala | 1 + tests/pos/source-import-3-7.scala | 1 + tests/run/given-triangle.scala | 2 +- tests/run/implicit-specifity.scala | 2 +- tests/run/implied-priority.scala | 2 +- tests/warn/i20420.scala | 2 +- tests/warn/i21036a.check | 2 +- tests/warn/i21036a.scala | 2 +- tests/warn/i21036b.check | 2 +- tests/warn/i21036b.scala | 2 +- 20 files changed, 50 insertions(+), 31 deletions(-) create mode 100644 tests/pos/source-import-3-7-migration.scala create mode 100644 tests/pos/source-import-3-7.scala diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 935b95003729..02140c3f4e3b 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -12,6 +12,7 @@ enum SourceVersion: case `3.4-migration`, `3.4` case `3.5-migration`, `3.5` case `3.6-migration`, `3.6` + case `3.7-migration`, `3.7` // !!! Keep in sync with scala.runtime.stdlibPatches.language !!! case `future-migration`, `future` diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 2efe5282f025..9a5db44b15ca 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1753,7 +1753,7 @@ trait Applications extends Compatibility { // and in mode Scala3-migration when we compare with the old Scala 2 rules. 
case Intermediate // Intermediate rules: better means specialize, but map all type arguments downwards - // These are enabled for 3.0-3.4, or if OldImplicitResolution + // These are enabled for 3.0-3.5, or if OldImplicitResolution // is specified, and also for all comparisons between old-style implicits, case New // New rules: better means generalize, givens (and extensions) always beat implicits @@ -1789,7 +1789,7 @@ trait Applications extends Compatibility { val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) if !preferGeneral || Feature.migrateTo3 && oldResolution then CompareScheme.Old - else if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) + else if Feature.sourceVersion.isAtMost(SourceVersion.`3.5`) || oldResolution || alt1.symbol.is(Implicit) && alt2.symbol.is(Implicit) then CompareScheme.Intermediate @@ -1855,7 +1855,7 @@ trait Applications extends Compatibility { * available in 3.0-migration if mode `Mode.OldImplicitResolution` is turned on as well. * It is used to highlight differences between Scala 2 and 3 behavior. * - * - In Scala 3.0-3.5, the behavior is as follows: `T <:p U` iff there is an impliit conversion + * - In Scala 3.0-3.6, the behavior is as follows: `T <:p U` iff there is an implicit conversion * from `T` to `U`, or * * flip(T) <: flip(U) @@ -1870,14 +1870,14 @@ trait Applications extends Compatibility { * of parameters are not affected. So `T <: U` would imply `Set[Cmp[U]] <:p Set[Cmp[T]]`, * as usual, because `Set` is non-variant. * - * - From Scala 3.6, `T <:p U` means `T <: U` or `T` convertible to `U` + * - From Scala 3.7, `T <:p U` means `T <: U` or `T` convertible to `U` * for overloading resolution (when `preferGeneral is false), and the opposite relation * `U <: T` or `U convertible to `T` for implicit disambiguation between givens - * (when `preferGeneral` is true). For old-style implicit values, the 3.4 behavior is kept. + * (when `preferGeneral` is true). For old-style implicit values, the 3.5 behavior is kept. 
* If one of the alternatives is an implicit and the other is a given (or an extension), the implicit loses. * - * - In Scala 3.5 and Scala 3.6-migration, we issue a warning if the result under - * Scala 3.6 differ wrt to the old behavior up to 3.5. + * - In Scala 3.6 and Scala 3.7-migration, we issue a warning if the result under + * Scala 3.7 differs wrt to the old behavior up to 3.6. * * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 8a4ec986e23a..056356db6947 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1305,13 +1305,13 @@ trait Implicits: /** Search a list of eligible implicit references */ private def searchImplicit(eligible: List[Candidate], contextual: Boolean): SearchResult = - // A map that associates a priority change warning (between -source 3.4 and 3.6) + // A map that associates a priority change warning (between -source 3.6 and 3.7) // with the candidate refs mentioned in the warning. We report the associated // message if one of the critical candidates is part of the result of the implicit search. val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() def isWarnPriorityChangeVersion(sv: SourceVersion): Boolean = - sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` + sv.stable == SourceVersion.`3.6` || sv == SourceVersion.`3.7-migration` /** Compare `alt1` with `alt2` to determine which one should be chosen. 
* @@ -1319,12 +1319,12 @@ trait Implicits: * a number < 0 if `alt2` is preferred over `alt1` * 0 if neither alternative is preferred over the other * The behavior depends on the source version - * before 3.5: compare with preferGeneral = false - * 3.5: compare twice with preferGeneral = false and true, warning if result is different, + * before 3.6: compare with preferGeneral = false + * 3.6: compare twice with preferGeneral = false and true, warning if result is different, * return old result with preferGeneral = false - * 3.6-migration: compare twice with preferGeneral = false and true, warning if result is different, + * 3.7-migration: compare twice with preferGeneral = false and true, warning if result is different, * return new result with preferGeneral = true - * 3.6 and higher: compare with preferGeneral = true + * 3.7 and higher: compare with preferGeneral = true * * @param disambiguate The call is used to disambiguate two successes, not for ranking. * When ranking, we are always filtering out either > 0 or <= 0 results. @@ -1348,7 +1348,7 @@ trait Implicits: case -1 => "the second alternative" case 1 => "the first alternative" case _ => "none - it's ambiguous" - if sv.stable == SourceVersion.`3.5` then + if sv.stable == SourceVersion.`3.6` then warn( em"""Given search preference for $pt between alternatives | ${alt1.ref} @@ -1356,7 +1356,7 @@ trait Implicits: | ${alt2.ref} |will change. |Current choice : ${choice(prev)} - |New choice from Scala 3.6: ${choice(cmp)}""") + |New choice from Scala 3.7: ${choice(cmp)}""") prev else warn( @@ -1366,7 +1366,7 @@ trait Implicits: | ${alt2.ref} |has changed. 
|Previous choice : ${choice(prev)} - |New choice from Scala 3.6: ${choice(cmp)}""") + |New choice from Scala 3.7: ${choice(cmp)}""") cmp else cmp max prev // When ranking, we keep the better of cmp and prev, which ends up retaining a candidate diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 1171c62602fb..b9f9d47bb0b1 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -300,6 +300,21 @@ object language: @compileTimeOnly("`3.6` can only be used at compile time in import statements") object `3.6` + /** Set source version to 3.7-migration. + * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.7-migration` can only be used at compile time in import statements") + object `3.7-migration` + + /** Set source version to 3.7 + * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.7` can only be used at compile time in import statements") + object `3.7` + + // !!! Keep in sync with dotty.tools.dotc.config.SourceVersion !!! // Also add tests in `tests/pos/source-import-3-x.scala` and `tests/pos/source-import-3-x-migration.scala` diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index 73d5aea12dc4..147c54270afb 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -9,4 +9,4 @@ | (given_B : B) |will change. 
|Current choice : the second alternative - |New choice from Scala 3.6: the first alternative + |New choice from Scala 3.7: the first alternative diff --git a/tests/neg/given-triangle.scala b/tests/neg/given-triangle.scala index 16aca7c44dee..4842c5314f51 100644 --- a/tests/neg/given-triangle.scala +++ b/tests/neg/given-triangle.scala @@ -1,4 +1,4 @@ -//> using options -source 3.5 +//> using options -source 3.6 class A class B extends A class C extends A diff --git a/tests/neg/i15264.scala b/tests/neg/i15264.scala index 825e74701f73..d690eccf23f3 100644 --- a/tests/neg/i15264.scala +++ b/tests/neg/i15264.scala @@ -1,4 +1,4 @@ -import language.`3.6` +import language.`3.7` object priority: // lower number = higher priority class Prio0 extends Prio1 diff --git a/tests/neg/i21212.check b/tests/neg/i21212.check index 5d9fe7728cbc..06740af36d77 100644 --- a/tests/neg/i21212.check +++ b/tests/neg/i21212.check @@ -1,4 +1,4 @@ --- [E172] Type Error: tests/neg/i21212.scala:8:52 ---------------------------------------------------------------------- -8 | def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous +-- [E172] Type Error: tests/neg/i21212.scala:9:52 ---------------------------------------------------------------------- +9 | def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous | ^ |Ambiguous given instances: both parameter b2 and parameter a2 match type Minimization.A of parameter x of method summon in object Predef diff --git a/tests/neg/i21212.scala b/tests/neg/i21212.scala index 389a82b19f1f..3b030cefcdc7 100644 --- a/tests/neg/i21212.scala +++ b/tests/neg/i21212.scala @@ -1,4 +1,5 @@ -//> using options -source:3.6 +//> using options -source 3.7 + object Minimization: trait A diff --git a/tests/neg/i21303/Test.scala b/tests/neg/i21303/Test.scala index fa8058140067..25d43dac344e 100644 --- a/tests/neg/i21303/Test.scala +++ b/tests/neg/i21303/Test.scala @@ -1,4 +1,4 @@ -//> using options -source 3.6-migration +//> using options -source 
3.7-migration import scala.deriving.Mirror import scala.compiletime.* import scala.reflect.ClassTag diff --git a/tests/pos/source-import-3-7-migration.scala b/tests/pos/source-import-3-7-migration.scala new file mode 100644 index 000000000000..2e80fcb0bab2 --- /dev/null +++ b/tests/pos/source-import-3-7-migration.scala @@ -0,0 +1 @@ +import language.`3.7-migration` \ No newline at end of file diff --git a/tests/pos/source-import-3-7.scala b/tests/pos/source-import-3-7.scala new file mode 100644 index 000000000000..7fa68fd496f6 --- /dev/null +++ b/tests/pos/source-import-3-7.scala @@ -0,0 +1 @@ +import language.`3.7` \ No newline at end of file diff --git a/tests/run/given-triangle.scala b/tests/run/given-triangle.scala index 0b483e87f28c..66339f44e43c 100644 --- a/tests/run/given-triangle.scala +++ b/tests/run/given-triangle.scala @@ -1,4 +1,4 @@ -import language.`3.6` +import language.`3.7` class A class B extends A diff --git a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index da90110c9866..9e59cf5f1869 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -1,4 +1,4 @@ -import language.`3.6` +import language.`3.7` case class Show[T](val i: Int) object Show { diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index 15f6a40a27ef..a9380e117875 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -1,6 +1,6 @@ /* These tests show various mechanisms available for implicit prioritization. 
*/ -import language.`3.6` +import language.`3.7` class E[T](val str: String) // The type for which we infer terms below diff --git a/tests/warn/i20420.scala b/tests/warn/i20420.scala index d28270509f91..4c7585e32f48 100644 --- a/tests/warn/i20420.scala +++ b/tests/warn/i20420.scala @@ -1,4 +1,4 @@ -//> using options -source 3.5-migration +//> using options -source 3.6-migration final class StrictEqual[V] final class Less[V] diff --git a/tests/warn/i21036a.check b/tests/warn/i21036a.check index 876a81ad8a83..63d611a6e246 100644 --- a/tests/warn/i21036a.check +++ b/tests/warn/i21036a.check @@ -7,4 +7,4 @@ | (a : A) | will change. | Current choice : the first alternative - | New choice from Scala 3.6: the second alternative + | New choice from Scala 3.7: the second alternative diff --git a/tests/warn/i21036a.scala b/tests/warn/i21036a.scala index ab97429852d6..b7aba27ca95e 100644 --- a/tests/warn/i21036a.scala +++ b/tests/warn/i21036a.scala @@ -1,4 +1,4 @@ -//> using options -source 3.5 +//> using options -source 3.6 trait A trait B extends A given b: B = ??? diff --git a/tests/warn/i21036b.check b/tests/warn/i21036b.check index 11bb38727d77..dfa19a0e9bb1 100644 --- a/tests/warn/i21036b.check +++ b/tests/warn/i21036b.check @@ -7,4 +7,4 @@ | (a : A) | has changed. | Previous choice : the first alternative - | New choice from Scala 3.6: the second alternative + | New choice from Scala 3.7: the second alternative diff --git a/tests/warn/i21036b.scala b/tests/warn/i21036b.scala index 16dd72266613..c440f5d3c06d 100644 --- a/tests/warn/i21036b.scala +++ b/tests/warn/i21036b.scala @@ -1,4 +1,4 @@ -//> using options -source 3.6-migration +//> using options -source 3.7-migration trait A trait B extends A given b: B = ??? 
From f68345811a3353c96131f3f73c9f73a01abd7254 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 6 Aug 2024 19:57:01 +0200 Subject: [PATCH 455/465] Fix ranking logic --- .../dotty/tools/dotc/typer/Implicits.scala | 31 +++++++--- tests/pos/i15264.scala | 1 + tests/warn/i15264.scala | 56 +++++++++++++++++++ 3 files changed, 79 insertions(+), 9 deletions(-) create mode 100644 tests/warn/i15264.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 056356db6947..14491184b7a2 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1369,8 +1369,13 @@ trait Implicits: |New choice from Scala 3.7: ${choice(cmp)}""") cmp else cmp max prev - // When ranking, we keep the better of cmp and prev, which ends up retaining a candidate - // if it is retained in either version. + // When ranking, alt1 is always the new candidate and alt2 is the + // solution found previously. We keep the candidate if the outcome is 0 + // (ambiguous) or 1 (first wins). Or, when ranking in healImplicit we keep the + // candidate only if the outcome is 1. In both cases, keeping the better + // of `cmp` and `prev` means we keep candidates that could match + // in either scheme. This means that subsequent disambiguation + // comparisons will record a warning if cmp != prev. else cmp end compareAlternatives @@ -1416,7 +1421,15 @@ trait Implicits: if diff < 0 then alt2 else if diff > 0 then alt1 else SearchFailure(new AmbiguousImplicits(alt1, alt2, pt, argument), span) - case _: SearchFailure => alt2 + case fail: SearchFailure => + fail.reason match + case ambi: AmbiguousImplicits => + if compareAlternatives(ambi.alt1, alt2) < 0 && + compareAlternatives(ambi.alt2, alt2) < 0 + then alt2 + else alt1 + case _ => + alt2 /** Try to find a best matching implicit term among all the candidates in `pending`. 
* @param pending The list of candidates that remain to be tested @@ -1621,7 +1634,7 @@ trait Implicits: throw ex val sorted = sort(eligible) - val result = sorted match + val res = sorted match case first :: rest => val firstIsImplicit = first.ref.symbol.is(Implicit) if rest.exists(_.ref.symbol.is(Implicit) != firstIsImplicit) then @@ -1638,11 +1651,11 @@ trait Implicits: // Issue all priority change warnings that can affect the result val shownWarnings = priorityChangeWarnings.toList.collect: - case (critical, msg) if result.found.exists(critical.contains(_)) => + case (critical, msg) if res.found.exists(critical.contains(_)) => msg - result match - case result: SearchFailure => - result.reason match + res match + case res: SearchFailure => + res.reason match case ambi: AmbiguousImplicits => // Make warnings part of error message because otherwise they are suppressed when // the error is emitted. @@ -1652,7 +1665,7 @@ trait Implicits: for msg <- shownWarnings do report.warning(msg, srcPos) - result + res end searchImplicit def isUnderSpecifiedArgument(tp: Type): Boolean = diff --git a/tests/pos/i15264.scala b/tests/pos/i15264.scala index 5be8436c12ba..18ca92df6cb1 100644 --- a/tests/pos/i15264.scala +++ b/tests/pos/i15264.scala @@ -1,3 +1,4 @@ +import language.`3.7` object priority: // lower number = higher priority class Prio0 extends Prio1 diff --git a/tests/warn/i15264.scala b/tests/warn/i15264.scala new file mode 100644 index 000000000000..9435c6364c08 --- /dev/null +++ b/tests/warn/i15264.scala @@ -0,0 +1,56 @@ +// Note: No check file for this test since the precise warning messages are non-deterministic +import language.`3.7-migration` +object priority: + // lower number = higher priority + class Prio0 extends Prio1 + object Prio0 { given Prio0() } + + class Prio1 extends Prio2 + object Prio1 { given Prio1() } + + class Prio2 + object Prio2 { given Prio2() } + +object repro: + // analogous to cats Eq, Hash, Order: + class A[V] + class B[V] extends A[V] + 
class C[V] extends A[V] + + class Q[V] + + object context: + // prios work here, which is cool + given[V](using priority.Prio0): C[V] = new C[V] + given[V](using priority.Prio1): B[V] = new B[V] + given[V](using priority.Prio2): A[V] = new A[V] + + object exports: + // so will these exports + export context.given + + // if you import these don't import from 'context' above + object qcontext: + // base defs, like what you would get from cats + given ga: A[Int] = new B[Int] // added so that we don't get an ambiguity in test2 + given gb: B[Int] = new B[Int] + given gc: C[Int] = new C[Int] + + // these seem like they should work but don't + given gcq[V](using p0: priority.Prio0)(using c: C[V]): C[Q[V]] = new C[Q[V]] + given gbq[V](using p1: priority.Prio1)(using b: B[V]): B[Q[V]] = new B[Q[V]] + given gaq[V](using p2: priority.Prio2)(using a: A[V]): A[Q[V]] = new A[Q[V]] + +object test1: + import repro.* + import repro.exports.given + + // these will work + val a = summon[A[Int]] // warn + + +object test2: + import repro.* + import repro.qcontext.given + + val a = summon[A[Q[Int]]] // warn From 33d7da88bc63f6f163adf4ef919fb0374ae9cf76 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 6 Aug 2024 20:05:59 +0200 Subject: [PATCH 456/465] Make priority change warning messages stable Make the wording of a priority change warning message stable under different orders of eligibles. We now always report the previously chosen alternative first and the new one second. Note: We can still get ambiguities by fallging different pairs of alternatives depending on initial order. 
--- .../dotty/tools/dotc/typer/Implicits.scala | 66 +++++++++---------- tests/neg/given-triangle.check | 8 +-- 2 files changed, 36 insertions(+), 38 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 14491184b7a2..e6b2d16eace2 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1310,9 +1310,6 @@ trait Implicits: // message if one of the critical candidates is part of the result of the implicit search. val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() - def isWarnPriorityChangeVersion(sv: SourceVersion): Boolean = - sv.stable == SourceVersion.`3.6` || sv == SourceVersion.`3.7-migration` - /** Compare `alt1` with `alt2` to determine which one should be chosen. * * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1337,37 +1334,38 @@ trait Implicits: else val cmp = comp(using searchContext()) val sv = Feature.sourceVersion - if isWarnPriorityChangeVersion(sv) then + val isLastOldVersion = sv.stable == SourceVersion.`3.6` + val isMigratingVersion = sv == SourceVersion.`3.7-migration` + if isLastOldVersion || isMigratingVersion then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if disambiguate && cmp != prev then - def warn(msg: Message) = - val critical = alt1.ref :: alt2.ref :: Nil - priorityChangeWarnings += ((critical, msg)) - implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}, $disambiguate") - def choice(c: Int) = c match - case -1 => "the second alternative" - case 1 => "the first alternative" - case _ => "none - it's ambiguous" - if sv.stable == SourceVersion.`3.6` then - warn( - em"""Given search preference for $pt between alternatives - | ${alt1.ref} - |and - | ${alt2.ref} - |will change. 
- |Current choice : ${choice(prev)} - |New choice from Scala 3.7: ${choice(cmp)}""") - prev - else - warn( - em"""Given search preference for $pt between alternatives - | ${alt1.ref} - |and - | ${alt2.ref} - |has changed. - |Previous choice : ${choice(prev)} - |New choice from Scala 3.7: ${choice(cmp)}""") - cmp + implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}") + val (loser, winner) = + prev match + case 1 => (alt1, alt2) + case -1 => (alt2, alt1) + case 0 => + cmp match + case 1 => (alt2, alt1) + case -1 => (alt1, alt2) + def choice(nth: String, c: Int) = + if c == 0 then "none - it's ambiguous" + else s"the $nth alternative" + val (change, whichChoice) = + if isLastOldVersion + then ("will change", "Current choice ") + else ("has changed", "Previous choice") + val msg = + em"""Given search preference for $pt between alternatives + | ${loser.ref} + |and + | ${winner.ref} + |$change. + |$whichChoice : ${choice("first", prev)} + |New choice from Scala 3.7: ${choice("second", cmp)}""" + val critical = alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += ((critical, msg)) + if isLastOldVersion then prev else cmp else cmp max prev // When ranking, alt1 is always the new candidate and alt2 is the // solution found previously. 
We keep the candidate if the outcome is 0 @@ -1424,8 +1422,8 @@ trait Implicits: case fail: SearchFailure => fail.reason match case ambi: AmbiguousImplicits => - if compareAlternatives(ambi.alt1, alt2) < 0 && - compareAlternatives(ambi.alt2, alt2) < 0 + if compareAlternatives(ambi.alt1, alt2, disambiguate = true) < 0 + && compareAlternatives(ambi.alt2, alt2, disambiguate = true) < 0 then alt2 else alt1 case _ => diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index 147c54270afb..f366c18e78f0 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -4,9 +4,9 @@ |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f | |Note: Given search preference for A between alternatives - | (given_A : A) - |and | (given_B : B) + |and + | (given_A : A) |will change. - |Current choice : the second alternative - |New choice from Scala 3.7: the first alternative + |Current choice : the first alternative + |New choice from Scala 3.7: the second alternative From d439b58bb09380453830db9c5ee11aa721a27ad5 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 6 Aug 2024 23:42:07 +0200 Subject: [PATCH 457/465] Fix `healAmbiguous` to `compareAlternatives` with `disambiguate = true` On the final result, compared with all the ambiguous candidates we are trying to recover from. We should still use `disambiguate = false` when filtering the `pending` candidates for the purpose of warnings, as in the other cases. Before the changes, it was possible for an ambiguous SearchFailure to be healed by a candidate which was considered better (possibly only) under a prioritization scheme different from the current one. As an optimization, we can avoid redoing compareAlternatives in versions which could have only used the new prioritization scheme to begin with. Also restores behaviour avoiding false positive warnings. 
Specifically, in cases where we could report a change in prioritization, despite having not yet done `tryImplicit` on the alternative, i.e. it was only compared as part of an early filtering See #21045 for related changes --- .../dotty/tools/dotc/typer/Implicits.scala | 49 ++++++++++--------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index e6b2d16eace2..90e8c832dd87 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1310,6 +1310,10 @@ trait Implicits: // message if one of the critical candidates is part of the result of the implicit search. val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() + val sv = Feature.sourceVersion + val isLastOldVersion = sv.stable == SourceVersion.`3.6` + val isWarnPriorityChangeVersion = isLastOldVersion || sv == SourceVersion.`3.7-migration` + /** Compare `alt1` with `alt2` to determine which one should be chosen. 
* * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1333,10 +1337,7 @@ trait Implicits: else if alt1.level != alt2.level then alt1.level - alt2.level else val cmp = comp(using searchContext()) - val sv = Feature.sourceVersion - val isLastOldVersion = sv.stable == SourceVersion.`3.6` - val isMigratingVersion = sv == SourceVersion.`3.7-migration` - if isLastOldVersion || isMigratingVersion then + if isWarnPriorityChangeVersion then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if disambiguate && cmp != prev then implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}") @@ -1419,15 +1420,7 @@ trait Implicits: if diff < 0 then alt2 else if diff > 0 then alt1 else SearchFailure(new AmbiguousImplicits(alt1, alt2, pt, argument), span) - case fail: SearchFailure => - fail.reason match - case ambi: AmbiguousImplicits => - if compareAlternatives(ambi.alt1, alt2, disambiguate = true) < 0 - && compareAlternatives(ambi.alt2, alt2, disambiguate = true) < 0 - then alt2 - else alt1 - case _ => - alt2 + case _: SearchFailure => alt2 /** Try to find a best matching implicit term among all the candidates in `pending`. * @param pending The list of candidates that remain to be tested @@ -1451,12 +1444,27 @@ trait Implicits: pending match { case cand :: remaining => /** To recover from an ambiguous implicit failure, we need to find a pending - * candidate that is strictly better than the failed candidate(s). + * candidate that is strictly better than the failed `ambiguous` candidate(s). * If no such candidate is found, we propagate the ambiguity. 
*/ - def healAmbiguous(fail: SearchFailure, betterThanFailed: Candidate => Boolean) = - val newPending = remaining.filter(betterThanFailed) - rank(newPending, fail, Nil).recoverWith(_ => fail) + def healAmbiguous(fail: SearchFailure, ambiguous: List[RefAndLevel]) = + def betterThanAmbiguous(newCand: RefAndLevel, disambiguate: Boolean): Boolean = + ambiguous.forall(compareAlternatives(newCand, _, disambiguate) > 0) + + inline def betterByCurrentScheme(newCand: RefAndLevel): Boolean = + if isWarnPriorityChangeVersion then + // newCand may have only been kept in pending because it was better in the other priotization scheme. + // If that candidate produces a SearchSuccess, disambiguate will return it as the found SearchResult. + // We must now recheck it was really better than the ambigous candidates we are recovering from, + // under the rules of the current scheme, which are applied when disambiguate = true. + betterThanAmbiguous(newCand, disambiguate = true) + else true + + val newPending = remaining.filter(betterThanAmbiguous(_, disambiguate = false)) + rank(newPending, fail, Nil) match + case found: SearchSuccess if betterByCurrentScheme(found) => found + case _ => fail + end healAmbiguous negateIfNot(tryImplicit(cand, contextual)) match { case fail: SearchFailure => @@ -1471,8 +1479,7 @@ trait Implicits: else // The ambiguity happened in a nested search: to recover we // need a candidate better than `cand` - healAmbiguous(fail, newCand => - compareAlternatives(newCand, cand) > 0) + healAmbiguous(fail, cand :: Nil) else // keep only warnings that don't involve the failed candidate reference priorityChangeWarnings.filterInPlace: (critical, _) => @@ -1491,9 +1498,7 @@ trait Implicits: // The ambiguity happened in the current search: to recover we // need a candidate better than the two ambiguous alternatives. 
val ambi = fail.reason.asInstanceOf[AmbiguousImplicits] - healAmbiguous(fail, newCand => - compareAlternatives(newCand, ambi.alt1) > 0 && - compareAlternatives(newCand, ambi.alt2) > 0) + healAmbiguous(fail, ambi.alt1 :: ambi.alt2 :: Nil) } } case nil => From 73c6e883318324d46b978154ff0213b8e6eed76d Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 7 Aug 2024 13:57:38 +0200 Subject: [PATCH 458/465] Adjust compilation tests to backported changes --- tests/neg/scala-uri.check | 4 ++-- tests/neg/scala-uri.scala | 1 + tests/pos/i13044.scala | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/neg/scala-uri.check b/tests/neg/scala-uri.check index 91bcd7ab6a6c..b6d52d6fffd0 100644 --- a/tests/neg/scala-uri.check +++ b/tests/neg/scala-uri.check @@ -1,5 +1,5 @@ --- [E172] Type Error: tests/neg/scala-uri.scala:30:59 ------------------------------------------------------------------ -30 |@main def Test = summon[QueryKeyValue[(String, None.type)]] // error +-- [E172] Type Error: tests/neg/scala-uri.scala:31:59 ------------------------------------------------------------------ +31 |@main def Test = summon[QueryKeyValue[(String, None.type)]] // error | ^ |No best given instance of type QueryKeyValue[(String, None.type)] was found for parameter x of method summon in object Predef. 
|I found: diff --git a/tests/neg/scala-uri.scala b/tests/neg/scala-uri.scala index 3820f8cf5613..f3bff269234f 100644 --- a/tests/neg/scala-uri.scala +++ b/tests/neg/scala-uri.scala @@ -1,3 +1,4 @@ +//> using options -source:3.6 import scala.language.implicitConversions trait QueryKey[A] diff --git a/tests/pos/i13044.scala b/tests/pos/i13044.scala index 4c9b8b914062..36299d9e8366 100644 --- a/tests/pos/i13044.scala +++ b/tests/pos/i13044.scala @@ -1,4 +1,4 @@ -//> using options -Xmax-inlines:33 +//> using options -Xmax-inlines:35 import scala.deriving.Mirror import scala.compiletime._ From a1882e1edc5e04a7e16200354ff161a4533b1009 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 7 Aug 2024 21:38:02 +0200 Subject: [PATCH 459/465] Revert "Compensate loss of transitivity" This reverts commit 8a41389dd4ea6c15f2089519ac5883c4c72a0c56. --- .../src/dotty/tools/dotc/typer/Implicits.scala | 17 ++--------------- tests/pos/given-owner-disambiguate.scala | 13 ------------- 2 files changed, 2 insertions(+), 28 deletions(-) delete mode 100644 tests/pos/given-owner-disambiguate.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 90e8c832dd87..5ca5ac5bb59d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1385,6 +1385,7 @@ trait Implicits: def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2, disambiguate = true) + // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens else if diff == 0 && alt2.isExtension then @@ -1636,21 +1637,7 @@ trait Implicits: validateOrdering(ord) throw ex - val sorted = sort(eligible) - val res = sorted match - case first :: rest => - val firstIsImplicit = first.ref.symbol.is(Implicit) - if 
rest.exists(_.ref.symbol.is(Implicit) != firstIsImplicit) then - // Mixture of implicits and givens - // Rank implicits first, then, if there is a given that it better than the best implicit(s) - // switch over to givens. - val (sortedImplicits, sortedGivens) = sorted.partition(_.ref.symbol.is(Implicit)) - val implicitResult = rank(sortedImplicits, NoMatchingImplicitsFailure, Nil) - rank(sortedGivens, implicitResult, Nil) - else - rank(sorted, NoMatchingImplicitsFailure, Nil) - case _ => - NoMatchingImplicitsFailure + val res = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) // Issue all priority change warnings that can affect the result val shownWarnings = priorityChangeWarnings.toList.collect: diff --git a/tests/pos/given-owner-disambiguate.scala b/tests/pos/given-owner-disambiguate.scala deleted file mode 100644 index f0a44ecc441a..000000000000 --- a/tests/pos/given-owner-disambiguate.scala +++ /dev/null @@ -1,13 +0,0 @@ -class General -class Specific extends General - -class LowPriority: - given a:General() - -object NormalPriority extends LowPriority: - given b:Specific() - -def run = - import NormalPriority.given - val x = summon[General] - val _: Specific = x // <- b was picked \ No newline at end of file From d72e8e0421524389b209fcd65eb656cf3fc0d385 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 8 Aug 2024 10:20:09 +0200 Subject: [PATCH 460/465] Add changelog for 3.5.0-RC7 --- changelogs/3.5.0-RC7.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 changelogs/3.5.0-RC7.md diff --git a/changelogs/3.5.0-RC7.md b/changelogs/3.5.0-RC7.md new file mode 100644 index 000000000000..dab10f60b1ee --- /dev/null +++ b/changelogs/3.5.0-RC7.md @@ -0,0 +1,15 @@ +# Backported fixes + +- Backport "Fix healAmbiguous to compareAlternatives with disambiguate = true" to 3.5.0 [#21344](https://github.com/scala/scala3/pull/21344) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git 
shortlog -sn --no-merges 3.5.0-RC6..3.5.0-RC7` these are: + +``` + 5 Martin Odersky + 4 Wojciech Mazur + 2 Eugene Flesselle +``` From 19534dbf7252d003ae5e9044f981773ab653a955 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 8 Aug 2024 10:20:36 +0200 Subject: [PATCH 461/465] Release 3.5.0-RC7 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index e1a61d82aca7..3ab5aa77bf15 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,7 @@ object Build { val referenceVersion = "3.4.2" - val baseVersion = "3.5.0-RC6" + val baseVersion = "3.5.0-RC7" // LTS or Next val versionLine = "Next" From 180deab26a55b1e8936b2fc6fb0e48eb4af21bf4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Mon, 12 Aug 2024 15:04:35 +0200 Subject: [PATCH 462/465] Add changelog for 3.5.0 --- changelogs/3.5.0.md | 278 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 278 insertions(+) create mode 100644 changelogs/3.5.0.md diff --git a/changelogs/3.5.0.md b/changelogs/3.5.0.md new file mode 100644 index 000000000000..654a19b169a8 --- /dev/null +++ b/changelogs/3.5.0.md @@ -0,0 +1,278 @@ +# Highlights of the release + +- Bundle scala-cli in scala command (For RC1 requires JVM 17, further RCs will use native launchers) +- Introduce Best Effort compilation options [#17582](https://github.com/lampepfl/dotty/pull/17582) +- Add support for Pipelined builds [#18880](https://github.com/lampepfl/dotty/pull/18880) +- Add support for `var` in refinements [#19982](https://github.com/lampepfl/dotty/pull/19982) +- Implement SIP-42 - Support for binary integer literals [#19405](https://github.com/lampepfl/dotty/pull/19405) + +# Other changes and fixes + +## Backend + +- Fix Closure span assignment in makeClosure [#15841](https://github.com/lampepfl/dotty/pull/15841) + +## Default parameters + +- Fix default args lookup for given classes 
[#20256](https://github.com/lampepfl/dotty/pull/20256) +- Fix implicit search failure reporting [#20261](https://github.com/lampepfl/dotty/pull/20261) + +## Derivation + +- Fix infinite loop in Mirror synthesis of unreducible match type [#20133](https://github.com/lampepfl/dotty/pull/20133) + +## Desugaring + +- Add explanation to checkCaseClassInheritanceInvariant error msg [#20141](https://github.com/lampepfl/dotty/pull/20141) + +## Exports + +- Add annotations in parameters for exports [#20140](https://github.com/lampepfl/dotty/pull/20140) +- Fix isAliasType [#20195](https://github.com/lampepfl/dotty/pull/20195) + +## Implicits + +- Fix implicitNotFound message for type aliases [#19343](https://github.com/lampepfl/dotty/pull/19343) +- Normalize types before collecting parts determining implicit scope [#20077](https://github.com/lampepfl/dotty/pull/20077) +- Better error diagnostics under -explain-cyclic [#20251](https://github.com/lampepfl/dotty/pull/20251) +- Update unreducible match types error reporting [#19954](https://github.com/lampepfl/dotty/pull/19954) +- Improve ConstraintHandling of SkolemTypes [#20175](https://github.com/lampepfl/dotty/pull/20175) + +## Incremental Compilation + +- Retain default parameters with `export` [#20167](https://github.com/lampepfl/dotty/pull/20167) + +## Inline + +- Fix by-name parameter in beta-reduction [#20096](https://github.com/lampepfl/dotty/pull/20096) +- Add warning for anonymous inline classes (#16723) [#20291](https://github.com/lampepfl/dotty/pull/20291) +- Avoid conversion of `Unit` type into `()` term [#20295](https://github.com/lampepfl/dotty/pull/20295) +- Type desugared `transparent inline def unapply` call in the correct mode [#20108](https://github.com/lampepfl/dotty/pull/20108) +- Regression: fix compilation performance on Windows [#20193](https://github.com/lampepfl/dotty/pull/20193) +- Fix inline match on blocks with multiple statements [#20125](https://github.com/lampepfl/dotty/pull/20125) +- Inline 
`unapply`s in the inlining phase [#19382](https://github.com/lampepfl/dotty/pull/19382) +- Fix outerSelect in Inliner [#20313](https://github.com/lampepfl/dotty/pull/20313) + +## Linting + +- Fix #20146: attach the original name if there is an import selection for an indent [#20163](https://github.com/lampepfl/dotty/pull/20163) +- Add regression test for issue 18632 [#20308](https://github.com/lampepfl/dotty/pull/20308) + +## Match Types + +- Make aliases of `MatchAlias`es normal `TypeAlias`es [#19871](https://github.com/lampepfl/dotty/pull/19871) +- Fix #19746: Do not follow param term refs in `isConcrete`. [#20015](https://github.com/lampepfl/dotty/pull/20015) +- Do match type reduction atPhaseNoLater than ElimOpaque [#20017](https://github.com/lampepfl/dotty/pull/20017) +- Do not flag match types as `Deferred` and amend #20077 [#20147](https://github.com/lampepfl/dotty/pull/20147) +- Always use baseType when constraining patternTp with scrutineeTp [#20032](https://github.com/lampepfl/dotty/pull/20032) +- Use `MirrorSource.reduce` result for `companionPath` [#20207](https://github.com/lampepfl/dotty/pull/20207) +- Regression: Fix match type extraction of a MatchAlias [#20111](https://github.com/lampepfl/dotty/pull/20111) +- Revert "Approximate MatchTypes with lub of case bodies, if non-recursive" in 3.5.0 [#21266](https://github.com/scala/scala3/pull/21266) + +## Polyfunctions + +- Discard poly-functions when trying to resolve overloading [#20181](https://github.com/lampepfl/dotty/pull/20181) + +## Presentation Compiler + +- Stabilise returned completions by improving deduplication + extra completions for constructors [#19976](https://github.com/lampepfl/dotty/pull/19976) +- Fix active param index for empty param lists [#20142](https://github.com/lampepfl/dotty/pull/20142) +- Delias type members in hover [#20173](https://github.com/lampepfl/dotty/pull/20173) +- Interactive: handle context bounds in extension construct workaround 
[#20201](https://github.com/lampepfl/dotty/pull/20201) +- Fix: prefer non-export definition locations [#20252](https://github.com/lampepfl/dotty/pull/20252) +- Don't show enum completions in new keyword context [#20304](https://github.com/lampepfl/dotty/pull/20304) +- Chore: Backport changes for presentation compiler [#20345](https://github.com/lampepfl/dotty/pull/20345) +- Add custom matchers for completions (fuzzy search for presentation compiler) [#19850](https://github.com/lampepfl/dotty/pull/19850) + +## Quotes + +- Fix TermRef prefixes not having their type healed [#20102](https://github.com/lampepfl/dotty/pull/20102) +- Improve reporting in staging about the possible use of an incorrect class loader [#20137](https://github.com/lampepfl/dotty/pull/20137) +- Introduce MethodTypeKind to quotes reflection API [#20249](https://github.com/lampepfl/dotty/pull/20249) +- Add quote ASTs to TASTy [#20165](https://github.com/lampepfl/dotty/pull/20165) + +## Reflection + +- Allow to beta reduce curried function applications in quotes reflect [#18121](https://github.com/lampepfl/dotty/pull/18121) +- Set the inlining phase in the Context used for checking macro trees [#20087](https://github.com/lampepfl/dotty/pull/20087) +- Add Symbol.isSuperAccessor to reflection API [#13388](https://github.com/lampepfl/dotty/pull/13388) +- Stabilize reflect `SymbolMethods.isSuperAccessor` [#20198](https://github.com/lampepfl/dotty/pull/20198) + +## Repl + +- Fix validity period of derived SingleDenotations [#19983](https://github.com/lampepfl/dotty/pull/19983) +- Fix #18383: Never consider top-level `import`s as unused in the repl. 
[#20310](https://github.com/lampepfl/dotty/pull/20310) + +## Reporting + +- Warn if extension receiver already has member [#17543](https://github.com/lampepfl/dotty/pull/17543) +- Deprecation of case class elements [#17911](https://github.com/lampepfl/dotty/pull/17911) +- Support src filter in -WConf (Closes #17635) [#18783](https://github.com/lampepfl/dotty/pull/18783) +- Add note about type mismatch in automatically inserted apply argument [#20023](https://github.com/lampepfl/dotty/pull/20023) +- Make error reporting resilient to exception thrown while reporting [#20158](https://github.com/lampepfl/dotty/pull/20158) +- Remove duplicate comma from Matchable selector warning [#20159](https://github.com/lampepfl/dotty/pull/20159) +- Generalize warnings for top-level calls to Any or AnyRef methods [#20312](https://github.com/lampepfl/dotty/pull/20312) +- Make CheckUnused not slow. [#20321](https://github.com/lampepfl/dotty/pull/20321) +- Bring back ambiguity filter when we report an implicit not found error [#20368](https://github.com/scala/scala3/pull/20368) +- Treat 3.5-migration the same as 3.5 for a warning about implicit priority change [#20436](https://github.com/scala/scala3/pull/20436) +- Priority warning fix alternative [#20487](https://github.com/scala/scala3/pull/20487) +- Use final result type to check selector bound [#20989](https://github.com/scala/scala3/pull/20989) +- Refine implicit priority change warnings [#21045](https://github.com/scala/scala3/pull/21045) +- Backport "Fix healAmbiguous to compareAlternatives with disambiguate = true" to 3.5.0 [#21344](https://github.com/scala/scala3/pull/21344) + +## Rewrites + +- Patch indentation when removing braces (and other bug fixes in `-indent -rewrite`) [#17522](https://github.com/lampepfl/dotty/pull/17522) +- Extra check to avoid converting block expressions on the rhs of an in… [#20043](https://github.com/lampepfl/dotty/pull/20043) + +## Scaladoc + +- Fix scaladoc crash on Windows - illegal path 
character [#20311](https://github.com/lampepfl/dotty/pull/20311)
+- Scaladoc: improve refined function types rendering [#20333](https://github.com/lampepfl/dotty/pull/20333)
+- Relax font-weight reset [#20348](https://github.com/lampepfl/dotty/pull/20348)
+
+## Scala JS
+
+- Optimize main.js [#20093](https://github.com/lampepfl/dotty/pull/20093)
+
+## Settings
+
+- Lift Scala Settings from experimental to stabilized [#20199](https://github.com/lampepfl/dotty/pull/20199)
+
+## Tooling
+
+- Detect macro dependencies that are missing from the classloader [#20139](https://github.com/lampepfl/dotty/pull/20139)
+- Write pipelined tasty in parallel. [#20153](https://github.com/lampepfl/dotty/pull/20153)
+- ConsoleReporter sends INFO to stdout [#20328](https://github.com/lampepfl/dotty/pull/20328)
+- Bundle scala-cli in scala command [#20351](https://github.com/scala/scala3/pull/20351)
+- Adapt the workflow to release on SDKMAN! [#20535](https://github.com/scala/scala3/pull/20535)
+- Adapt the release workflow to SIP-46 [#20565](https://github.com/scala/scala3/pull/20565)
+- Release .zip instead of .tar.gz for windows in sdkman [#20630](https://github.com/scala/scala3/pull/20630)
+- SIP 46 - read classpath from file, remove lib directory in distribution [#20631](https://github.com/scala/scala3/pull/20631)
+- Bump scala-cli to 1.4.0 [#20859](https://github.com/scala/scala3/pull/20859)
+- Add --skip-cli-updates by default to the scala command [#20900](https://github.com/scala/scala3/pull/20900)
+- Use pathing jars in cli commands [#21121](https://github.com/scala/scala3/pull/21121)
+- expand classpath of pathing jars in scala_legacy command [#21160](https://github.com/scala/scala3/pull/21160)
+- emit generatedNonLocalClass in backend when callback is not enabled [#21186](https://github.com/scala/scala3/pull/21186)
+
+## Transform
+
+- Fix overloaded default methods test in RefChecks 
[#20218](https://github.com/lampepfl/dotty/pull/20218)
+- Fix handling of AppliedType aliases in outerPrefix [#20190](https://github.com/lampepfl/dotty/pull/20190)
+- Elide unit binding when beta-reducing [#20085](https://github.com/lampepfl/dotty/pull/20085)
+
+## Typer
+
+- Reduce projections of type aliases with class type prefixes [#19931](https://github.com/lampepfl/dotty/pull/19931)
+- Re-lub also hard union types in simplify [#20027](https://github.com/lampepfl/dotty/pull/20027)
+- Fix #19789: Merge same TypeParamRef in orDominator [#20090](https://github.com/lampepfl/dotty/pull/20090)
+- Allow SAM types to contain match alias refinements [#20092](https://github.com/lampepfl/dotty/pull/20092)
+- Don't dealias when deciding which arguments to defer [#20116](https://github.com/lampepfl/dotty/pull/20116)
+- Avoid the TypeVar.inst trap [#20160](https://github.com/lampepfl/dotty/pull/20160)
+- Avoid crash when superType does not exist after erasure [#20188](https://github.com/lampepfl/dotty/pull/20188)
+- Refine overloading and implicit disambiguation [#20084](https://github.com/lampepfl/dotty/pull/20084)
+- Refactor constant folding of applications [#20099](https://github.com/lampepfl/dotty/pull/20099)
+- Rollback constraints if `isSameType` failed second direction [#20109](https://github.com/lampepfl/dotty/pull/20109)
+- Suppress "extension method will never be selected" for overrides [#20164](https://github.com/lampepfl/dotty/pull/20164)
+- Allow SAM types to contain multiple refinements [#20172](https://github.com/lampepfl/dotty/pull/20172)
+- Normalize when verifying if TypeTestCasts are unchecked [#20258](https://github.com/lampepfl/dotty/pull/20258)
+- Avoid stacked thisCall contexts [#20488](https://github.com/scala/scala3/pull/20488)
+- fix issue 20901: etaCollapse context bound type [#20910](https://github.com/scala/scala3/pull/20910)
+- Fix symbol reference retrieval of `scala.caps.Caps` [#20493](https://github.com/scala/scala3/pull/20493)
+
+# 
Experimental Changes
+
+- Named tuples second implementation [#19174](https://github.com/lampepfl/dotty/pull/19174)
+- Change rules for given prioritization [#19300](https://github.com/lampepfl/dotty/pull/19300)
+- Enable experimental mode when experimental feature is imported [#19807](https://github.com/lampepfl/dotty/pull/19807)
+- Add message parameter to `@experimental` annotation [#19935](https://github.com/lampepfl/dotty/pull/19935)
+- Implement match type amendment: extractors follow aliases and singletons [#20161](https://github.com/lampepfl/dotty/pull/20161)
+- Avoid forcing whole package when using -experimental [#20409](https://github.com/scala/scala3/pull/20409)
+
+## Capture Checking
+
+- Carry and check universal capability from parents correctly [#20004](https://github.com/lampepfl/dotty/pull/20004)
+- Make parameter types of context functions inferred type trees [#20155](https://github.com/lampepfl/dotty/pull/20155)
+- Handle reach capabilities correctly in dependent functions [#20203](https://github.com/lampepfl/dotty/pull/20203)
+- Fix the visibility check in `markFree` [#20221](https://github.com/lampepfl/dotty/pull/20221)
+- Make inline proxy vals have inferred types [#20241](https://github.com/lampepfl/dotty/pull/20241)
+- CC: Give more info when context function parameters leak [#20244](https://github.com/lampepfl/dotty/pull/20244)
+- Plug soundness hole for reach capabilities [#20051](https://github.com/lampepfl/dotty/pull/20051)
+- Tighten the screws a bit more to seal the soundness hole for reach capabilities [#20056](https://github.com/lampepfl/dotty/pull/20056)
+- Drop retains annotations in inferred type trees [#20057](https://github.com/lampepfl/dotty/pull/20057)
+- Allow @retains arguments to be context functions [#20232](https://github.com/lampepfl/dotty/pull/20232)
+- Fix conversion of this.fld capture refs under separate compilation [#20238](https://github.com/lampepfl/dotty/pull/20238)
+
+## Erased definitions
+
+- Fix "Compiler 
crash when using CanThrow" [#20210](https://github.com/lampepfl/dotty/pull/20210) +- Only allow erased parameters in erased definitions [#19686](https://github.com/lampepfl/dotty/pull/19686) + +## Initialization + +- Deprecate `StandardPlugin.init` in favor of `initialize` method taking implicit Context [#20330](https://github.com/lampepfl/dotty/pull/20330) +- Fix missing changesParents in PostTyper [#20062](https://github.com/lampepfl/dotty/pull/20062) +- Special case for next field of colon colon in global init checker [#20281](https://github.com/lampepfl/dotty/pull/20281) +- Extend whitelist in global initialization checker [#20290](https://github.com/lampepfl/dotty/pull/20290) + +## Macro Annotations + +- Allow macro annotation to transform companion [#19677](https://github.com/lampepfl/dotty/pull/19677) +- Remove experimental `MainAnnotation`/`newMain` (replaced with `MacroAnnotation`) [#19937](https://github.com/lampepfl/dotty/pull/19937) + +## Nullability + +- Add flexible types to deal with Java-defined signatures under -Yexplicit-nulls [#18112](https://github.com/lampepfl/dotty/pull/18112) +- Fix #20287: Add flexible types to Quotes library [#20293](https://github.com/lampepfl/dotty/pull/20293) +- Add fromNullable to Predef for explicit nulls [#20222](https://github.com/lampepfl/dotty/pull/20222) + + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.2..3.5.0` these are: + +``` + 153 Martin Odersky + 53 Eugene Flesselle + 41 Jamie Thompson + 29 Wojciech Mazur + 25 Nicolas Stucki + 22 Sébastien Doeraene + 18 noti0na1 + 16 Matt Bovel + 13 Guillaume Martres + 11 Paweł Marks + 10 Hamza REMMAL + 9 Yichen Xu + 8 Jan Chyb + 7 Hamza Remmal + 7 Som Snytt + 6 Jędrzej Rochala + 5 Fengyun Liu + 5 dependabot[bot] + 3 Mikołaj Fornal + 2 Aviv Keller + 2 EnzeXing + 1 Chris Pado + 1 Filip Zybała + 1 Georgi Krastev + 1 Jisoo Park + 1 Katarzyna Marek + 1 Lucas Nouguier + 1 Lucy Martin + 1 
Ola Flisbäck + 1 Pascal Weisenburger + 1 Quentin Bernet + 1 Raphael Jolly + 1 Seth Tisue + 1 Stephane Bersier + 1 Tomasz Godzik + 1 Yoonjae Jeon + 1 aherlihy + 1 rochala + 1 willerf + +``` From 834c973b61848dfdd9c8a817a372e319526d7fdd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Mon, 12 Aug 2024 15:07:18 +0200 Subject: [PATCH 463/465] Release 3.5.0 --- project/Build.scala | 2 +- tasty/src/dotty/tools/tasty/TastyFormat.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 3ab5aa77bf15..047f2c0c22ea 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,7 @@ object Build { val referenceVersion = "3.4.2" - val baseVersion = "3.5.0-RC7" + val baseVersion = "3.5.0" // LTS or Next val versionLine = "Next" diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index c29ea99bcd8d..1e075efcf857 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -340,7 +340,7 @@ object TastyFormat { * is able to read final TASTy documents if the file's * `MinorVersion` is strictly less than the current value. */ - final val ExperimentalVersion: Int = 1 + final val ExperimentalVersion: Int = 0 /**This method implements a binary relation (`<:<`) between two TASTy versions. 
* From 7590f91da3f42854ba7abcc707d5487153c2b20c Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 21 Aug 2024 11:39:41 +0100 Subject: [PATCH 464/465] Update hamzaremmal/sdkman-release-action action --- .github/workflows/publish-sdkman.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index de12f81426b5..77bbebf3f846 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -46,7 +46,7 @@ jobs: - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: - - uses: hamzaremmal/sdkman-release-action@7e437233a6bd79bc4cb0fa9071b685e94bdfdba6 + - uses: hamzaremmal/sdkman-release-action@978b8cdb5f9c3b83ebdc45e0a1bf97bf17cc6280 with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} From 9da1ae80e4536a1b987f862eef634be8974ab996 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 21 Aug 2024 12:10:54 +0100 Subject: [PATCH 465/465] Update hamzaremmal/sdkman-release-action & hamzaremmal/sdkman-default-action action --- .github/workflows/publish-sdkman.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 77bbebf3f846..6f10ac128b6e 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -46,7 +46,7 @@ jobs: - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: - - uses: hamzaremmal/sdkman-release-action@978b8cdb5f9c3b83ebdc45e0a1bf97bf17cc6280 + - uses: hamzaremmal/sdkman-release-action@4cb6c8cf99cfdf0ed5de586d6b38500558737e65 with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} @@ -59,7 +59,7 @@ jobs: runs-on: ubuntu-latest needs: publish steps: - - uses: hamzaremmal/sdkman-default-action@866bc79fc5bd397eeb48f9cedda2f15221c8515d + - uses: 
hamzaremmal/sdkman-default-action@f312ff69dec7c4f83b060c3df90df7ed19e2d70e with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }}