diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a17790778ef2..ad738d73a1de 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -19,7 +19,7 @@ You'll need to sign one of these before any code contributions will be accepted (Currently, the signed CLA must be posted or scanned/emailed directly to Gradleware. We're still working on our electronic signing process). 1. Download, print and fill out the CLA, entering all of the requested personal information (Name, E-Mail, Mailing Address, Telephone, Country). 2. Sign the CLA. - 3. Post the document to one of the Gradleware offices listed at http://www.gradleware.com/contact, or scan and email the document to info@gradleware.com. + 3. Post the document to one of the Gradleware offices listed at http://www.gradleware.com/contact, or scan and email the document to info@gradleware.com (please include your GitHub username in the email or letter so we know who you are). 2. Before starting to work on a feature or a fix, it's generally a good idea to open a discussion about your proposed changes on the Gradle Developer List (dev@gradle.codehaus.org). Doing so helps to ensure that: 1. You understand how your proposed changes fit with the strategic goals of the Gradle project. diff --git a/build.gradle b/build.gradle index 4de6d0fcd7ed..be0e137ded2e 100644 --- a/build.gradle +++ b/build.gradle @@ -18,6 +18,7 @@ import org.gradle.build.Install import org.gradle.build.BuildTypes import org.gradle.build.TestReportAggregator + defaultTasks 'assemble' apply plugin: 'java-base' archivesBaseName = 'gradle' @@ -110,6 +111,7 @@ apply from: "gradle/classycle.gradle" apply from: "gradle/noDependencyResolutionDuringConfiguration.gradle" apply from: "gradle/testSetup.gradle" apply from: "gradle/testGroupings.gradle" +apply from: "gradle/analytics.gradle" allprojects { group = 'org.gradle' @@ -158,6 +160,7 @@ dependencies { runtime project(':wrapper') gradlePlugins pluginProjects gradlePlugins project(':coreImpl') + gradlePlugins project(':pluginUse') } task verifyIsProductionBuildEnvironment << { diff --git a/buildSrc/src/main/groovy/org/gradle/build/docs/dsl/docbook/JavadocConverter.java b/buildSrc/src/main/groovy/org/gradle/build/docs/dsl/docbook/JavadocConverter.java index 64f27b65d07d..56293581e2e2 100644 --- a/buildSrc/src/main/groovy/org/gradle/build/docs/dsl/docbook/JavadocConverter.java +++ b/buildSrc/src/main/groovy/org/gradle/build/docs/dsl/docbook/JavadocConverter.java @@ -330,6 +330,7 @@ private HtmlElementTranslatingHandler(DocBookBuilder nodes, Document document) { elementToElementMap.put("ol", "orderedlist"); elementToElementMap.put("li", "listitem"); elementToElementMap.put("em", "emphasis"); + elementToElementMap.put("strong", "emphasis"); elementToElementMap.put("i", "emphasis"); elementToElementMap.put("b", "emphasis"); elementToElementMap.put("code", "literal"); diff --git a/buildSrc/src/test/groovy/org/gradle/build/docs/dsl/docbook/JavadocConverterTest.groovy b/buildSrc/src/test/groovy/org/gradle/build/docs/dsl/docbook/JavadocConverterTest.groovy index 3124e3b09c81..c348aa6d8614 100644 --- a/buildSrc/src/test/groovy/org/gradle/build/docs/dsl/docbook/JavadocConverterTest.groovy +++ b/buildSrc/src/test/groovy/org/gradle/build/docs/dsl/docbook/JavadocConverterTest.groovy @@ -287,6 +287,16 @@ literal code does something. 
format(result.docbook) == '''text'''
    }

+    def convertsAStrongElementToAnEmphasisElement() {
+        _ * classMetaData.rawCommentText >> '<strong>text</strong>'
+
+        when:
+        def result = parser.parse(classMetaData, listener)
+
+        then:
+        format(result.docbook) == '''<para><emphasis>text</emphasis></para>'''
+    }
+
    def convertsBAndIElementToAnEmphasisElement() {
        _ * classMetaData.rawCommentText >> '<b>text</b> <i>other</i>'

diff --git a/design-docs/component-metadata.md b/design-docs/component-metadata.md
index 243d8275bbf7..8978d07e45eb 100644
--- a/design-docs/component-metadata.md
+++ b/design-docs/component-metadata.md
@@ -132,12 +132,13 @@ Add a "changing" property to `ComponentMetadataDetails`.
 * Static dependency
 * Dynamic dependency (that is, the dependency may refer to different components over time, but the components themselves do not change)

-## Use Ivy extra attributes to determine status of module
+## Use Ivy extra info properties to determine status of module

-This story makes extra attributes defined in ivy.xml files available to component metadata rules, on request.
+An ivy.xml `<info>` element permits arbitrary child elements with string values. This story makes these extra info properties available to component metadata rules,
+on request.

-A rule should declare that these extra attributes form an input to the rule, in which case they will be provided.
-While this is perhaps not important for Ivy extra attributes, which are cheap to determine, this will be more important for
+A rule should declare that these extra info properties form an input to the rule, in which case they will be provided.
+While this is perhaps not important for Ivy properties, which are cheap to determine, this will be more important for
 Artifactory properties (see below).

 A medium-term goal is to sync the Component Metadata Rules DSL with the new general-purpose Rules DSL. So the same mechanism will be
@@ -146,59 +147,42 @@ simply attempt to introduce a DSL to declare such rules.

 ### User visible changes

-Option1: Typed ComponentMetadataDetails:
-
-    componentMetadata {
-        eachComponent { IvyModuleMetadataDetails details ->
-            if (details.ivyExtraAttributes['my-custom-attribute'] == 'value') {
-                details.status == 'release'
-            }
-        }
+    interface IvyModuleDescriptor {
+        Map<String, String> extraInfo
    }

-Option2: Additional parameters:
-
    componentMetadata {
-        eachComponent { ComponentMetadataDetails details, IvyExtraAttributes ivyAttributes ->
-            if (ivyAttributes['my-custom-attribute'] == 'value') {
+        eachComponent { ComponentMetadataDetails details, IvyModuleDescriptor ivyModule ->
+            if (ivyModule.extraInfo['my-custom-attribute'] == 'value') {
                details.status == 'release'
            }
        }
    }

-Option3: Limit rule applicability (this might be useful in addition to one of the above):
-
-    componentMetadata {
-        eachComponent.withType(IvyModule) {
-            if (it.ivyAttributes['my-custom-attribute'] == 'value') {
-                it.status == 'release'
-            }
-        }
-    }
-
-    // This could later be extended with
-    eachComponent.withType(IvyModule).matching({ group == 'myorg' }) {
-        // custom rule that only applies to my internal modules
-    }
-
### Implementation

* Add a model for Ivy-specific module metadata and make this available via `ModuleVersionMetaData`
+    * Include any name/value pairs defined as child elements of the `<info>` element. Do not include the namespace qualifier.
* The actual values should already be available (and cached) via the underlying Ivy ModuleDescriptor
-    * The API should assume that a number of custom domain metadata elements may be present
-* For any rule defined that requires Ivy extra attributes to be processed, then Ivy extra attributes will be made available as per the DSL chosen
+    * The API should assume that other metadata models may be present as well
+* For any rule that declares IvyModuleDescriptor as an input:
+    * Provide the IvyModuleDescriptor as input where the resolved module came from an ivy repository
+    * Do not execute the rule where the resolved module does not have an associated ivy.xml file

### Test coverage

-* Publish with arbitrary extra attributes, and ensure these are available in resolve.
+* Publish with arbitrary extra info properties, and ensure these are available in resolve.
* Publish again with changed values:
    * Original values are taken from the cache
    * New values are obtained when the changing module is refreshed
-* Component metadata rule does not have access to ivy extra attributes if not declared as rule input
+* Component metadata rule does not have access to ivy extra info properties if not declared as rule input
* Component metadata rule is not evaluated for non-ivy module when rule declares ivy attributes as input
* Resolve with rule that does not have ivy extra attributes as input. Modify rule to include those inputs and resolve again
  Attributes are made available to the rule (extra HTTP requests are OK, but not required).

+### Open issues
+
+
## Use Artifactory properties to determine status of module

This story makes it possible to access published Artifactory properties from within a Component Metadata Rule:

diff --git a/design-docs/continuous-delivery-for-c-plus-plus.md b/design-docs/continuous-delivery-for-c-plus-plus.md
index 66e7ab604d5f..1defc8a4050c 100644
--- a/design-docs/continuous-delivery-for-c-plus-plus.md
+++ b/design-docs/continuous-delivery-for-c-plus-plus.md
@@ -570,6 +570,7 @@ The implementation will also remove stale object files.

- Need to handle `#import` with Visual C++, which may reference a `.tld` file.
- Should not parse headers that we deem to be unchanging: 'system' libraries, unchanging repository libraries, etc.
+- Implementation currently locks the task artifact cache while compiling

## Story: Modify command line arguments for binary tool prior to execution

@@ -1121,6 +1122,8 @@ This story introduces a set of headers that are visible to all the source files

- Default location for the implementation headers
- Rename `lib()` to `dependsOn()` or similar?
+- Model 'implicit' headers: in the same directory as the source files or included via relative path.
+    - Need to make these available in the IDE and include in compile task inputs

### Story: Introduce public headers for native libraries

@@ -1239,6 +1242,7 @@ This story moves definition and configuration of the source sets for a component
 built-in tools.
- Fix `TargetPlatformConfiguration` and `PlatformToolChain` to make them extensible, so that not every configuration supports every tool.
- Gcc and Clang tool chains need to provide the correct compile and link time arguments on OS X and Linux.
+- Add test coverage on Windows

### Story: Incremental compilation for Objective-C and Objective-C++

@@ -1267,6 +1271,8 @@ this consistent with the way that tool arguments are configured in a tool chain.
* Replace `PlatformConfigurableToolChain.addPlatformConfiguration` with `PlatformConfigurableToolChain.target(Platform..., Action)`
* Replace the built-in `TargetPlatformConfiguration` actions with `Action`
* If `PlatformConfigurableToolChain.target()` is called on a tool chain, then the default target configurations are removed.
+* Documentation describes how to define a new platform.
+* Documentation describes how to configure a tool for an existing platform.

### User visible changes

@@ -1305,6 +1311,56 @@ this consistent with the way that tool arguments are configured in a tool chain.
* When no Platform architecture/os is defined, assume the current platform architecture/os, not the tool chain default.
    * This will require detecting the current platform, and supplying the correct tool chain arguments when building.

+
+## Story: Modify command line arguments for VisualCpp tool chain prior to execution
+
+Provide a 'hook' allowing the build author to control the exact set of arguments passed to a VisualCpp tool chain executable.
+This will allow a build author to work around any limitations in Gradle, or incorrect assumptions that Gradle makes.
+
+### Implementation
+
+* Change `VisualCppToolChain` to extend `ExtendableToolChain` and register `linker` and `staticLibArchiver` tools
+* Move registration of the cpp, windows-rc and assembler tools in VisualCppToolChain to the corresponding plugins
+* Extract `CommandLineToolChain` interface out of `Gcc` and introduce similar functionality to VisualCpp and Clang tool chains.
+* Move the setters/getters for tool executables into GccCommandLineToolConfiguration
+* Add corresponding documentation to the user guide/DSL reference and update the release notes
+
+### User visible changes
+
+    apply plugin: 'cpp'
+
+    model {
+        toolChains {
+            visualCpp(VisualCpp) {
+                cppCompiler.withArguments { args ->
+                    args << "-DFRENCH"
+                }
+            }
+        }
+    }
+
+
+### Test coverage
+
+* Can tweak arguments for VisualCpp, Gcc and Clang
+
+## Story: Allow configuration of tool chain executables on a per-platform basis (Gcc-based tool chains)
+
+### Implementation
+
+* In `AbstractGccCompatibleToolChain`, change the `initTools` method to configure the `configurableToolChain` after the `targetPlatformConfiguration` has been applied
+
+### Test coverage
+
+* Can use g++ instead of gcc for compiling C sources
+* Can use a custom executable
+
+### Open issues
+
+* Path configuration is currently not possible on a per-platform basis: full paths to executables must be used, or the executable must be on the
+system path.
+
+
## Story: Improved DSL for tool chain configuration

This story improves the DSL for tweaking arguments for a command-line tool that is part of a tool chain, and extends this
@@ -1316,8 +1372,7 @@ ability to all command-line based tool chains. It also permits the configuration
* Rename `GccTool` to `CommandLineTool` and change to have `withInvocation(Action)` in place of `withArguments`
* Remove tool-specific getters from `Gcc`, and instead make `Gcc` serve as a NamedDomainObjectSet of `CommandLineTool` instances.
* Continue to register a `CommandLineTool` for every supported language.
-* Allow the `eachInvocation` method to override the default executable to use.
-* Extract `CommandLineToolChain` interface out of `Gcc` and introduce similar functionality to VisualCpp and Clang tool chains.
+* Allow the `withInvocation` method to override the default executable to use.
* Add a sample, user-guide documentation and note the breaking change in the release notes.
* Consolidate various `ArgsTransformer` implementations so that most/all simply set/modify args on a `CommandLineToolInvocation`. @@ -1354,6 +1409,7 @@ ability to all command-line based tool chains. It also permits the configuration * Only to register a `CommandLineTool` for languages that are supported by build. * Need to make it easy to have centralised tool chain configuration that works regardless of languages in effect. +* Make it simpler to add support for new languages to existing tool chain implementations ## Story: Only use gcc/g++ front-ends for GCC tool chain @@ -1421,6 +1477,12 @@ This story also aggregates a bunch of review items that relate to Architecture a - How to make Platform instance immutable - Consistent API for Architecture and OperatingSystem: either public method on both [os.isWindows(),arch.isAmd64()] or only on internal api. - Include ABI in architecture so that the correct prebuilt library can be selected for a tool chain +- When no Platform architecture/os is defined, assume the current platform architecture/os, not the tool chain default. + - This will require detecting the current platform, and supplying the correct tool chain arguments when building. + - We can then remove the concept of the "tool chain default" platform, and always explicitly tell the tool chain which platform to build for. +- For GCC, need to probe the output of the compiler to determine exactly what is being created +- For Clang, need to provide the full `-target ` to define the exact output, or provide a subset and probe the output. + - see http://clang.llvm.org/docs/CrossCompilation.html ## Story: Include all macro definitions in Visual Studio project configuration @@ -1878,6 +1940,11 @@ TBD - Don't create compile tasks for empty source sets - Compile windows resource files with gcc/clang using [`windres`](http://sourceware.org/binutils/docs/binutils/windres.html) +### Incremental compile + +- Perform incremental compile when header is included via simple macro definition +- Keep a separate, build-scoped cache of file -> parsed includes. This would prevent need for reparsing per-variant and per-component. +- Detect changes to headers that are implicit on the include path via the tool chain ## Target platforms @@ -1913,11 +1980,14 @@ TBD - A component packaging that satisfies multiple points in the variant space. - Use `lipo` to merge two binaries into a single universal binary. - Transforms the meta-data for the component - same set of variants but different set of binaries. +- Use separate directories for output binaries, rather than encoding all dimensions in the name: eg `flavor/platform/buildType/myLib.so` ## Toolchains - DSL to declare that a toolchain supports certain target platform, and how to invoke the tools to do so. - Introduce `XCode`, `Cygwin`, `MinGW` toolchains, to allow selection of specific gcc or clang implementations. + - Use the `XCode` tool chain to determine mac-specific gcc args + - Use the `Cygwin` and `MinGW` toolchains to provide additional path requirements for `InstallExecutable` task toolchains { gcc { @@ -1936,6 +2006,8 @@ TBD } } +- Prevent configuration of tool chains after availability has been determined. + ## Structure - Some common plugin that determines which tool-chains to apply @@ -1953,6 +2025,11 @@ TBD - Need to make standard 'build', 'check' lifecycle tasks available too. The `assemble` task should build all buildable variants. - Reasonable behaviour when nothing is buildable on the current machine. 
- Come up with consistent naming scheme for language plugins: 'cpp', 'c', 'assembler', 'java-lang', 'scala-lang', etc +- Windows resources + - Automatically add `/noentry` and `/machine` linker args when building resource-only DLL + - Actually inspect the binary to determine if there are any exported symbols + - Use a model rule to determine if library sources contain windows resources + ### Performance @@ -1967,8 +2044,16 @@ TBD - Better way to see how the compiler is being invoked - Make names less important +## Test coverage + +- Update the UnknownOS CI build to run on java 7, and remove the "CAN_INSTALL_EXECUTABLE" test requirement +- Integration test coverage for 64-bit assembler on all platforms/tool chains. +- Verify that the correct windows system libraries are available at link time + - Use a test app that uses something like WriteFile() to write its hello world message to stdout. If it can link and run, then the paths are probably ok. + # Open issues +* For incremental build with visual c++, use `dumpbin /RAWDATA` to strip timestamps from binary files before comparison * Add ABI as an aspect of target platform. * Output of any custom post link task should be treated as input to anything that depends on the binary. * Route stdout to info level when linking a shared library using visual studio, to get rid of the pointless notice. @@ -2003,3 +2088,6 @@ TBD * JNI plugin generates native header, and sets up the JNI stuff in $java.home as a platform library. * Model minimum OS version. * For OS X can use -mmacosx-version-min option. +* Clean task for a binary +* Update CDT support to match Visual Studio support +* Rename 'install' task to indicate that it's installing a developer image diff --git a/design-docs/dependency-management-bug-fixes.md b/design-docs/dependency-management-bug-fixes.md index 76694f9c87d8..1d4c70cfd892 100644 --- a/design-docs/dependency-management-bug-fixes.md +++ b/design-docs/dependency-management-bug-fixes.md @@ -3,32 +3,6 @@ This feature is really a bucket for key things we want to fix in the short-term As this 'feature' is a list of bug fixes, this feature spec will not follow the usual template. -# GRADLE-2861 Handle parent pom with unknown placeholders - -See [GRADLE-2861](http://issues.gradle.org/browse/GRADLE-2861) - -Currently, the POM parser (inherited from Ivy) attaches special extra attributes to the `ModuleDescriptor` for a POM. These are later used by the POM parser -when it parses a child POM. Sometimes these attributes cause badly formed XML to be generated, hence the failure listed in the jira issue. - -The solution is to have the parser request the parent POM artifact directly, rather than indirectly via the module meta-data: - -1. Add a `LocallyAvailableExternalResource getArtifact(Artifact)` method to `DescriptorParseContext`. - - Implementation can reuse the `ModuleVersionResolveResult` from the existing `getModuleDescriptor()` method. This result includes an `ArtifactResolver` which - can be used to resolve an `Artifact` to a `File`. There's an example of how to adapt a `File` to a `LocallyAvailableExternalResource` instance in - `AbstractModuleDescriptorParser.parseMetaData()`. -2. Change the `GradlePomModuleDescriptorParser.parseOtherPom()` to use this new method to fetch and parse the parent POM artifact, rather than using the parsed - `ModuleDescriptor` for the parent. For this step, can continue to represent the parent pom using a `ModuleDescriptor` inside the parser. -3. 
Change `GradlePomModuleDescriptorParser` to represent the parent POM using a `PomReader` rather than a `ModuleDescriptor`. -4. Clean out `GradlePomModuleDescriptorBuilder` so that it no longer defines any extra properties on the parsed `ModuleDescriptor`. -5. Change `IvyXmlModuleDescriptorParser.parseOtherIvyFile()` to use the new method to fetch and parse the Ivy descriptor artifact. -6. Remove `DescriptorParseContext.getModuleDescriptor()`. It should no longer be required. - -## Test coverage - -* Unignore the existing test case in `BadPomFileResolveIntegrationTest`. -* Add a test case to `MavenParentPomResolveIntegrationTest` to cover two Maven modules that share a common parent. -* Add a test case to `MavenParentPomResolveIntegrationTest` to cover a Maven module that has a parent and grandparent module. - # Conflict resolution considers conflicts on production classes See [this post](http://forums.gradle.org/gradle/topics/npe_in_dependencygraphbuilder_dependencyedge_getfailure) @@ -54,65 +28,6 @@ See [GRADLE-2516](http://issues.gradle.org/browse/GRADLE-2516) - Change the local dependencies (eg `gradleApi()`, `localGroovy()`) to imply the various modules that they contribute to the result. -# Latest status dynamic versions work across multiple repositories - -See [GRADLE-2502](http://issues.gradle.org/browse/GRADLE-2502) - -### Test coverage - -1. Using `latest.integration` - 1. Empty repository fails with not found. - 2. Publish `1.0` and `1.1` with status `integration`. Resolves to `1.1`. - 3. Publish `1.2` with status `release`. Resolves to `1.2` - 4. Publish `1.3` with no ivy.xml. Resolves to `1.3`. -2. Using `latest.milestone` - 1. Empty repository fails with not found. - 2. Publish `2.0` with no ivy.xml. Fails with not found. - 3. Publish `1.3` with status `integration`. Fails with not found. - 4. Publish `1.0` and `1.1` with ivy.xml and status `milestone`. Resolves to `1.1`. - 5. Publish `1.2` with status `release`. Resolves to `1.2` -3. Using `latest.release` - 1. Empty repository fails with not found. - 2. Publish `2.0` with no ivy.xml. Fails with not found. - 3. Publish `1.3` with status `milestone`. Fails with not found. - 4. Publish `1.0` and `1.1` with ivy.xml and status `release`. Resolves to `1.1`. -4. Multiple repositories. -5. Checking for changes. Using `latest.release` - 1. Publish `1.0` with status `release` and `2.0` with status `milestone`. - 2. Resolve and assert directory listing and `1.0` artifacts downloaded. - 3. Resolve and assert directory listing downloaded. - 4. Publish `1.1` with status `release`. - 5. Resolve and assert directory listing and `1.1` artifacts downloaded. -6. Maven integration - 1. Publish `1.0`. Check `latest.integration` resolves to `1.0` and `latest.release` fails with not found. - 2. Publish `1.1-SNAPSHOT`. Check `latest.integration` resolves to `1.1-SNAPSHOT` and `latest.release` fails with not found. -7. Version ranges -8. Repository with multiple patterns. -9. Repository with `[type]` in pattern before `[revision]`. -10. Multiple dynamic versions match the same remote revision. - -### Implementation strategy - -Change ExternalResourceResolver.getDependency() to use the following algorithm: -1. Calculate an ordered list of candidate versions. - 1. For a static version selector the list contains a single candidate. - 2. For a dynamic version selector the list is the full set of versions for the module. - * For a Maven repository, this is determined using `maven-metadata.xml` if available, falling back to a directory listing. 
- * For an Ivy repository, this is determined using a directory listing. - * Fail if directory listing is not available. -2. For each candidate version: - 1. If the version matcher does not accept the module version, continue. - 2. Fetch the module version meta-data, as described below. If not found, continue. - 3. If the version matcher requires the module meta-data and it does not accept the meta-data, continue. - 4. Use the module version. -3. Return not found. - -To fetch the meta-data for a module version: -1. Download the meta data descriptor resource, via the resource cache. If found, parse. - 1. Validate module version in meta-data == the expected module version. -2. Check for a jar artifact, via the resource cache. If found, use default meta-data. The meta-data must have `default` set to `true` and `status` set to `integration`. -3. Return not found. - # Correctness issues in HTTP resource caching * GRADLE-2328 - invalidate cached HTTP/HTTPS resource when user credentials change. @@ -171,59 +86,6 @@ TODO - flesh this out TODO - flesh this out -# Correct handling of packaging and dependency type declared in poms - -* GRADLE-2188: Artifact not found resolving dependencies with packaging/type "orbit" - -### Description -Our engine for parsing Maven pom files is borrowed from ivy, and assumes the 'packaging' element equals the artifact type, with a few exceptions (ejb, bundle, eclipse-plugin, maven-plugin). -This is different from the way Maven does the calculation, which is: -* Type defaults to 'jar' but can be explicitly declared. -* Maven maps the type to an [extension, classifier] combination using some hardcoded rules. Unknown types are mapped to [type, ""]. -* To resolve the artefact, maven looks for an artefact with the given artifactId, version, classifier and extension. - -### Strategic solution - -At present, our model of an Artifact is heavily based on ivy; for this fix we can introduce the concept of mapping between our internal model and a repository-centric -artifact model. This will be a small step toward an independent Gradle model of artifacts, which then maps to repository specific things link extension, classifier, etc. - -### User visible changes - -* When the dependency declaration has no 'type' specified, or a 'type' that maps to the extension 'jar' - * Resolution of a POM module with packaging in ['', 'pom', 'jar', 'ejb', 'bundle', 'maven-plugin', 'eclipse-plugin'] will not change - * Resolution of a POM with packaging 'foo' that maps to 'module.foo', a deprecation warning will be emitted and the artifact 'module.foo' will be used - * Resolution of a POM with packaging 'foo' that maps to 'module.jar', the artifact 'module.jar' will be successfully found. (ie 'orbit'). An extra HTTP - request will be required to first look for 'module.foo'. -* When the dependency declaration has a 'type' specified that maps to an extension 'ext' (other than 'jar') - * Resolution of a POM module with packaging in ['pom', 'jar', 'ejb', 'bundle', 'maven-plugin', 'eclipse-plugin'] will emit a deprecation warning before using 'module.jar' if it exists - * Resolution of a POM with packaging 'foo' and actual artifact 'module.foo', a deprecation warning will be emitted and the artifact 'module.foo' will be used - * Resolution of a POM with packaging 'foo' and actual artifact 'module.ext', the artifact 'module.ext' will be successfully found. An extra HTTP - request will be required to first look for 'module.foo'. 
- -### Integration test coverage - -* Coverage for resolving pom dependencies referenced in various ways: - * Need modules published in maven repositories with packaging = ['', 'pom', 'jar', 'war', 'eclipse-plugin', 'custom'] - * Test resolution of artifacts in these modules via - 1. Direct dependency in a Gradle project - 2. Transitive dependency in a maven module (pom) which is itself a dependency of a Gradle project - 3. Transitive dependency in an ivy module (ivy.xml) which is itself a dependency of a Gradle project - * For 1. and 2., need dependency declaration with and without type attribute specified - * Must verify that deprecation warning is logged appropriately -* Sad-day coverage for the case where neither packaging nor type can successfully locate the maven artifact. Error message should report 'type'-based location. - -### Implementation approach - -* Determine 2 locations for the primary artifact: - * The 'packaging' location: apply the current logic to determine location from module packaging attribute - * Retain current packaging->extension mapping for specific packaging types - * The 'type' location: Use maven3 rules to map type->extension+classifier, and construct a location -* If both locations are the same, use the artifact at that location. -* If not, look for the artifact in the packaging location - * If found, emit a deprecation warning and use that location - * If not found, use the artifact from the type location -* In 2.0, we will remove the packaging->extension mapping and the deprecation warning - # RedHat finishes porting gradle to fedora * GRADLE-2210: Migrate to maven 3 @@ -280,7 +142,6 @@ And a test that regular resolve succeeds from http repository when settings.xml # Allow resolution of java-source and javadoc types from maven repositories (and other types: tests, ejb-client) -* GRADLE-201: Enable support for retrieving source artifacts of a module * GRADLE-1444: Sources are not downloaded when dependency is using a classifier * GRADLE-2320: Support for multiple artifacts with source jars in Eclipse plugin @@ -361,11 +222,6 @@ Until we map these types into the ivy repository model as well: * GRADLE-2211: Resolved binary executables and libraries do not use the platform specific naming scheme -# Handle pom-only modules in mavenLocal - -* GRADLE-2034: Existence of pom file requires that declared artifacts can be found in the same repository -* GRADLE-2369: Dependency resolution fails for mavenLocal(), mavenCentral() if artifact partially in mavenLocal() - # Support for kerberos and custom authentication * GRADLE-2335: Provide the ability to implement a custom HTTP authentication scheme for repository access diff --git a/design-docs/dependency-management-for-jvm-components.md b/design-docs/dependency-management-for-jvm-components.md new file mode 100644 index 000000000000..8d160c576f95 --- /dev/null +++ b/design-docs/dependency-management-for-jvm-components.md @@ -0,0 +1,630 @@ + +This spec describes some work to allow plugins to define the kinds of components that they produce and consume. + +## A note on terminology + +There is currently a disconnect in the terminology used for the dependency management component model, and that used +for the component model provided by the native plugins. + +The dependency management model uses the term `component instance` or `component` to refer to what is known as a `binary` +in the native model. 
A `component` in the native model doesn't really have a corresponding concept in the dependency
+management model (a `module` is the closest we have, and this is not the same thing).
+
+Part of the work for this spec is to unify the terminology. This is yet to be defined.
+
+For now, this spec uses the terminology from the native component model, using `binary` to refer to what is also
+known as a `component instance` or `variant`.
+
+# Features
+
+## Feature: Build author creates a JVM library with Java sources
+
+### Story: Build author defines JVM library
+
+#### DSL
+
+Project defining a single jvm library
+
+    apply plugin: 'jvm-component'
+
+    jvm {
+        libraries {
+            main
+        }
+    }
+
+Project defining multiple jvm libraries
+
+    apply plugin: 'jvm-component'
+
+    jvm {
+        libraries {
+            main
+            extra
+        }
+    }
+
+Combining native and jvm libraries in a single project
+
+    apply plugin: 'jvm-component'
+    apply plugin: 'native-component'
+
+    jvm {
+        libraries {
+            myJvmLib
+        }
+    }
+    nativeCode {
+        libraries {
+            myNativeLib
+        }
+    }
+
+#### Implementation plan
+
+- Introduce `org.gradle.jvm.JvmLibrary`
+- Rename `org.gradle.nativebinaries.Library` to `org.gradle.nativebinaries.NativeLibrary`
+    - Similar renames for `Executable`, `TestSuite` and all related classes
+- Introduce a common superclass for `Library`.
+- Extract `org.gradle.nativebinaries.LibraryContainer` out of `nativebinaries` project into `language-base` project,
+  and make it an extensible polymorphic container.
+    - Different 'library' plugins will register a factory for library types.
+- Add a `jvm-component` plugin, that:
+    - Registers support for `JvmLibrary`.
+    - Adds a single `JvmLibraryBinary` instance to the `binaries` container for every `JvmLibrary`
+    - Creates a binary lifecycle task for generating this `JvmLibraryBinary`
+    - Wires the binary lifecycle task into the main `assemble` task.
+- Rename `NativeBinariesPlugin` to `NativeComponentPlugin` with id `native-component`.
+- Move `Binary` and `ClassDirectoryBinary` to live with the runtime support classes (and not the language support classes)
+- Extract a common supertype `Application` for NativeExecutable, and a common supertype `Component` for `Library` and `Application`
+- Introduce a 'filtered' view of the ExtensiblePolymorphicDomainObjectContainer, such that only elements of a particular type are returned
+  and any element created is given that type.
+    - Add a backing 'components' container that contains all Library and Application elements
+    - Add 'jvm' and 'nativeCode' extensions for namespacing the different library containers
+    - Add 'nativeCode.libraries' and 'jvm.libraries' as filtered containers on 'components', with the appropriate library type
+    - Add 'nativeCode.executables' as a filtered view on 'components'
+    - Use the 'components' container in native code where we currently must iterate separately over 'libraries' and 'executables'
+
+#### Test cases
+
+- Can apply `jvm-component` plugin without defining a library
+    - No binaries defined
+    - No lifecycle task added
+- Define a jvm library component
+    - `JvmLibraryBinary` added to binaries container
+    - Lifecycle task available to build binary: skipped when no sources for binary
+    - Binary is buildable: can add dependent tasks which are executed when building binary
+- Define and build multiple java libraries in the same project
+    - Build library jars individually using binary lifecycle task
+    - `gradle assemble` builds a single jar for each library
+- Can combine native and JVM libraries in the same project
+    - `gradle assemble` executes lifecycle tasks for each native library and each jvm library
+
+#### Open issues
+
+- Come up with a better name for JvmLibraryBinary, or perhaps add a `JarBinary` subtype
+- Consider splitting up `assemble` into various lifecycle tasks. There are several use cases:
+    - As a developer, build me a binary I can play with or test in some way.
+    - As part of some workflow, build all binaries that should be possible to build in this specific environment. Fail if a certain binary cannot be built.
+      For example, if I'm on Windows build all the Windows variants and fail if the Windows SDK (with 64bit support) is not installed.
+      Or, if I'm building for Android, fail if the SDK is not installed.
+    - Build everything. Fail if a certain binary cannot be built.
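+
+A sketch of how the filtered views in the implementation plan above could behave from a build script. The
+`components` container and the library type names are taken from the plan and are illustrative, not final API:
+
+    apply plugin: 'jvm-component'
+    apply plugin: 'native-component'
+
+    jvm {
+        libraries {
+            myJvmLib
+        }
+    }
+    nativeCode {
+        libraries {
+            myNativeLib
+        }
+    }
+
+    // Both namespaced containers are filtered views over the same backing 'components' container
+    assert components.withType(JvmLibrary).size() == 1
+    assert components.size() == 2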
+
+### Story: Build author creates JVM library jar from Java sources
+
+When a JVM library is defined with Java language support, then the binary is built from conventional source set locations:
+
+- Has a single Java source set hardcoded to `src/myLib/java`
+- Has a single resources source set hardcoded to `src/myLib/resources`
+
+#### DSL
+
+Java library using conventional source locations
+
+    apply plugin: 'jvm-component'
+    apply plugin: 'java-lang'
+
+    jvm {
+        libraries {
+            myLib(JvmLibrary)
+        }
+    }
+
+
+Combining jvm-java and native (multi-lang) libraries in a single project
+
+    apply plugin: 'jvm-component'
+    apply plugin: 'java-lang'
+
+    apply plugin: 'native-component'
+    apply plugin: 'cpp-lang'
+    apply plugin: 'c-lang'
+
+    jvm {
+        libraries {
+            myJvmLib
+        }
+    }
+    nativeCode {
+        libraries {
+            myNativeLib
+        }
+    }
+
+#### Implementation plan
+
+- Replace the current 'java-lang' plugin with a simpler one that does not know about legacy conventions
+- For each JvmLibrary:
+    - Adds a single ResourceSet for 'src/${component}/resources'
+    - Adds a single JavaSourceSet for 'src/${component}/java'
+- Each created JvmLibraryBinary has the source sets of its JvmLibrary
+- Create a ProcessResources task for each ResourceSet for a JvmLibraryBinary
+    - copy resources to `build/classes/${binaryName}`
+- Create a CompileJava task for each JavaSourceSet for a JvmLibraryBinary
+    - compile classes to `build/classes/${binaryName}`
+- Create a Jar task for each JvmLibraryBinary
+    - produce jar file at `build/binaries/${binaryName}/${componentName}.jar`
+- Rejig the native language plugins so that '*-lang' + 'native-components' is sufficient to apply language support
+    - Existing 'cpp', 'c', etc plugins will simply apply '*-lang' and 'native-components'
+
+#### Test cases
+
+- Define and build the jar for a java library (assert jar contents for each case)
+    - With no sources or resources
+    - With sources but no resources
+    - With resources but no sources
+    - With both sources and resources
+- Reports failure to compile source
+- Compiled sources and resources are available in a common directory
+- All generated resources are removed when all resources source files are removed.
+- Can build native and JVM libraries in the same project
+    - `gradle assemble` builds each native library and each jvm library
+
+#### Open issues
+
+- Need `groovy-lang` and `scala-lang` plugins
+- All compiled classes are removed when all java source files are removed.
+
+### Story: Legacy JVM language plugins declare a jvm library
+
+#### Test cases
+
+- JVM library with name `main` is defined with any combination of `java`, `groovy` and `scala` plugins applied
+- Can build legacy jvm library jar using standard lifecycle task
+
+#### Open issues
+
+- The legacy application plugin should also declare a jvm application.
+
+## Feature: Custom plugin defines a custom library type
+
+### Story: plugin declares its own library type
+
+Add a sample plugin that declares a custom library type:
+
+    apply plugin: 'my-sample'
+
+    mySample {
+        // can use its own DSL
+        ...
+    }
+
+    // Library is also visible in libraries container
+    assert libraries.withType(SampleLibrary).size() == 1
+
+A custom library type:
+- Extends or implements some public base `Library` type.
+- Has no dependencies.
+- Produces no artifacts.
+
+### Story: Custom library produces custom binaries
+
+Change the sample plugin so that it declares its own binary type for the libraries it defines:
+
+    apply plugin: 'my-sample'
+
+    mySample {
+        // can use its own DSL
+        ...
+    }
+
+    // Binaries are also visible in the binaries container
+    assert binaries.withType(SampleBinary).size() == 2
+
+Allow a plugin to declare the binaries for a custom library.
+
+A custom binary:
+- Extends or implements some public base `LibraryBinary` type.
+- Has some lifecycle task to build its outputs.
+
+Running `gradle assemble` will build each library binary.
+
+### Story: Custom binary is built from Java sources
+
+Change the sample plugin so that it compiles Java source to produce its binaries.
+
+- Uses the same conventions as a Java library.
+- No dependencies.
+
+## Feature: Build author declares that a Java library depends on a Java library produced by another project
+
+For example:
+
+    apply plugin: 'new-java'
+
+    jvm {
+        libraries {
+            myLib {
+                dependencies {
+                    project 'other-project' // Infer the target library
+                    project 'other-project' library 'my-lib'
+                }
+            }
+        }
+    }
+
+When the project attribute refers to a project with a component plugin applied:
+
+- Select the target library from the libraries of the project. Assert that there is exactly one matching JVM library.
+- At compile time, include the library's jar binary only.
+- At runtime, include the library's jar binary and runtime dependencies.
+
+When the project attribute refers to a project without a component plugin applied:
+
+- At compile time and runtime, include the artifacts and dependencies from the `default` configuration.
+
+### Open issues
+
+- Should be able to depend on a library in the same project.
+- Need an API to query the various classpaths.
+- Need to be able to configure the resolution strategy for each usage.
+
+## Feature: Build author declares that a Java library depends on an external Java library
+
+For example:
+
+    apply plugin: 'java-components'
+
+    jvm {
+        libraries {
+            myLib {
+                dependencies {
+                    library "myorg:mylib:2.3"
+                }
+            }
+        }
+    }
+
+This makes the jar of `myorg:mylib:2.3` and its dependencies available at both compile time and runtime.
+
+### Open issues
+
+- Using `library "some:thing:1.2"` will conflict with a dependency `library "someLib"` on a library declared in the same project.
+Could potentially just assert that component names do not contain ':' (should do this anyway).
+
+## Feature: Build author declares that a legacy Java project depends on a Java library produced by another project
+
+For example:
+
+    apply plugin: 'java'
+
+    dependencies {
+        compile project: 'other-project'
+    }
+
+When the project attribute refers to a project with a component plugin applied:
+
+- Select the target library from the libraries of the project. Assert that there is exactly one JVM library.
+- At compile time, include the library's jar binary only.
+- At runtime, include the library's jar binary and runtime dependencies.
+
+### Open issues
+
+- Allow `library` attribute? (see the sketch below)
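+
+If a `library` attribute were supported, the legacy notation might select a specific library from the target
+project like this (hypothetical syntax, shown only to illustrate the open issue above):
+
+    apply plugin: 'java'
+
+    dependencies {
+        // Select one library when the target project defines more than one (hypothetical)
+        compile project(path: ':other-project', library: 'my-lib')
+    }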
+
+## Feature: Build user views the dependencies for the Java libraries of a project
+
+The dependency reports show the dependencies of the Java libraries of a project:
+
+- `dependencies` task
+- `dependencyInsight` task
+- HTML report
+
+## Feature: Build author declares that a native component depends on a native library
+
+Add the ability to declare dependencies directly on a native component, using a DSL similar to the one for Java libraries:
+
+    apply plugin: 'cpp'
+
+    libraries {
+        myLib {
+            dependencies {
+                project 'other-project'
+                library 'my-prebuilt'
+                library 'local-lib' linkage 'api'
+            }
+        }
+    }
+
+Also reuse the dependency DSL at the source set level:
+
+    apply plugin: 'cpp'
+
+    libraries {
+        myLib
+    }
+
+    sources {
+        myLib {
+            cpp {
+                dependencies {
+                    project 'other-project'
+                    library 'my-lib' linkage 'api'
+                }
+            }
+        }
+    }
+
+## Feature: Build author declares that the API of a Java library requires some Java library
+
+For example:
+
+    apply plugin: 'new-java'
+
+    libraries {
+        myLib {
+            dependencies {
+                api {
+                    project 'other-project' library 'other-lib'
+                }
+            }
+        }
+    }
+
+This makes the API of the library 'other-lib' available at compile time, and the runtime artifacts and dependencies of 'other-lib' available at
+runtime.
+
+It also exposes the API of the library 'other-lib' as part of the API for 'myLib', so that it is visible at compile time for any other component that
+depends on 'myLib'.
+
+The default API of a Java library is its Jar file and no dependencies.
+
+### Open issues
+
+- Add this to native libraries
+
+## Feature: Build author declares that a Java library requires some Java library at runtime
+
+For example:
+
+    apply plugin: 'new-java'
+
+    libraries {
+        myLib {
+            dependencies {
+                runtime {
+                    project 'other-project' library 'other-lib'
+                }
+            }
+        }
+    }
+
+### Open issues
+
+- Add this to native libraries
+
+## Feature: Build author declares the target JVM for a Java library
+
+For example:
+
+    apply plugin: 'new-java'
+
+    platforms {
+        // Java versions are visible here
+    }
+
+    libraries {
+        myLib {
+            buildFor platforms.java7
+        }
+    }
+
+This declares that the bytecode for the binary should be generated for Java 7, and should be compiled against the Java 7 API.
+Assume that the source also uses Java 7 language features.
+
+When a library `a` depends on another library `b`, assert that the target JVM for `b` is compatible with the target JVM for `a` - that is,
+the JVM for `a` is the same as or newer than the JVM for `b`.
+
+The target JVM for a legacy Java library is the lowest of `sourceCompatibility` and `targetCompatibility`.
+
+### Open issues
+
+- Need to discover or configure the JDK installations.
+
+## Feature: Build author declares a custom target platform for a Java library
+
+For example:
+
+    apply plugin: 'new-java'
+
+    platforms {
+        myContainer {
+            runsOn platforms.java6
+            provides {
+                library 'myorg:mylib:1.2'
+            }
+        }
+    }
+
+    libraries {
+        myLib {
+            buildFor platforms.myContainer
+        }
+    }
+
+This defines a custom container that requires Java 6 or later, and states that the library should be built for that container.
+
+This includes the API of 'myorg:mylib:1.2' at compile time, but not at runtime. The bytecode for the library is compiled for Java 6.
+
+When a library `a` depends on another library `b`, assert that both libraries run on the same platform, or that `b` targets a JVM compatible with
+the JVM for the platform of `a`.
+
+### Open issues
+
+- Rework the platform DSL for native component to work the same way.
+- Retrofit into legacy java and web plugins.
+
+## Feature: Build author declares dependencies for a Java source set
+
+For example:
+
+    apply plugin: 'new-java'
+
+    libraries {
+        myLib {
+            source {
+                java {
+                    runsOn platforms.java7
+                    dependencies {
+                        project 'some-project'
+                        library 'myorg:mylib:1.2'
+                        runtime {
+                            ...
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+Will have to move source sets to live with the library domain object.
+
+### Open issues
+
+- Fail or skip if the target platform is not applicable for the component's platform?
+
+## Feature: Build author declares dependencies for custom library
+
+Change the sample plugin so that it allows Java and custom libraries to be used as dependencies:
+
+    apply plugin: 'my-sample'
+
+    libraries {
+        myCustomLib {
+            dependencies {
+                project 'other-project'
+                customUsage {
+                    project 'other-project' library 'some-lib'
+                }
+            }
+        }
+    }
+
+Allow a plugin to resolve the dependencies for a custom library, via some API. The target library must produce exactly
+one binary of the target type.
+
+Move the hard-coded Java library model out of the dependency management engine and have the jvm plugins define the
+Java library type.
+
+Resolve dependencies with inline notation:
+
+    def compileClasspath = dependencies.newDependencySet()
+                  .withType(JvmLibrary.class)
+                  .withUsage(Usage.COMPILE)
+                  .forDependencies("org.group:module:1.0", ...) // Any dependency notation, or dependency instances
+                  .create()
+
+    compileTask.classPath = compileClasspath.files
+    assert compileClasspath.files == compileClasspath.artifactResolutionResult.files
+
+Resolve dependencies based on a configuration:
+
+    def testRuntimeUsage = dependencies.newDependencySet()
+                  .withType(JvmLibrary.class)
+                  .withUsage(Usage.RUNTIME)
+                  .forDependencies(configurations.test.incoming.dependencies)
+                  .create()
+    copy {
+        from testRuntimeUsage.artifactResolutionResult.artifactFiles
+        into "libs"
+    }
+
+    testRuntimeUsage.resolutionResult.allDependencies { dep ->
+        println dep.requested
+    }
+
+Resolve dependencies not added to a configuration:
+
+    // declared outside the dependencies block so they can be used below
+    def lib1
+    def projectDep
+    dependencies {
+        lib1 = create("org.group:mylib:1.+") {
+            transitive false
+        }
+        projectDep = project(":foo")
+    }
+    def deps = dependencies.newDependencySet()
+                  .withType(JvmLibrary)
+                  .withUsage(Usage.RUNTIME)
+                  .forDependencies(lib1, projectDep)
+                  .create()
+    deps.files.each {
+        println it
+    }
+
+### Open issues
+
+- Component type declares usages.
+- Binary declares artifacts and dependencies for a given usage.
+
+## Feature: Build user views the dependencies for the custom libraries of a project
+
+Change the `dependencies`, `dependencyInsight` and HTML dependencies reports so that they can report
+on the dependencies of a custom component, plus whatever binaries the component happens to produce.
+
+## Feature: Build author declares target platform for custom library
+
+Change the sample plugin to allow a target JVM based platform to be declared:
+
+    apply plugin: 'my-sample'
+
+    platforms {
+        // Several target platforms are visible here
+    }
+
+    libraries {
+        myCustomLib {
+            minSdk 12 // implies: buildFor platforms.mySdk12
+        }
+    }
+
+## Feature: Java library produces multiple variants
+
+For example:
+
+    apply plugin: 'new-java'
+
+    libraries {
+        myLib {
+            buildFor platforms.java6, platforms.java8
+        }
+    }
+
+Builds a binary for Java 6 and Java 8.
+
+Dependency resolution selects the best binary from each dependency for the target platform.
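+
+A sketch of what that selection could mean in practice, reusing the DSL above (all names are illustrative):
+
+    libraries {
+        consumer {
+            // 'consumer' only targets Java 6...
+            buildFor platforms.java6
+            dependencies {
+                library 'myLib'
+            }
+        }
+    }
+
+    // ...so resolution should select the java6 binary of 'myLib', not the java8 one.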
+ +## Feature: Dependency resolution for native components + +## Feature: Build user views the dependencies for the native components of a project + +# Open issues and Later work + +- Should use rules mechanism. +- Expose the source and javadoc artifacts for local binaries. +- Reuse the local component and binary meta-data for publication. + - Version the meta-data schemas. + - Source and javadoc artifacts. +- Legacy war and ear plugins define binaries. +- Java component plugins support variants. +- Gradle runtime defines Gradle plugin as a type of jvm component, and Gradle as a container that runs-on the JVM. +- Deprecate and remove support for resolution via configurations. +- Add a report that shows the details for the components and binaries produced by a project. diff --git a/design-docs/dependency-management.md b/design-docs/dependency-management.md index 917f4e2d1029..36c017db04ac 100644 --- a/design-docs/dependency-management.md +++ b/design-docs/dependency-management.md @@ -47,49 +47,80 @@ A component instance with an associated (group, module, version) identifier. See also the completed stories in [dependency-management.md](done/dependency-management.md). -## Story: Dependency reports indicate the source of a component +## Story: IDE plugins use dependency resolution result to determine IDE class paths -The dependency reporting will change to give some indication of the source of the component: +This story changes the `idea` and `eclipse` plugins to use the resolution result to determine the IDE project classpath. -For an external component instance, this will be unchanged: +- Change `IdeDependenciesExtractor` and `JavadocAndSourcesDownloader` to use the resolution result to determine the project and + external dependencies. - +- group:name:1.2 - +- group:other:1.3 -> group:other:1.3.1 +## Story: Allow the source and Javadoc artifacts for an external Java library to be queried (✓) -For a local component that is not a module version, this will look something like: +This story introduces an API which allows the source and Javadoc artifacts for a Java library to be queried + +- Should be possible to query the artifacts as a single batch, so that, for example, we will be able to resolve and download artifacts + in parallel. +- The API should expose download failures. +- A component may have zero or more source artifacts associated with it. +- A component may have zero or more Javadoc artifacts associated with it. +- Should introduce the concept of a Java library to the result. +- Should have something in common with the story to expose component artifacts, above. +- Initial implementation should use the Maven style convention to currently used by the IDE plugins. The a later story will improve this for Ivy repositories. - +- project :some:path - +- project :some:path -> group:other:1.2 +### Test cases -For a local component that is a module version, this will look something like +- Query the source artifacts only +- Query the Javadoc artifacts only +- Query which artifacts could not be resolved or downloaded. +- Caching is applied as appropriate. - +- project :some:path (group:name:1.2) - +- project :some:path (group:name:1.2) -> group:other:1.2 +## Story: IDE plugins use new artifact resolution API to download sources and javadoc (✓) -1. Change the `RenderableDependency` hierarchy to use the component id and module version id, if not null. -2. Update the the dependency report tests as appropriate. 
+This story changes the `idea` and `eclipse` plugins to use the resolution result to determine the IDE classpath artifacts. -The HTML dependency report should change in a similar way. +- Change `IdeDependenciesExtractor` and `JavadocAndSourcesDownloader` to use the resolution result to determine the source and Javadoc artifacts. +- Should ignore project components. -### Test coverage +## Story: Dependency resolution uses conventional schemes to locate source and Javadoc artifacts for Ivy modules (✓) -- Update the existing test coverage for the new display values. -- Ensure there is coverage for the dependency report and the dependency HTML report where - - There are a mix of external and project dependencies in the graph -- Ensure there is coverage for the dependency insight report where: - - There are a mix of external and project dependencies in the graph - - There are a mix of external and project dependencies in the graph and the `--dependency` option is used. +This story improves the convention used to locate the source and Javadocs to cover some common Ivy conventions. -## Story: IDE plugins use dependency resolution result to determine IDE class paths +### User visible changes -This story changes the `idea` and `eclipse` plugins to use the resolution result to determine the IDE project classpath. +Source artifacts contained in a 'sources' configuration in ivy.xml will be now be automatically downloaded and linked into an IDE project. Similar for javadoc artifacts in a 'javadoc' configuration. -- Change `IdeDependenciesExtractor` and `JavadocAndSourcesDownloader` to use the resolution result to determine the project and - external dependencies. +### Implementation + +* Make it possible to use ResolveIvyFactory to create a DependencyToModuleVersionResolver without a configuration: use a default ResolutionStrategy and supplied name. +* Create a `DependencyMetaData` for each supplied `ModuleComponentIdentifier`, and use this to obtain the ModuleVersionMetaData for the component. + * Fail for any other types of `ComponentIdentifier` +* Add a new method: `ArtifactResolver.resolve(ModuleVersionMetaData, Class, BuildableMultipleArtifactResolveResult)` + * Note that this is a transitional API: long term the second parameter may be generalised in some way + * `BuildableMultipleArtifactResolveResult` allows the collection of multiple downloaded artifacts of the type, or multiple failures, or a combination. +* Add a method to `ModuleVersionRepository` that provides the `ModuleVersionArtifactMetaData` for candidate artifacts + given a particular ModuleVersionMetaData + JvmLibraryArtifact class. + * This method should not require remote access to the repository. + * For `MavenResolver` and `IvyDependencyResolverAdapter`, this would return artifacts defined with the appropriate classifiers. + * For `IvyResolver`, this would inspect the `ModuleVersionMetaData` to determine the candidate artifacts. + * This method should be used to implement the new `resolve` method on `UserResolverChain.ModuleVersionRepositoryArtifactResolverAdapter`. 
+ +### Test cases + +* Where ivy.xml contains a 'sources' and/or 'javadoc' configuration: + * Defined artifacts are included in generated IDE files + * Defined artifacts are available via Artifact Query API + * Detect and report on artifacts that are defined in ivy configuration but not found + * Detect and report error for artifacts that are defined in ivy configuration where download fails +* Use ivy scheme to retrieve source/javadoc artifacts from a local ivy repository +* Resolve source/javadoc artifacts by maven conventions where no ivy convention can be used: + * Flatdir repository + * No ivy.xml file for module + * Ivy module with no source/javadoc configurations defined in metadata +* Maven conventions are not used if ivy file declares empty sources and javadoc configuration ## Story: Dependency resolution result exposes a consumer that is not a module version -This story exposes different kinds of consumers for a dependency graph. +This story exposes different kinds of consumers for a dependency graph. The consumer is represented as the root of the dependency resolution result. - Result `root.id` should return a `ProjectComponentIdentifier` when a project configuration is resolved. - Result `root.id` should return an opaque `ComponentIdentifier` implementation when any other kind of configuration is resolved. @@ -122,179 +153,195 @@ This story exposes different kinds of consumers for a dependency graph. - Rename `ResolvedComponentResult.getId()` to something that is more explicit about the lack of guarantees. Maybe `getLocalId()` or ... - Extract a `ModuleComponentMetadataDetails` out of `ComponentMetadataDetails` and use `ComponentIdentifier` instead of `ModuleVersionIdentifier` as id. -## Story: Allow the artifacts for a component instance to be queried +## Story: Query the artifacts for all components defined by a configuration Currently, there is no way to determine which artifacts in a resolution result are associated with a given component. The artifacts are currently exposed -as `ResolvedArtifact` instances. These artifacts have a module version identifier associated with them, which is used to match against the component's -module version identifier. This only work when the component has a module version associated with it, which is not always the case. +as `ResolvedArtifact` instances, which reference a module version identifier but not a component identifier. As such, there is no way +to match a `ResolvedArtifact` to a component that is not uniquely identified by a module version. -TBD: provide some API to query the artifacts associated for a given component instance in a resolution result. +This story makes it possible to obtain an `ArtifactResolutionResult` directly from a `Configuration`, providing the same set of +artifacts as returned by `Configuration.getResolvedArtifacts()`. In doing so, the artifacts for a configuration are provided per component. -- It should be possible to query only the graph and not the artifacts. This should not download any artifacts. -- It should be possible to query the artifacts as a single batch, so that, for example, we will be able to resolve and download artifacts - in parallel. -- The API should expose download failures. -- A component may have zero or more artifacts associated with it. +This story also adds convenience mechanisms for obtaining all artifacts for an `ArtifactResolutionResult`, in addition to the existing +way to get artifacts per component and query for artifact download failures. 
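+
+For example, iterating the result grouped by component might look like this (a sketch based on the names in the
+implementation plan below; the exact API is still open):
+
+    configurations.compile.incoming.artifactResolutionResult.components.each { component ->
+        if (component instanceof ResolvedComponentArtifactsResult) {
+            println "${component.id}: ${component.artifacts*.id}"
+        }
+    }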
-### Test cases

-- Can query the artifacts of an external component.
-- Can query the artifacts of a project component.
-- Can query those artifacts that could not be resolved or downloaded.
-- Caching is applied as appropriate.
+### User visible changes

-## Story: IDE plugins use the resolution result to determine library artifacts
+Download all artifact files for a configuration, failing if the graph could not be resolved:

-This story changes the `idea` and `eclipse` plugins to use the resolution result to determine the IDE classpath artifacts.
+    copy {
+        from configurations.compile.incoming.artifactResolutionResult.artifactFiles
+        into "libs"
+    }

-- Change `IdeDependenciesExtractor` and `JavadocAndSourcesDownloader` to use the resolution result to determine the project and
-  external artifacts.
+Report on failed artifact downloads for a configuration:

-## Story: Dependency resolution resolves all artifacts as a batch
+    configurations.compile.incoming.artifactResolutionResult.artifacts.each { ArtifactResult artifact ->
+        if (artifact instanceof UnresolvedArtifactResult) {
+            println "Failed to download artifact ${artifact.id} for component ${artifact.id.componentIdentifier}: ${artifact.failure.message}"
+        }
+    }

-Change dependency resolution implementation to resolve all artifacts as a single batch, when any artifact is requested.
+### Implementation

-- Use progress logging to report on the batch resolution progress.
+- Add `ResolverResults.getArtifactResolutionResult()`: the result should be constructed by combining the existing `ResolverResults` and `ResolvedConfiguration`.
+    - Provides the same set of artifacts as `ResolvedConfiguration.getResolvedArtifacts()`
+    - If resolution of the dependency graph fails, then `ResolverResults.getArtifactResolutionResult()` should throw a descriptive exception
+    - If artifacts for a component cannot be determined or downloaded, then the `ArtifactResolutionResult` should encapsulate those failures.
+- Add `Configuration.incoming.getArtifactResolutionResult()`, which produces an `ArtifactResolutionResult` for the configuration.
+    - This result should contain the same set of artifacts currently returned by `ResolvedConfiguration.getResolvedArtifacts()`
+- Move `ComponentArtifactIdentifier` onto the public API, and return that from new method `ArtifactResult.getId()`
+- Add `ResolvedComponentArtifactsResult.getArtifacts()` that returns the set of all `ArtifactResult` instances for a component.
+- Add convenience methods to `ArtifactResolutionResult`:
+    - `getArtifacts()` returns the set of all `ArtifactResult` instances for all resolved components, failing if the result contains any
+      `UnresolvedComponentResult` instances.
+    - `getFiles()` returns a `FileCollection` containing all files associated with `ArtifactResult` instances for all resolved components,
+      throwing an exception on access for any `UnresolvedArtifactResult`

-## Story: Profile report displays artifact resolution time
+### Test cases

+- Refactor existing test cases to verify:
+    - Can query artifacts for configuration consisting of project and external components
+    - Can query artifacts for configuration with classifier set on dependency
+    - Can query artifacts for configuration with artifact defined on dependency
+    - Can query artifacts for configuration with dependency on a configuration other than default
+- Caching of artifacts resolved from configuration
+- Reports failure to resolve dependency graph
+- Reports failures for all artifacts that could not be resolved or downloaded.
+- Reports composite failure on attempt to get all artifacts where multiple artifacts could not be downloaded
+- Use `Configuration.incoming.artifactResolutionResult` after first using `Configuration.incoming.resolutionResult`: artifact result is not regenerated

-TBD
+### Open issues

-## Story: Allow the source and Javadoc artifacts for an external Java library to be queried
+- Replacement for `ResolvedArtifact.name`, `ResolvedArtifact.extension` etc
+- Need a way to query Artifact model without downloading artifact files

-This story introduces an API which allows the source and Javadoc artifacts for a Java library to be queried
+## Story: Access the ivy and maven metadata artifacts via the Artifact Query API

-- Should be possible to query the artifacts as a single batch, so that, for example, we will be able to resolve and download artifacts
-  in parallel.
-- The API should expose download failures.
-- A component may have zero or more source artifacts associated with it.
-- A component may have zero or more Javadoc artifacts associated with it.
-- Should introduce the concept of a Java library to the result.
-- Should have something in common with the story to expose component artifacts, above.
-- Initial implementation should use the Maven style convention to currently used by the IDE plugins. The a later story will improve this for Ivy repositories.
+### User visible changes

-### Test cases
+Access the ivy.xml files for the ivy components with the specified ids:

-- Query the source artifacts only
-- Query the Javadoc artifacts only
-- Query which artifacts could not be resolved or downloaded.
-- Caching is applied as appropriate.
+    def result = dependencies.createArtifactResolutionQuery()
+        .forComponents(ivyModuleComponentId1, ivyModuleComponentId2)
+        .withArtifacts(IvyModule, IvyDescriptorArtifact)
+        .execute()

-### API design proposals
-
-#### Resolve and iterate over all jvm libraries, without resolving artifacts
-
-Not supported because this API is all about resolving artifacts.
-
-#### Resolve jvm libraries together with their main and source artifacts, iterate over artifacts
-
-```
-def componentIds = ... // ComponentIdentifier's whose artifacts are to be resolved. Can be obtained from `configuration.incoming` API.
-def result = dependencies.createArtifactResolutionQuery()
-    .forComponents(componentIds)
-    .forArtifacts(JvmLibrary, JvmLibraryMainArtifact, JvmLibrarySourceArtifact)
-    .execute()
-for (jvmLibrary in result.getComponents(JvmLibrary)) { // separate type for each type of component
-    for (artifact in jvmLibrary.artifacts) { // separate type for each type of artifact
-        println artifact.id
-        println artifact.file
-    }
-}
-```
-
-#### Resolve jvm libraries together with their main and source artifacts, inspect component resolution failures
-
-```
-def componentIds = ... // ComponentIdentifier's whose artifacts are to be resolved. Can be obtained from `configuration.incoming` API.
-def result = dependencies.createArtifactResolutionQuery()
-    .forComponents(componentIds)
-    .forArtifacts(JvmLibrary) // shorthand for resolving all of the component's artifacts
-    .execute()
-for (component in result.unresolvedComponents) { // same representation for all components
-    println component.id
-    println component.failure
-    }
-}
-```
+    Set<File> ivyFiles = result.getArtifactFiles()

-### Open issues
+Get the pom files for all maven modules in a configuration:

-* API for artifact download failures
-* How to implement API for artifact download failures (`LenientConfiguration` only exposes module resolution failures)
-* How to determine what the main artifacts of a `JvmLibrary` component are (or more specifically, how to deal with Maven artifacts with classifiers;
-  the current API just provides the component ID). Resolving main artifacts isn't required for this story, but is related.
+    def artifactResult = dependencies.createArtifactResolutionQuery()
+        .forComponents(configurations.compile)
+        .withArtifacts(MavenModule, MavenPomArtifact)
+        .execute()
+    Set<File> pomFiles = artifactResult.getArtifactFiles()

-## Story: IDE plugins use the resolution result to determine library source and Javadoc artifacts
+### Test cases

-This story changes the `idea` and `eclipse` plugins to use the resolution result to determine the IDE classpath artifacts.
+- Invalid component type and artifact type
+    - Cannot call `withArtifacts` multiple times for query
+    - Cannot mix `JvmLibrary` with metadata artifact types
+    - Cannot mix `IvyModule` and `MavenModule` component types with jvm library artifact types
+- Unsupported artifact types:
+    - When requesting `IvyModule` artifacts, the result for a maven component is `UnresolvedComponentResult` with a useful failure.
+    - When requesting `MavenModule` artifacts, the result for an ivy component is `UnresolvedComponentResult` with a useful failure.
+    - When requesting `IvyModule` or `MavenModule` artifacts, the result for a project component is `UnresolvedComponentResult` with a useful failure.
+- Optional artifacts:
+    - Request an ivy descriptor for an ivy module with no descriptor, and get empty set of artifacts.
+    - Request a pom for a maven module with no pom, and get empty set of artifacts.
+- Metadata artifacts are cached
+    - Updates `IvyDescriptorArtifact` for changing module
+    - Updates `MavenPomArtifact` for maven snapshot
+    - Updates both with `--refresh-dependencies`

-- Change `IdeDependenciesExtractor` and `JavadocAndSourcesDownloader` to use the resolution result to determine the source and Javadoc artifacts.
-- Should ignore project components.
+### Open issues

-## Story: Dependency resolution uses conventional schemes to locate source and Javadoc artifacts for Ivy modules
+- Typed domain model for IvyModule and MavenModule

-This story improves the convention used to locate the source and Javadocs to cover some common Ivy conventions.
+## Story: Reliable mechanism for checking for success with new resolution result APIs + +- Add `rethrowFailure()` to `ArtifactResolutionResult` and `ResolutionResult` +- Update JvmLibraryArtifactResolveTestFixture to rethrow failures and verify the exception messages and causes directly in the tests + +## Story: Directly access the source and javadoc artifacts for a configuration using the Artifact Query API ### User visible changes -Certain source/javadoc artifacts that were not automatically linked into an IDE project will now be successfully downloaded and linked: +Get JvmLibrary components with source and javadoc artifacts for a configuration: -* Artifacts contained in a 'sources' configuration in ivy.xml (or 'javadocs') -* Artifacts with a 'source' type in ivy.xml (or 'javadoc') -* ??? Other conventions ??? + def artifactResult = dependencies.createArtifactResolutionQuery() + .forComponents(configurations.compile) + .withArtifacts(JvmLibrary, JvmLibrarySourcesArtifact, JvmLibraryJavadocArtifact) + .execute() + def libraries = artifactResult.getComponents(JvmLibrary) -### Implementation +## Story: IDE plugins use the resolution result to determine library artifacts -* Make it possible to use ResolveIvyFactory to create a DependencyToModuleVersionResolver without a configuration: use a default ResolutionStrategy and supplied name. -* Create a `DependencyMetaData` for each supplied `ModuleComponentIdentifier`, and use this to obtain the ModuleVersionMetaData for the component. - * Fail for any other types of `ComponentIdentifier` -* Add a new method: `ArtifactResolver.resolve(ModuleVersionMetaData, Class, BuildableMultipleArtifactResolveResult)` - * Note that this is a transitional API: long term the second parameter may be generalised in some way - * `BuildableMultipleArtifactResolveResult` allows the collection of multiple downloaded artifacts of the type, or multiple failures, or a combination. -* Add a method to `ModuleVersionRepository` that provides the `ModuleVersionArtifactMetaData` for candidate artifacts - given a particular ModuleVersionMetaData + JvmLibraryArtifact class. - * This method should not require remote access to the repository. - * For `MavenResolver` and `IvyDependencyResolverAdapter`, this would return artifacts defined with the appropriate classifiers. - * For `IvyResolver`, this would inspect the `ModuleVersionMetaData` to determine the candidate artifacts. - * This method should be used to implement the new `resolve` method on `UserResolverChain.ModuleVersionRepositoryArtifactResolverAdapter`. +This story changes the `idea` and `eclipse` plugins to use the resolution result to determine the IDE classpath artifacts. -### Test cases +- Change `IdeDependenciesExtractor` and `JavadocAndSourcesDownloader` to use the resolution result to determine the project and + external artifacts. 
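+
+A rough sketch of how these plugins might drive the Artifact Query API described above to collect those files (the wiring shown is illustrative, not the actual plugin implementation):
+
+    def componentIds = ... // ModuleComponentIdentifiers gathered from the resolution result
+    def result = dependencies.createArtifactResolutionQuery()
+        .forComponents(componentIds)
+        .withArtifacts(JvmLibrary, JvmLibrarySourcesArtifact, JvmLibraryJavadocArtifact)
+        .execute()
+    for (component in result.getComponents(JvmLibrary)) {
+        // attach the resolved source and javadoc files to the matching IDE classpath entry
+    }
+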
-* For each defined ivy convention:
-    * Resolve source/javadoc artifacts from Ivy repository
-    * Report on artifacts that are defined in ivy metadata but not found
-    * Report on artifacts that are defined in ivy metadata where download fails
-* Fall back to maven convention if no artifacts defined in custom ivy convention
-* Resolve source/javadoc artifacts by maven conventions where no ivy convention can be used:
-    * Flatdir repository
-    * Ivy repository with no metadata: jar-only
-    * Ivy module with no source/javadoc artifacts defined in metadata
+## Story: Dependency resolution resolves all artifacts as a batch

-### Open issues
+Change dependency resolution implementation to resolve all artifacts as a single batch, when any artifact is requested.

-* If the files defined by a ivy-specific scheme are not available, should we then use the maven convention to look for artifacts?
-  Or, for backward-compatibility should we first use the maven scheme, trying the ivy-specific scheme if not found?
+- Use progress logging to report on the batch resolution progress.
+
+## Story: Profile report displays artifact resolution time
+
+TBD

-## Story: Source and javadoc artifacts are updated for maven snapshot based on configured cache expiry
+## Story: Source and javadoc artifacts are updated when Maven snapshot changes

-Currently the Artifact Query API relies on `detachedConfiguration`, which uses an unconfigurable ResolutionStrategy. This means that
-the source and javadoc for a changing module may not be updated when the corresponding artifact is updated.
+- Use the timestamp as part of the component identifier for unique Maven snapshots.
+- A unique snapshot is no longer considered a changing module.

### Test cases

-Some test cases that are not directly related, but require this feature to be implemented:
+* New artifacts are used when snapshot has expired:
+    * Resolve the source and javadoc artifacts for a Maven snapshot.
+    * Publish a new snapshot with changed artifacts.
+    * With `cacheChangingModules` set to 0, verify that the new source and javadoc artifacts are used.
+
+* Old artifacts are used when snapshot has not expired:
+    * Resolve a Maven snapshot, but not the source and javadoc artifacts.
+    * Publish a new snapshot with changed artifacts.
+    * With `cacheChangingModules` set to default, verify that the old source and javadoc artifacts are used.

-* Source and javadoc for Maven SNAPSHOT modules are updated when cacheChangingModulesFor 0, 'seconds' is applied to configuration containing module.
-* No requests for source and javadoc are made with build is executed with `--offline`, even when cache has expired
+* No requests for Maven snapshot source and javadoc are made when the build is executed with `--offline`, even when the cache has expired.
* Can recover from a broken HTTP request by switching to use `--offline`.

-## Story: Source and Javadoc artifacts are exposed for a local Java component
+## Story: Source and javadoc artifacts are updated for changing module based on configured cache expiry

-TBD
+Currently it is not possible to configure how often the Artifact Query API should check for changes to artifacts.
+This means that the source and javadoc for a changing module may not be updated when the corresponding artifact is updated.
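+
+For reference, the existing per-configuration control looks something like the snippet below; it applies to regular dependency resolution but, as noted above, not to the Artifact Query API:
+
+    configurations.all {
+        // check for changed artifacts of changing modules on every resolution
+        resolutionStrategy.cacheChangingModulesFor 0, 'seconds'
+    }
+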
-## Story: Source and Javadoc artifacts are published for a Java component +This story introduces a new cache control DSL that can apply to both dependency graph and artifact resolution: -This story changes the `ivy-publish` and `maven-publish` plugins to publish the source and Javadocs for a Java component. +- Introduce a 'check for changes' cache control DSL, as a replacement for `ResolutionStrategy`. +- Cache control DSL allows a frequency at which changing things should be checked. + - Should be possible to declare 'never', 'always' and some duration. +- Cache control DSL allows a rule to be declared that specifies the frequency at which changing things from a given module should be checked. +- DSL should be reusable in some form for plugin resolution and external build script caching (but not wired up to these things yet). +- The existing DSL on `ResolutionStrategy` should win over the new cache control DSL. +- User guide explains how to use the cache control DSL, and DSL is documented in the DSL guide. + +### Test cases + +* New DSL can be used to control caching for all types of cached dependency resolution state: + - version list + - module version meta-data + - downloaded artifacts + - resolved artifact meta-data + - Maven snapshot timestamp + +Some test cases that are not directly related, but require this feature to be implemented: + +* Source and javadoc for a non-unique Maven snapshot is updated when check-for-changes is 'always'. +* No requests for source and javadoc are made with build is executed with `--offline`, even when cache has expired. +* Can recover from a broken HTTP request by switching to use `--offline`. ## Story: Dependency resolution result exposes local component instances that are not module versions @@ -382,39 +429,6 @@ meet the requirements. - A registry of requirement -> buildable file collection converters. - Add some way to query the resolved include roots, link files and runtime files for a native binary. -## Story: Implement native binary dependency resolution using self resolving dependencies - -This story starts to push the resolution mechanism introduced in the above story down into the dependency management engine. For this story, -native binary dependencies will be converted to self-resolving dependencies which are then fed into the dependency management engine. - -This story is simply a refactoring. No new user-visible behaviour will be added. - -## Story: Native component dependencies are visible in the dependency reports - -### Open issues - -- Dependencies need to shown per-variant. - -## Story: Plugin contributes a component type implementation - -Allow a plugin to contribute a component type implementation, which is responsible for defining some component type. For this story, the definition is -responsible for extracting the component meta-data from some local component instance. Later stories will generalise this to make the definition -reusable for other purposes, such as publishing. - -- Use this in the native binary plugins to convert native library and binary instances to meta-data. 
- -### Open issues - -- Add some way to influence the target of a project dependency -- Generalise so that the meta-data model can be reused for publishing and resolving external components - - Version the model -- Detangle the usage context from the dependency implementation and pass through to the resolvers - - Needs to be considered when caching stuff -- Add some sugar to infer the meta-data model from some static types -- Expose the component instance graph from the new requirements API -- Remove `NativeDependencySet` and `LibraryResolver` interfaces -- Replace the existing headers and files configurations - ## Story: Conflict resolution prefers local components over other components When two components have conflicting external identifiers, select a local component. @@ -477,6 +491,11 @@ Allow some substitutions to be expressed declaratively, rather than imperatively - Get meta-data of a component - Get certain artifacts of a component. Includes meta-data artifacts +## Story: Resolution result exposes excluded dependencies + +TBD + # Open issues - When resolving a pre-built component, fail if the specified file does not exist/has not been built (if buildable). +- In-memory caching for the list of artifacts for a component diff --git a/design-docs/ci-improvements.md b/design-docs/dev-infrastructure.md similarity index 71% rename from design-docs/ci-improvements.md rename to design-docs/dev-infrastructure.md index af498293fa42..0d8d514de7bd 100644 --- a/design-docs/ci-improvements.md +++ b/design-docs/dev-infrastructure.md @@ -1,4 +1,3 @@ - Some ideas to improve feedback from the CI pipeline: # Background @@ -17,15 +16,14 @@ Here is a rough overview of the current structure of the CI pipeline This pipeline is replicated for the release and master branches. -# Reduce memory consumption of the full tooling API test suite - -Currently, the full cross version integration test suite for the tooling API starts daemons for every Gradle version, and starts -multiple daemons for each version. +# Reduce memory consumption of daemon processes started by test suite -- Verify that many daemon processes are running while the test suite is executing. - Verify that daemon processes are started with relatively small heap and permgen limits, rather than the defaults for the daemon, and fix if not. -- Change test execution for the tooling API test suite so that the tests for a single Gradle version (or small set of versions) are completed before starting - tests on another Gradle version. One potential implementation is to introduce a test task per Gradle version. +- Kill daemons at the end of the build. + +# Compile source against baseline Java version early in the pipeline + +To fail early when later Java APIs are used. # Automate installation of TeamCity agents on Windows build VM @@ -92,22 +90,22 @@ Also remove the fast feedback agent pool # Leverage parallel execution -# Proactively clean disks to avoid accumulation +# Install BarKeep code review tool + +Set up [BarKeep](http://getbarkeep.org/) on `dev1` as an alternative to Crucible. -- To avoid accumulation of cruft (e.g. old wrappers, old dependencies), we should periodically recreate the build VMs. +# Build machines provisioning -# Run all Windows builds with virtual agents +## Add monitoring to all build machines -At the moment running multiple Windows builds with virtual agents in parallel may cause memory issues. As a result the build fails. 
One of the observed error message
we see is the following:
+- Add New Relic monitoring to Linux machines without Salt
+- Add New Relic monitoring to Windows machines

-    Error occurred during initialization of VM
-    Could not reserve enough space for object heap
+## Extend Salt to not-yet-managed machines

-This error mainly occurs if one of the builds spawns new Gradle processses. To mitigate this situation the following builds are configured to only use the physical
-Windows machine `winagent perf1`:
+Start with user management and installation of packages.
+This applies to unmanaged machines running Linux and one Windows box.

-- Windows - Java 1.5 - Daemon integration tests
-- Windows - Java 1.6 - Cross-version tests
+## Use the same Linux distribution on all machines

-All other builds are still using the virtual agents. After identifying and fixing the root cause for the error, we should change back the configuration.
+Upgrade existing Saucy machines and probably also Precise.
\ No newline at end of file
diff --git a/design-docs/done/dependency-management-bug-fixes.md b/design-docs/done/dependency-management-bug-fixes.md
index c5d6cb2ad18f..97463678a5c3 100644
--- a/design-docs/done/dependency-management-bug-fixes.md
+++ b/design-docs/done/dependency-management-bug-fixes.md
@@ -338,3 +338,148 @@ for now we'll need an implementation backed by ExternalResourceRepository#list (
Later we may add a ModuleVersionLister backed by an Artifactory REST listing, our own file format, etc... The idea would be that these
ModuleVersionLister implementations will be pluggable in the future, so you could combine maven-metadata.xml with ivy.xml in a single
repository, for example.
+
+# GRADLE-2861 Handle parent pom with unknown placeholders (DONE)
+
+See [GRADLE-2861](http://issues.gradle.org/browse/GRADLE-2861)
+
+Currently, the POM parser (inherited from Ivy) attaches special extra attributes to the `ModuleDescriptor` for a POM. These are later used by the POM parser
+when it parses a child POM. Sometimes these attributes cause badly formed XML to be generated, hence the failure listed in the jira issue.
+
+The solution is to have the parser request the parent POM artifact directly, rather than indirectly via the module meta-data:
+
+1. Add a `LocallyAvailableExternalResource getArtifact(Artifact)` method to `DescriptorParseContext`.
+    - Implementation can reuse the `ModuleVersionResolveResult` from the existing `getModuleDescriptor()` method. This result includes an `ArtifactResolver` which
+      can be used to resolve an `Artifact` to a `File`. There's an example of how to adapt a `File` to a `LocallyAvailableExternalResource` instance in
+      `AbstractModuleDescriptorParser.parseMetaData()`.
+2. Change the `GradlePomModuleDescriptorParser.parseOtherPom()` to use this new method to fetch and parse the parent POM artifact, rather than using the parsed
+    `ModuleDescriptor` for the parent. For this step, can continue to represent the parent pom using a `ModuleDescriptor` inside the parser.
+3. Change `GradlePomModuleDescriptorParser` to represent the parent POM using a `PomReader` rather than a `ModuleDescriptor`.
+4. Clean out `GradlePomModuleDescriptorBuilder` so that it no longer defines any extra properties on the parsed `ModuleDescriptor`.
+5. Change `IvyXmlModuleDescriptorParser.parseOtherIvyFile()` to use the new method to fetch and parse the Ivy descriptor artifact.
+6. Remove `DescriptorParseContext.getModuleDescriptor()`. It should no longer be required.
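+
+A sketch of the new parse context method from step 1, using the signature proposed above (surrounding members elided):
+
+    interface DescriptorParseContext {
+        // Resolves the given artifact (e.g. a parent POM or an Ivy descriptor) to a locally available resource
+        LocallyAvailableExternalResource getArtifact(Artifact artifact);
+    }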
+
+## Test coverage
+
+* Unignore the existing test case in `BadPomFileResolveIntegrationTest`.
+* Add a test case to `MavenParentPomResolveIntegrationTest` to cover two Maven modules that share a common parent.
+* Add a test case to `MavenParentPomResolveIntegrationTest` to cover a Maven module that has a parent and grandparent module.
+
+# Latest status dynamic versions work across multiple repositories (DONE)
+
+See [GRADLE-2502](http://issues.gradle.org/browse/GRADLE-2502)
+
+### Test coverage
+
+1. Using `latest.integration`
+    1. Empty repository fails with not found.
+    2. Publish `1.0` and `1.1` with status `integration`. Resolves to `1.1`.
+    3. Publish `1.2` with status `release`. Resolves to `1.2`.
+    4. Publish `1.3` with no ivy.xml. Resolves to `1.3`.
+2. Using `latest.milestone`
+    1. Empty repository fails with not found.
+    2. Publish `2.0` with no ivy.xml. Fails with not found.
+    3. Publish `1.3` with status `integration`. Fails with not found.
+    4. Publish `1.0` and `1.1` with ivy.xml and status `milestone`. Resolves to `1.1`.
+    5. Publish `1.2` with status `release`. Resolves to `1.2`.
+3. Using `latest.release`
+    1. Empty repository fails with not found.
+    2. Publish `2.0` with no ivy.xml. Fails with not found.
+    3. Publish `1.3` with status `milestone`. Fails with not found.
+    4. Publish `1.0` and `1.1` with ivy.xml and status `release`. Resolves to `1.1`.
+4. Multiple repositories.
+5. Checking for changes. Using `latest.release`
+    1. Publish `1.0` with status `release` and `2.0` with status `milestone`.
+    2. Resolve and assert directory listing and `1.0` artifacts downloaded.
+    3. Resolve and assert directory listing downloaded.
+    4. Publish `1.1` with status `release`.
+    5. Resolve and assert directory listing and `1.1` artifacts downloaded.
+6. Maven integration
+    1. Publish `1.0`. Check `latest.integration` resolves to `1.0` and `latest.release` fails with not found.
+    2. Publish `1.1-SNAPSHOT`. Check `latest.integration` resolves to `1.1-SNAPSHOT` and `latest.release` fails with not found.
+7. Version ranges
+8. Repository with multiple patterns.
+9. Repository with `[type]` in pattern before `[revision]`.
+10. Multiple dynamic versions match the same remote revision.
+
+### Implementation strategy
+
+Change ExternalResourceResolver.getDependency() to use the following algorithm:
+1. Calculate an ordered list of candidate versions.
+    1. For a static version selector the list contains a single candidate.
+    2. For a dynamic version selector the list is the full set of versions for the module.
+        * For a Maven repository, this is determined using `maven-metadata.xml` if available, falling back to a directory listing.
+        * For an Ivy repository, this is determined using a directory listing.
+        * Fail if directory listing is not available.
+2. For each candidate version:
+    1. If the version matcher does not accept the module version, continue.
+    2. Fetch the module version meta-data, as described below. If not found, continue.
+    3. If the version matcher requires the module meta-data and it does not accept the meta-data, continue.
+    4. Use the module version.
+3. Return not found.
+
+To fetch the meta-data for a module version:
+1. Download the meta-data descriptor resource, via the resource cache. If found, parse.
+    1. Validate module version in meta-data == the expected module version.
+2. Check for a jar artifact, via the resource cache. If found, use default meta-data. The meta-data must have `default` set to `true` and `status` set to `integration`.
+3. Return not found.
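+
+A compact sketch of the selection loop described above (`versionMatcher`, `fetchMetaData` and the result helpers are illustrative names, not the actual implementation):
+
+    for (candidate in candidateVersions) {
+        // step 2.1: cheap check against the version selector first
+        if (!versionMatcher.accept(selector, candidate)) continue
+        // step 2.2: fetch meta-data as described under "To fetch the meta-data" above
+        def metaData = fetchMetaData(candidate)
+        if (metaData == null) continue
+        // step 2.3: some selectors (e.g. latest.release) must also inspect the status in the meta-data
+        if (versionMatcher.needModuleMetadata(selector) && !versionMatcher.accept(selector, metaData)) continue
+        return found(metaData) // step 2.4: use this module version
+    }
+    return notFound() // step 3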
+
+# Correct handling of packaging and dependency type declared in poms (DONE)
+
+* GRADLE-2188: Artifact not found resolving dependencies with packaging/type "orbit"
+
+### Description
+
+Our engine for parsing Maven pom files is borrowed from ivy, and assumes the 'packaging' element equals the artifact type, with a few exceptions (ejb, bundle, eclipse-plugin, maven-plugin).
+This is different from the way Maven does the calculation, which is:
+
+* Type defaults to 'jar' but can be explicitly declared.
+* Maven maps the type to an [extension, classifier] combination using some hardcoded rules. Unknown types are mapped to [type, ""].
+* To resolve the artifact, maven looks for an artifact with the given artifactId, version, classifier and extension.
+
+### Strategic solution
+
+At present, our model of an Artifact is heavily based on ivy; for this fix we can introduce the concept of mapping between our internal model and a repository-centric
+artifact model. This will be a small step toward an independent Gradle model of artifacts, which then maps to repository-specific things like extension, classifier, etc.
+
+### User visible changes
+
+* When the dependency declaration has no 'type' specified, or a 'type' that maps to the extension 'jar'
+    * Resolution of a POM module with packaging in ['', 'pom', 'jar', 'ejb', 'bundle', 'maven-plugin', 'eclipse-plugin'] will not change
+    * Resolution of a POM with packaging 'foo' that maps to 'module.foo' will emit a deprecation warning and use the artifact 'module.foo'
+    * Resolution of a POM with packaging 'foo' that maps to 'module.jar' will successfully find the artifact 'module.jar' (i.e. 'orbit'). An extra HTTP
+      request will be required to first look for 'module.foo'.
+* When the dependency declaration has a 'type' specified that maps to an extension 'ext' (other than 'jar')
+    * Resolution of a POM module with packaging in ['pom', 'jar', 'ejb', 'bundle', 'maven-plugin', 'eclipse-plugin'] will emit a deprecation warning before using 'module.jar' if it exists
+    * Resolution of a POM with packaging 'foo' and actual artifact 'module.foo' will emit a deprecation warning and use the artifact 'module.foo'
+    * Resolution of a POM with packaging 'foo' and actual artifact 'module.ext' will successfully find the artifact 'module.ext'. An extra HTTP
+      request will be required to first look for 'module.foo'.
+
+### Integration test coverage
+
+* Coverage for resolving pom dependencies referenced in various ways:
+    * Need modules published in maven repositories with packaging = ['', 'pom', 'jar', 'war', 'eclipse-plugin', 'custom']
+    * Test resolution of artifacts in these modules via
+        1. Direct dependency in a Gradle project
+        2. Transitive dependency in a maven module (pom) which is itself a dependency of a Gradle project
+        3. Transitive dependency in an ivy module (ivy.xml) which is itself a dependency of a Gradle project
+    * For 1. and 2., need dependency declaration with and without type attribute specified
+    * Must verify that deprecation warning is logged appropriately
+* Sad-day coverage for the case where neither packaging nor type can successfully locate the maven artifact. Error message should report 'type'-based location.
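+
+For reference, the two kinds of dependency declaration discussed under "User visible changes" look something like this in a build script (the coordinates are invented for the example):
+
+    dependencies {
+        // no 'type' specified: defaults to 'jar'
+        compile 'org.acme:thing:1.0'
+        // explicit 'type' declared on the dependency artifact
+        compile('org.acme:other:1.0') {
+            artifact {
+                name = 'other'
+                type = 'orbit'
+            }
+        }
+    }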
+
+### Implementation approach
+
+* Determine 2 locations for the primary artifact:
+    * The 'packaging' location: apply the current logic to determine location from module packaging attribute
+        * Retain current packaging->extension mapping for specific packaging types
+    * The 'type' location: Use maven3 rules to map type->extension+classifier, and construct a location
+* If both locations are the same, use the artifact at that location.
+* If not, look for the artifact in the packaging location
+    * If found, emit a deprecation warning and use that location
+    * If not found, use the artifact from the type location
+* In 2.0, we will remove the packaging->extension mapping and the deprecation warning
+
+# Handle pom-only modules in mavenLocal (DONE)
+
+* GRADLE-2034: Existence of pom file requires that declared artifacts can be found in the same repository
+* GRADLE-2369: Dependency resolution fails for mavenLocal(), mavenCentral() if artifact partially in mavenLocal()
diff --git a/design-docs/done/dependency-management.md b/design-docs/done/dependency-management.md
index 60a4d0120f4b..c90912b79988 100644
--- a/design-docs/done/dependency-management.md
+++ b/design-docs/done/dependency-management.md
@@ -102,3 +102,36 @@ This will allow a consumer to extract the external and project components as fol
- for an external module component:
    - `id` is a `ModuleComponentIdentifier` with correct `group`, `module`, `version` values.
    - `moduleVersion` has the same attributes as `id`.
+
+## Story: Dependency reports indicate the source of a component (DONE)
+
+The dependency reporting will change to give some indication of the source of the component:
+
+For an external component instance, this will be unchanged:
+
+    +- group:name:1.2
+    +- group:other:1.3 -> group:other:1.3.1
+
+For a local component that is not a module version, this will look something like:
+
+    +- project :some:path
+    +- project :some:path -> group:other:1.2
+
+For a local component that is a module version, this will look something like:
+
+    +- project :some:path (group:name:1.2)
+    +- project :some:path (group:name:1.2) -> group:other:1.2
+
+1. Change the `RenderableDependency` hierarchy to use the component id and module version id, if not null.
+2. Update the dependency report tests as appropriate.
+
+The HTML dependency report should change in a similar way.
+
+### Test coverage
+
+- Update the existing test coverage for the new display values.
+- Ensure there is coverage for the dependency report and the dependency HTML report where
+    - There are a mix of external and project dependencies in the graph
+- Ensure there is coverage for the dependency insight report where:
+    - There are a mix of external and project dependencies in the graph
+    - There are a mix of external and project dependencies in the graph and the `--dependency` option is used.
diff --git a/design-docs/done/gradle-2.0.md b/design-docs/done/gradle-2.0.md
new file mode 100644
index 000000000000..c088ab2eaf6d
--- /dev/null
+++ b/design-docs/done/gradle-2.0.md
@@ -0,0 +1,29 @@
+## Remove support for the Gradle Open API implementation (DONE)
+
+Now that we have reasonable tooling support via the tooling API, remove the Open API.
+
+* Implement a stub to fail with a reasonable error message when attempting to use Gradle from the Open API.
+* Add integration test coverage that using the Open API fails with a reasonable error message.
+
+Note that the `openAPI` project must still remain, so that the stubs fail in the appropriate way when used by Open API clients.
+This will be removed in Gradle 3.0.
+
+## Remove the `GradleLauncher` API (DONE)
+
+The public API for launching Gradle is now the tooling API. The `GradleBuild` task can also be used.
+
+* Replace internal usages of the static `GradleLauncher` methods.
+* Move the `GradleLauncher` type from the public API to an internal package.
+
+## Remove usages of JNA and JNA-Posix (DONE)
+
+Replace all usages of JNA and JNA-Posix with native-platform. Currently, this means that console support and
+UNIX file permissions with JVMs earlier than Java 7 will not be supported on the following platforms:
+
+* Linux-ia64
+* Solaris-x86, -amd64, -sparc, -sparcv9
+
+## Misc API tidy-ups (DONE)
+
+* Remove unused `IllegalOperationAtExecutionTimeException`.
+* Remove unused `AntJavadoc`.
diff --git a/design-docs/done/task-configuration-from-command-line.md b/design-docs/done/task-configuration-from-command-line.md
index 868a6ed0a801..63318ef11364 100644
--- a/design-docs/done/task-configuration-from-command-line.md
+++ b/design-docs/done/task-configuration-from-command-line.md
@@ -44,3 +44,90 @@ TBD

### Implementation approach

TBD
+
+## Help task shows basic details about a task (DONE)
+
+Add some command line interface for discovering details about a task (name, type, path, description).
+
+### User visible changes
+
+Running `gradle help --task test` shows a usage message for the `test` task.
+
+If multiple tasks match, details of the matching tasks are shown:
+
+* all matched tasks have the same type
+    * print one report of the task type and include all matching paths in the report
+
+* matched tasks have different types
+    * print one detail output for each different task type including all available paths
+
+### Test coverage
+
+* integration tests
+    * `gradle help --task` on simple task
+    * `gradle help --task` on task referenced by full path (e.g. `:someProj:dependencies`)
+    * `gradle help --task` on implicit task (e.g. `tasks`)
+    * `gradle help --task` on task defined via placeholder
+    * `gradle help --task` on non-existing task displays a reasonable error message, including candidate matches
+    * `gradle help --task` on multiple matching tasks
+    * `gradle help --task` using camel-case matching to select a task
+
+### Implementation approach
+
+- Change the `help` task:
+    - add `--task` commandline property
+    - change displayHelp implementation to print task details when `--task` is set
+    - lookup project tasks and implicit tasks using the task selector
+    - throw decent error message when requested task cannot be found
+    - task details (task name, task type, path)
+    - the default message informs the user about using `gradle help --task <task name>`
+
+- Update the 'using Gradle from the command-line' user guide chapter to mention the help task.
+
+## Help task shows command-line options for a task (DONE)
+
+Commandline options of the task passed to help are listed including a description. The legal values for each property are not shown - this
+is added in a later story.
+
+### User visible changes
+
+The usage message of running `gradle help --task <task name>` lists commandline options of the selected tasks.
+
+### Test coverage
+
+* integration tests
+    * `gradle help` on task with no commandline properties
+    * `gradle help` on task with commandline properties
+    * `gradle help` on implicit task with no commandline properties
+    * `gradle help` on implicit task with commandline properties
+    * `gradle help --tassk help` (should print hint to `gradle help --task help`)
+
+### Implementation approach
+
+- Change configuration error message in `CommandLineTaskConfigurer` to suggest that the user run `gradle help --task <task name>`.
+- Update the 'using Gradle from the command-line' user guide chapter.
+
+## Help task shows legal values for each command-line option
+
+### User visible changes
+
+The usage message of running `gradle help --task init` includes the available values for the task command line options (e.g. `--type`).
+
+### Test coverage
+
+* integration tests
+    * `gradle help` on task with enum property type mapped to commandline option
+    * `gradle help` on task with boolean property type mapped to commandline option
+    * `gradle help` on task with String property mapped to commandline option
+    * `gradle help --task init` shows all available init types
+
+### Implementation approach
+
+- Introduce marker annotation `Option("optionName")` to mark a task property mapped to a commandline option.
+- `@Option` without a provided "optionName" is mapped to an option with the same name as the annotated field
+- `@Option("optionName")` annotated on Enums includes enum values as possible option values
+- `@Option("optionName")` annotated on boolean includes true/false as possible option values
+- `@Option("optionName")` annotated on a setter method evaluates the available options from the parameter type
+- Introduce marker annotation `OptionValues("optionName")` to allow a dynamic value lookup in the task implementation itself.
+- Adapt InitBuild task to use `@OptionValues` to map values for the `--type` command line option.
+- Update the 'using Gradle from the command-line' user guide chapter.
diff --git a/design-docs/done/tooling-api-improvements.md b/design-docs/done/tooling-api-improvements.md
new file mode 100644
index 000000000000..08e27acf653e
--- /dev/null
+++ b/design-docs/done/tooling-api-improvements.md
@@ -0,0 +1,164 @@
+## Story: Tooling API client requests build model for old Gradle version (DONE)
+
+This story adds support for the `GradleBuild` model for older target Gradle versions.
+
+### Implementation
+
+Change the implementations of `ConsumerConnection.run(type, parameters)` so that when asked for a `GradleBuild` model, they instead
+request the `GradleProject` model and then convert it to a `DefaultGradleBuild` instance. See `ConnectionVersion4BackedConsumerConnection.doGetModel()`
+for an example of this kind of thing.
+
+For the `ModelBuilderBackedConsumerConnection` implementation, if the provider Gradle version supports the `GradleBuild` model (is >= 1.8-rc-1) then
+forward to the provider, as it does now.
+
+To implement this cleanly, one option might be to introduce some chain of model producers into the `ConsumerConnection` subclasses, so that each producer is
+asked in turn whether it can produce the requested model. The last producer can delegate to the provider connection. Stop at the first producer that can
+produce the model.
+
+### Test cases
+
+- For all Gradle versions, can request the `GradleBuild` model via `ProjectConnection`. This basically means removing the `@TargetGradleVersion` from
+  the test case in `GradleBuildModelCrossVersionSpec`.
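+
+One possible shape for the producer chain described in the implementation notes above (a sketch only; the names are illustrative):
+
+    interface ModelProducer {
+        // Returns the requested model, or null when this producer cannot build it,
+        // in which case the connection asks the next producer in the chain;
+        // the last producer delegates to the provider connection.
+        <T> T produce(Class<T> modelType, ConsumerOperationParameters parameters);
+    }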
+
+## Story: Deprecate support for Tooling API clients earlier than Gradle 1.2 (DONE)
+
+When any of the following methods are called on the provider connection, treat the client version as deprecated:
+
+- `ConnectionVersion4.getModel()` and `executeBuild()`.
+- `InternalConnection.getTheModel()`.
+- `configureLogging(boolean)`.
+
+Whenever an operation is invoked on the provider connection by a deprecated client version, the connection implementation should report to
+the user that the client version is deprecated and support for it will be removed in Gradle 2.0.
+The logging output should be received through the stream attached to `LongRunningOperation.setStandardOutput()`.
+
+### Test cases
+
+- Running a build generates a warning when using a client < 1.2, and does not generate a warning when using a client >= 1.2.
+- Fetching a model generates a warning when using a client < 1.2, and does not generate a warning when using a client >= 1.2.
+
+## Story: Deprecate support for Gradle versions earlier than Gradle 1.0-milestone-8 (DONE)
+
+When the provider connection does not implement `InternalConnection` then treat the provider version as deprecated.
+
+Whenever an operation is invoked on a deprecated provider version, the client implementation should report to the user that the provider
+version is deprecated and support for it will be removed in Gradle 2.0.
+The logging output should be received through the stream attached to `LongRunningOperation.setStandardOutput()`.
+
+### Test cases
+
+- Running a build generates a warning when using a provider version < 1.0-milestone-8, and does not generate a warning when using a provider version >= 1.0-milestone-8.
+- Fetching a model generates a warning when using a provider version < 1.0-milestone-8, and does not generate a warning when using a provider version >= 1.0-milestone-8.
+
+## Story: Expose the publications of a project (DONE)
+
+This story allows an IDE to map dependencies between different Gradle builds and between Gradle and non-Gradle builds.
+For incoming dependencies, the Gradle coordinates of a given library are already exposed through `ExternalDependency`. This
+story exposes the outgoing publications of a Gradle project.
+
+1. Add a `GradlePublication` type with the following properties:
+    1. An `id` property with type `GradleModuleVersion`.
+2. Add a `publications` property to `GradleProject` with type `DomainObjectSet<? extends GradlePublication>`.
+3. Include an `@since` javadoc tag and an `@Incubating` annotation on the new types and methods.
+4. Introduce a project-scoped internal service which provides some detail about the publications of a project.
+    This service will also be used during dependency resolution. See [dependency-management.md](dependency-management.md#story-dependency-resolution-result-exposes-local-component-instances-that-are-not-module-versions)
+    1. The `publishing` plugin registers each publication defined in the `publishing.publications` container.
+        For an instance of type `IvyPublicationInternal`, use the publication's `identity` property to determine the publication identifier to use.
+        For an instance of type `MavenPublicationInternal`, use the publication's `mavenProjectIdentity` property.
+    2. For each `MavenResolver` defined for the `uploadArchives` task, register a publication. Use the resolver's `pom` property to determine the
+        publication identifier to use. Will need to deal with duplicate values.
+    3. 
When the `uploadArchives` task has any other type of repository defined, then register a publication that uses the `uploadArchives.configuration.module` + property to determine the publication identifier to use. +5. Change `GradleProjectBuilder` to use this service to populate the tooling model. + +An example usage: + + GradleProject project = connection.getModel(GradleProject.class); + for (GradlePublication publication: project.getPublications()) { + System.out.println("project " + project.getPath() + " produces " + publication.getId()); + } + +### Test coverage + +- Add a new `ToolingApiSpecification` integration test class that covers: + - For a project that does not configure `uploadArchives` or use the publishing plugins, verify that the tooling model does not include any publications. + - A project that uses the `ivy-publish` plugin and defines a single Ivy publication. + - A project that uses the `maven-publish` plugin and defines a single Maven publication. + - A project that uses the `maven` plugin and defines a single remote `mavenDeployer` repository on the `uploadArchives` task. + - A project that defines a single Ivy repository on the `uploadArchives` task. +- Verify that a decent error message is received when using a Gradle version that does not expose the publications. + +## Story: Expose the build script of a project (DONE) + +This story exposes via the tooling API some basic information about the build script of a project. + +1. Add a `GradleScript` type with the following properties: + 1. A `file` property with type `File`. +2. Add a `buildScript` property to `GradleProject` with type `GradleScript`. +3. Include an `@since` javadoc tag and an `@Incubating` annotation on the new types and methods. +4. Change `GradleProjectBuilder` to populate the model. + +An example usage: + + GradleProject project = connection.getModel(GradleProject.class); + System.out.println("project " + project.getPath() + " uses script " + project.getBuildScript().getFile()); + +### Test coverage + +- Add a new `ToolingApiSpecification` integration test class that covers: + - A project with standard build script location + - A project with customized build script location +- Verify that a decent error message is received when using a Gradle version that does not expose the build scripts. + - Request `GradleProject` directly. + - Using `GradleProject` via an `EclipseProject` or `IdeaModule`. + +## Story: GRADLE-2434 - Expose the aggregate tasks for a project (DONE) + +This story allows an IDE to implement a way to select the tasks to execute based on their name, similar to the Gradle command-line. + +1. Add an `EntryPoint` model interface, which represents some arbitrary entry point to the build. +2. Add a `TaskSelector` model interface, which represents an entry point that uses a task name to select the tasks to execute. +3. Change `GradleTask` to extend `EntryPoint`, so that each task can be used as an entry point. +4. Add a method to `GradleProject` to expose the task selectors for the project. + - For new target Gradle versions, delegate to the provider. + - For older target Gradle versions, use a client-side mix-in that assembles the task selectors using the information available in `GradleProject`. +5. Add methods to `BuildLauncher` to allow a sequence of entry points to be used to specify what the build should execute. +6. Add `@since` and `@Incubating` to the new types and methods. 
+
+Here are the above types:
+
+    interface EntryPoint {
+    }
+
+    interface TaskSelector extends EntryPoint {
+        String getName(); // A display name
+    }
+
+    interface GradleTask extends EntryPoint {
+        ...
+    }
+
+    interface GradleProject {
+        DomainObjectSet<? extends TaskSelector> getTaskSelectors();
+        ...
+    }
+
+    interface BuildLauncher {
+        BuildLauncher forTasks(Iterable<? extends Task> tasks);
+        BuildLauncher forTasks(EntryPoint... tasks);
+        ...
+    }
+
+TBD - maybe don't change `forTasks()` but instead add an `execute(Iterable<? extends EntryPoint> tasks)` method.
+
+### Test cases
+
+- Can request the entry points for a given project hierarchy
+    - Task is present in some subprojects but not the target project
+    - Task is present in target project but no subprojects
+    - Task is present in target project and some subprojects
+- Executing a task selector when task is also present in subprojects runs all the matching tasks, for the above cases.
+- Can execute a task selector from a child project. Verify the tasks from the child project are executed.
+- Executing a task (as an `EntryPoint`) when the task is also present in subprojects runs the specified task only and nothing from subprojects.
+- Can request the entry points for all target Gradle versions.
+
diff --git a/design-docs/gradle-2.0.md b/design-docs/gradle-2.0.md
index d150a2e211e6..3563493fdfb0 100644
--- a/design-docs/gradle-2.0.md
+++ b/design-docs/gradle-2.0.md
@@ -6,18 +6,26 @@ list of ideas to consider before shipping Gradle 2.0.

Note: for the change listed below, the old behaviour or feature to be removed should be deprecated in a Gradle 1.x release, probably no later
than Gradle 1.9. Similarly for changes to behaviour.

-# Planned
+# Planned for 2.0

The following stories are to be included in Gradle 2.0.

-## Remove all features deprecated as at Gradle 1.9
+## Un-deprecate using the packaging declared in a Maven POM to probe for the module artifacts

-In the Gradle 2.0-rc-1 release, remove all features that are deprecated as at Gradle 1.9 or earlier:
+Leave this behaviour in until the mechanisms to better infer the artifacts for a module have been implemented.
+
+## Remove all features deprecated as at Gradle 1.12
+
+In the Gradle 2.0-rc-1 release, remove all features that are deprecated as at Gradle 1.12 or earlier:

* Search for usages of `DeprecationLogger`, `@Deprecated`, `@deprecated` and remove the associated feature.
* Review usages of `DeprecationLogger.whileDisabled()`.
* Remove `JavaPluginGoodBehaviourTest#changing debug flag does not produce deprecation warning`

+## Replace deprecation warnings with errors
+
+* Convert deprecated behaviours to errors.
+
## Remove Ivy types from the Gradle repository API

These types expose the implementation details of dependency management and force a certain implementation on Gradle. Removing these types from the API
@@ -26,24 +34,11 @@ allows us to implement new features and remove some internal complexity.

* Remove methods from `ArtifactRepositoryContainer` and `RepositoryHandler` that accept an Ivy `DependencyResolver` as parameter.
* Remove methods from `ArtifactRepositoryContainer` that return `DependencyResolver`.
* Remove `RepositoryHandler.mavenRepo()`.
-* Change the `MavenResolver` implementation so that it no longer extends `DependencyResolver`.
+* Change the `AbstractMavenResolver` so that it no longer extends `DependencyResolver`.
* Change the `FlatDirRepository` implementation so that it no longer uses a `DependencyResolver` implementation.
+* Remove Ivy packages from the Gradle API filter.
* Remove Ivy version from the output of `gradle -v`.
-
-## Remove the Gradle Open API
-
-Now that we have reasonable tooling support via the tooling API, remove the Open API.
-
-* Implement a stub to fail with a reasonable error message when attempting to use Gradle from the Open API.
-* Remove the remaining Open API classes and project.
-* Add integration test coverage.
-
-## Remove the `GradleLauncher` API
-
-The public API for launching Gradle is now the tooling API. The `GradleBuild` task can also be used.
-
-* Replace internal usages of the static `GradleLauncher` methods.
-* Move the `GradleLauncher` type from the public API to an internal package.
+* Remove loopback resolver, ModuleVersionRepository -> Ivy adapter.

## Remove tooling API support for Gradle 1.1 clients and earlier

Gradle 1.2 was released on 12th sept 2012. This change means that tooling more than approximately 18 months old as of the Gradle 2.0 release
will not be able to invoke Gradle 2.0 or later.

* Change the implementation of methods on `ConnectionVersion4` and `InternalConnection` to fail with a decent error message.
-* The model implementations no longer need to implement `ProjectVersion3` or the protocol interfaces.
-* Add integration test coverage.
+* The model implementations no longer need to implement `ProjectVersion3` or the protocol interfaces.
+* Change test suite to default to tooling API versions >= 1.2.
+* Add integration test coverage that tooling API versions < 1.2 fail with a reasonable error message, when running a build or fetching a model.

## Remove tooling API support for Gradle providers 1.0-milestone-7 and earlier

Gradle 1.0-milestone-8 was released on 14th feb 2012. This change means that tooling will not be able to build projects with a provider version more than
approximately 2 years old as of the Gradle 2.0 release.

* Consumer fails with a decent error message instead of falling back to the methods on `ConnectionVersion4`.
-* Remove the appropriate ConsumerConnection implementations.
-* Add integration test coverage.
-
-## Misc API tidy-ups
-
-* Remove unused `IllegalOperationAtExecutionTimeException`.
-* Remove unused `AntJavadoc`.
+* Add support for fetching partial `BuildEnvironment` model for unsupported versions.
+* Change the test suite to default to target Gradle version >= 1.0-milestone-8
+* Add integration test coverage that running a build with Gradle version < 1.0-milestone-8 fails with a reasonable error message, when running a build or fetching a model.
+* Add integration test coverage that can fetch a partial `BuildEnvironment` model for Gradle version < 1.0-milestone-8.

## Reset deprecation warnings

@@ -75,10 +68,37 @@ approximately 2 years old as of the Gradle 2.0 release.

## All Gradle scripts use UTF-8 encoding

* Change Gradle script parsing to assume UTF-8 encoding.
+* Prefer character encoding specified by the server, if any.
+* Update user guide to mention this.
+
+## Upgrade to most recent Groovy 2.2.x

-## Upgrade to most recent Groovy 2.x
+* Change the version of Groovy exposed via the Gradle API to most recent Groovy 2.2.x version.
+* Change to use `groovy` instead of `groovy-all`.
+    * Change Groovy runtime detector to deal with this change.
+* Add int test coverage for building and groovydoc for permutations of Groovy versions and (`groovy` or `groovy-all`)
+
+## Remove support for running Gradle on Java 5

-* Change the version of Groovy exposed via the Gradle API to most recent Groovy 2.x.
+In order to add support for Java 8, we will need to upgrade to Groovy 2.3, which does not support Java 5.
+Would still be able to build for Java 5.
+ +* Add cross-compilation int tests for Java 5 - 8. +* Document how to build for Java 5. +* Compile wrapper, launcher and tooling API connection entry points separately for Java 5. +* Update CI builds to use newer Java versions. +* Entry points complain when executed using Java 5. +* Drop support for running with Java 5. + +## Add support for Java 8 + +* Change the version of Groovy exposed via the Gradle API to most recent Groovy 2.3.x version. +* Remove source exclusions for jdk6. +* Change `InetAddressFactory` so that it no longer uses reflection to inspect `NetworkInterface`. +* Remove the special case logging from `LogbackLoggingConfigurer`. +* Replace usages of `guava-jdk5`. +* Clean up usages of `TestPrecondition.JDK5` and related preconditions. +* Add warning when using Java version > 8 to inform the user that the Java version may not be supported. ## Archive tasks + base plugin @@ -92,176 +112,3 @@ The current defaults for the outputs of tasks of type `Test` conflict with each * Change the default result and report directory for the `Test` type to include the task's name, so that the default does not conflict with the default for any other `Test` task. * Change the default TestNG output directory. - -## Rename this spec - -# Candidates - -The following stories are candidates to be included in a major release of Gradle. Currently, they are *not* scheduled to be included in Gradle 2.0. - -## Remove `group` and `status` from project - -Alternatively, default the group to `null` and status to `integration`. - -## Remove the Ant-task based Scala compiler - -* Change the default for `useAnt` to `false` and deprecate the `useAnt` property. - -## Don't inject tools.jar into the system ClassLoader - -Currently required for in-process Ant-based compilation on Java 5. Dropping support for one of (in-process, ant-based, java 5) would allow us to remove this. - -## Decouple publishing DSL from Maven Ant tasks - -* Change the old publishing DSL to use the Maven 3 classes instead of Maven 2 classes. This affects: - * `MavenResolver.settings` - * `MavenDeployer.repository` and `snapshotRepository`. - * `MavenPom.dependencies`. -* Remove `MavenDeployer.addProtocolProviderJars()`. -* Change `PublishFilter` so that it accepts a `PublishArtifact` instead of an `Artifact`. - -## Copy tasks - -There are several inconsistencies and confusing behaviours in the copy tasks and copy spec: - -* Change copy tasks so that they no longer implement `CopySpec`. Instead, they should have a `content` property which is a `CopySpec` that contains the main content. - Leave behind some methods which operate on the file tree as a whole, eg `eachFile()`, `duplicatesStrategy`, `matching()`. -* Change the copy tasks so that `into` always refers to the root of the destination file tree, and that `destinationDir` (possibly with a better name) is instead used - to specify the root of the destination file tree, for those tasks that produce a file tree on the file system. -* Change the `Jar` type so that there is a single `metaInf` copy spec which is a child of the main content, rather than creating a new copy spec each time `metainf` - is referenced. Do the same for `War.webInf`. -* The `CopySpec.with()` method currently assumes that a root copy spec is supplied with all values specified, and no values are inherted by the attached copy spec. - Instead, change `CopySpec.with()` so that values are inherited from the copy spec. -* Change the default duplicatesStrategy to `fail` or perhaps `warn`. 
-* Change the `Ear` type so that the generated descriptor takes precedence over a descriptor in the main content, similar to the manifest for `Jar` and the - web XML for `War`. - -## Remove old dependency result graph - -The old dependency result graph is expensive in terms of heap usage. We should remove it. - -* Promote new dependency result graph to un-incubate it. -* Remove methods that use `ResolvedDependency` and `UnresolvedDependency`. -* Keep `ResolvedArtifact` and replace it later, as it is not terribly expensive to keep. - -## Remove API methods that are added by the DSL decoration - -Some model types hand-code the DSL conventions in their API. We should remove these and let the DSL decoration take care of this, to simplify these -types and to offer a more consistent DSL. - -* Remove all methods that accept a `Closure` when an `Action` overload is available. Add missing overloads where appropriate. -* Remove all methods that accept a `String` or `Object` when a enum overload is available. Add missing overloads where appropriate. -* Remove CharSequence -> Enum conversion code in `DefaultTaskLogging`. -* Remove all set methods that contain no custom logic. - -## Tooling API clean ups - -* Move `UnsupportedBuildArgumentException` and `UnsupportedOperationConfigurationException` up to `org.gradle.tooling`, to remove - package cycle from the API. - -## Clean up `DefaultTask` hierarchy - -* Inline `ConventionTask` and `AbstractTask` into `DefaultTask`. -* Remove `Task.dependsOnTaskDidWork()`. -* Mix `TaskInternal` in during decoration and remove references to internal types. - -## Remove references to internal classes from API - -* Remove `Configurable` from public API types. -* Remove `PomFilterContainer.getActivePomFilters()`. -* Change `StartParameter` so that it no longer extends `LoggingConfiguration`. -* Move `ConflictResolution` from public API (it's only used internally). -* Move `Module` from public API (it's only used internally). -* Move `Logging.ANT_IVY_2_SLF4J_LEVEL_MAPPER` from public API. -* Move `AntGroovydoc` and `AntScalaDoc` from public API. -* Move `BuildExceptionReporter`, `BuildResultLogger`, `TaskExecutionLogger` and `BuildLogger` from public API. - -## Remove support for convention objects - -Extension objects have been available for over 2 years and are now an established pattern. - -* Migrate core plugins to use extensions. -* Remove `Convention` type. - -## Container API tidy-ups - -* Remove the specialised subclasses of `UnknownDomainObjectException` and the overridden methods that exist simply to declare this from `PluginContainer`, `ArtifactRepositoryContainer`, - `ConfigurationContainer`, `TaskCollection`. -* Remove the specialised methods such as `whenTaskAdded()` from `PluginCollection`, `TaskCollection` -* Remove the `extends T` upper bound on the type variable of `DomainObjectCollection.withType()`. -* Remove the type varable from `ReportContainer` -* Remove unused constants from `ArtifactRepositoryContainer` -* Move `ReportContainer.ImmutableViolationException` to make top level. - -## Dependency API tidy-ups - -* Remove `equals()` implementations from `Dependency` subclasses. -* Remove `ExternalDependency.force`. Use resolution strategy instead. -* Remove `SelfResolvingDependency.resolve()` methods. These should be internal and invoked only as part of resolution. -* Remove `ClientModule` and replace with consumer-side component meta-data rules. -* Remove `ExternalModuleDependency.changing`. Use component meta-data rules instead. 
-
-## Invocation API tidy-ups
-
-* Remove the public `StartParameter` constructor.
-* Change `StartParameter` into an interface.
-
-## Misc API tidy-ups
-
-* Rename `IllegalDependencyNotation` to add `Exception` to the end of its name.
-* Remove `ConventionProperty`, replace it with documentation.
-* Remove `Settings.startParameter`. Can use `gradle.startParameter` instead.
-* Remove `org.gradle.util` from default imports.
-* Remove `AbstractOptions`.
-
-## Remove `sonar` plugin
-
-Promote the `sonar-runner` plugin and remove the `sonar` plugin.
-
-## Remove support for running Gradle on Java 5
-
-Would still be able to compile for Java 5. Would need to improve cross-compilation support.
-
-## Remove usages of JNA and JNA-Posix
-
-Replace all usages of JNA and JNA-Posix with native-platform. Currently, this means that console support and
-UNIX file permissions with JVMs earlier than Java 7 will not be supported on the following platforms:
-
-* Linux-ia64
-* Solaris-x86, -amd64, -sparc, -sparcv9
-* FreeBSD-i386, -amd64
-
-## Decorate classes at load time instead of subclassing
-
-Decorating classes at load time is generally a more reliable approach and offers a few new interesting use cases we can support. For example, by decorating classes
-at load time we can support expressions such as `new MyDslType()`, rather than requiring that Gradle control the instantiation of decorated objects.
-
-Switching to decoration at load time should generally be transparent to most things, except for clients of `ProjectBuilder` that refer to types
-which are not loaded by Gradle, such as the classes under test.
-
-## Restructure plugin package hierarchy
-
-## buildNeeded and buildDependents
-
-* Rename buildDependents to buildDownstream
-* Rename buildNeeded to buildUpstream
-* Add a new task buildStream which is equivalent to buildDownstream buildUpstream
-
-## build.gradle in a multiproject build
-
-* A Gradle best pattern is to name the gradle file to be the same name as the subproject.
-* In Gradle 2.0, let's support this out of the box, possibly as a preference to `build.gradle`, and maybe drop support for `build.gradle` in subprojects.
-
-## Why remind people about Maven?
-
-Change from:
-
-    repositories {
-        mavenCentral()
-    }
-
-to:
-
-    repositories {
-        central()
-    }
diff --git a/design-docs/gradle-3.0.md b/design-docs/gradle-3.0.md
new file mode 100644
index 000000000000..c5d997846596
--- /dev/null
+++ b/design-docs/gradle-3.0.md
@@ -0,0 +1,162 @@
+# Gradle 3.0
+
+Gradle 3.0 is the next major Gradle release that offers the opportunity to make breaking changes to the public interface of Gradle. This document captures a laundry
+list of ideas to consider before shipping Gradle 3.0.
+
+Note: for the changes listed below, the old behaviour or feature to be removed should be deprecated in a Gradle 2.x release. The same applies to changes in behaviour.
+
+# Candidates for Gradle 3.0
+
+The following stories are candidates to be included in a major release of Gradle. Currently, they are *not* scheduled to be included in Gradle 3.0.
+
+## Remove the Gradle Open API stubs
+
+* Remove the remaining Open API interfaces and stubs.
+* Remove the `openApi` project.
+
+## Remove `group` and `status` from project
+
+Alternatively, default the group to `null` and status to `integration`.
+
+## Remove the Ant-task based Scala compiler
+
+* Change the default for `useAnt` to `false` and deprecate the `useAnt` property.
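+
+For context, builds can already opt out of the Ant-based Scala compiler today; a minimal sketch of that opt-out (using the existing `useAnt` property):
+
+    apply plugin: 'scala'
+
+    tasks.withType(ScalaCompile) {
+        // false selects the Zinc-based incremental compiler rather than the Ant task
+        scalaCompileOptions.useAnt = false
+    }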
+
+## Don't inject tools.jar into the system ClassLoader
+
+Currently required for in-process Ant-based compilation on Java 5. Dropping support for one of (in-process, ant-based, java 5) would allow us to remove this.
+
+## Decouple publishing DSL from Maven Ant tasks
+
+* Change the old publishing DSL to use the Maven 3 classes instead of Maven 2 classes. This affects:
+    * `MavenResolver.settings`
+    * `MavenDeployer.repository` and `snapshotRepository`.
+    * `MavenPom.dependencies`.
+* Remove `MavenDeployer.addProtocolProviderJars()`.
+* Change `PublishFilter` so that it accepts a `PublishArtifact` instead of an `Artifact`.
+
+## Copy tasks
+
+There are several inconsistencies and confusing behaviours in the copy tasks and copy spec:
+
+* Change copy tasks so that they no longer implement `CopySpec`. Instead, they should have a `content` property which is a `CopySpec` that contains the main content.
+  Leave behind some methods which operate on the file tree as a whole, eg `eachFile()`, `duplicatesStrategy`, `matching()`.
+* Change the copy tasks so that `into` always refers to the root of the destination file tree, and that `destinationDir` (possibly with a better name) is instead used
+  to specify the root of the destination file tree, for those tasks that produce a file tree on the file system.
+* Change the `Jar` type so that there is a single `metaInf` copy spec which is a child of the main content, rather than creating a new copy spec each time `metaInf`
+  is referenced. Do the same for `War.webInf`.
+* The `CopySpec.with()` method currently assumes that a root copy spec is supplied with all values specified, and no values are inherited by the attached copy spec.
+  Instead, change `CopySpec.with()` so that values are inherited from the copy spec.
+* Change the default duplicatesStrategy to `fail` or perhaps `warn`.
+* Change the `Ear` type so that the generated descriptor takes precedence over a descriptor in the main content, similar to the manifest for `Jar` and the
+  web XML for `War`.
+
+## Remove old dependency result graph
+
+The old dependency result graph is expensive in terms of heap usage. We should remove it.
+
+* Promote new dependency result graph to un-incubate it.
+* Remove methods that use `ResolvedDependency` and `UnresolvedDependency`.
+* Keep `ResolvedArtifact` and replace it later, as it is not terribly expensive to keep.
+
+## Remove API methods that are added by the DSL decoration
+
+Some model types hand-code the DSL conventions in their API. We should remove these and let the DSL decoration take care of this, to simplify these
+types and to offer a more consistent DSL (see the sketch below).
+
+* Remove all methods that accept a `Closure` when an `Action` overload is available. Add missing overloads where appropriate.
+* Remove all methods that accept a `String` or `Object` when an enum overload is available. Add missing overloads where appropriate.
+* Remove CharSequence -> Enum conversion code in `DefaultTaskLogging`.
+* Remove all set methods that contain no custom logic.
+
+## Tooling API clean ups
+
+* Move `UnsupportedBuildArgumentException` and `UnsupportedOperationConfigurationException` up to `org.gradle.tooling`, to remove
+  package cycle from the API.
+
+## Clean up `DefaultTask` hierarchy
+
+* Inline `ConventionTask` and `AbstractTask` into `DefaultTask`.
+* Remove `Task.dependsOnTaskDidWork()`.
+* Mix `TaskInternal` in during decoration and remove references to internal types.
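+
+A sketch of the DSL decoration point above, for a hypothetical method configuring a hypothetical `Thing` (illustrative only, not a real Gradle type):
+
+    // Hand-coded Closure overload, to be removed from the public type:
+    void doStuff(Closure closure)
+
+    // Kept; the DSL decoration generates the Closure-accepting variant:
+    void doStuff(Action<? super Thing> action)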
+
+## Remove references to internal classes from API
+
+* Remove `Configurable` from public API types.
+* Remove `PomFilterContainer.getActivePomFilters()`.
+* Change `StartParameter` so that it no longer extends `LoggingConfiguration`.
+* Move `ConflictResolution` from public API (it's only used internally).
+* Move `Module` from public API (it's only used internally).
+* Move `Logging.ANT_IVY_2_SLF4J_LEVEL_MAPPER` from public API.
+* Move `AntGroovydoc` and `AntScalaDoc` from public API.
+* Move `BuildExceptionReporter`, `BuildResultLogger`, `TaskExecutionLogger` and `BuildLogger` out of the public API.
+
+## Remove support for convention objects
+
+Extension objects have been available for over 2 years and are now an established pattern.
+
+* Migrate core plugins to use extensions.
+* Remove `Convention` type.
+
+## Project no longer inherits from its parent project
+
+* Project should not delegate to its build script for missing properties or methods.
+* Project should not delegate to its parent for missing properties or methods.
+* Project build script classpath should not inherit anything from parent project.
+
+## Container API tidy-ups
+
+* Remove the specialised subclasses of `UnknownDomainObjectException` and the overridden methods that exist simply to declare this from `PluginContainer`, `ArtifactRepositoryContainer`,
+  `ConfigurationContainer`, `TaskCollection`.
+* Remove the specialised methods such as `whenTaskAdded()` from `PluginCollection`, `TaskCollection`.
+* Remove the `extends T` upper bound on the type variable of `DomainObjectCollection.withType()`.
+* Remove the type variable from `ReportContainer`.
+* Remove unused constants from `ArtifactRepositoryContainer`.
+* Move `ReportContainer.ImmutableViolationException` to make it a top-level type.
+
+## Dependency API tidy-ups
+
+* Remove `equals()` implementations from `Dependency` subclasses.
+* Remove `ExternalDependency.force`. Use resolution strategy instead.
+* Remove `SelfResolvingDependency.resolve()` methods. These should be internal and invoked only as part of resolution.
+* Remove `ClientModule` and replace with consumer-side component meta-data rules.
+* Remove `ExternalModuleDependency.changing`. Use component meta-data rules instead.
+
+## Invocation API tidy-ups
+
+* Remove the public `StartParameter` constructor.
+* Remove the public `StartParameter` constants, such as `GRADLE_USER_HOME_PROPERTY_KEY`.
+* Change `StartParameter` into an interface.
+
+## Misc API tidy-ups
+
+* Rename `IllegalDependencyNotation` to add `Exception` to the end of its name.
+* Remove `ConventionProperty`, replace it with documentation.
+* Remove `Settings.startParameter`. Can use `gradle.startParameter` instead.
+* Remove `org.gradle.util` from default imports.
+* Remove `AbstractOptions`.
+
+## Remove `sonar` plugin
+
+Promote the `sonar-runner` plugin and remove the `sonar` plugin.
+
+## Decorate classes at load time instead of subclassing
+
+Decorating classes at load time is generally a more reliable approach and offers a few new interesting use cases we can support. For example, by decorating classes
+at load time we can support expressions such as `new MyDslType()`, rather than requiring that Gradle control the instantiation of decorated objects.
+
+Switching to decoration at load time should generally be transparent to most things, except for clients of `ProjectBuilder` that refer to types
+which are not loaded by Gradle, such as the classes under test.
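+
+A sketch of the difference, using the hypothetical `MyDslType` from above (`instantiator` stands in for Gradle's internal instance factory):
+
+    // Today: instances must be created through Gradle so that the
+    // subclass-based decoration can be applied:
+    MyDslType a = instantiator.newInstance(MyDslType)
+
+    // With load-time decoration, plain construction is decorated too:
+    MyDslType b = new MyDslType()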
+
+## Restructure plugin package hierarchy
+
+## buildNeeded and buildDependents
+
+* Rename buildDependents to buildDownstream
+* Rename buildNeeded to buildUpstream
+* Add a new task buildStream which is equivalent to running both buildDownstream and buildUpstream
+
+## build.gradle in a multiproject build
+
+* A Gradle best practice is to give the build file the same name as its subproject.
+* Let's support this out of the box, possibly in preference to `build.gradle`, and maybe drop support for `build.gradle` in subprojects.
diff --git a/design-docs/ide-integration.md b/design-docs/ide-integration.md
index 1842d7f3dfd0..af90aef3bd3e 100644
--- a/design-docs/ide-integration.md
+++ b/design-docs/ide-integration.md
@@ -82,34 +82,80 @@ Change the task visibility logic so that the lifecycle task of all `BuildableMod

A new model will be available for defining IDEA scopes: these scopes will be formed by combining and excluding Gradle configurations and other scopes.
The model can statically restrict the available scopes to 'compile', 'runtime', 'provided' and 'test'.

-- scope `provided` should default to empty, or `configurations.providedCompile` when the war plugin is applied.
-- scope `compile` should default to `configurations.compile` minus scope `provided`.
-- scope `runtime` should default to `configurations.runtime` minus scope `compile`.
-- scope `test` should default to (`configurations.testCompile` minus scope `compile`) union (`configurations.testRuntime` minus scope `runtime`).
+#### Model

-#### User visible changes
+Introduce a model for binaries that run on the JVM and that are built locally:

-TODO: Example DSL
+    interface Classpath { // This type already exists
+        ...
+        libs(Object) // Adds libraries to this classpath
+    }

-The new DSL (and model defaults) will be used to configure an IDEA module when the current `scopes` map has not been altered by the user.
-Once the new DSL is stabilised we will deprecate and remove the `scopes` map.
+    interface JvmPlatform {
+        Classpath compile
+        Classpath runtime
+    }
+
+    interface JvmBinary {
+        JvmPlatform platform
+        Classpath compile
+        Classpath runtime
+    }

-Initial proposal for the new DSL is to support complete scope definition ...
+    interface MainJvmBinary extends JvmBinary {
+        JvmBinary tests
+    }
+
+The Java base plugin registers an implementation of `MainJvmBinary`, and the Java and WAR plugins fill it in so that:
+
+- `configurations.compile` is added to `jvmBinary.compile`
+- `configurations.runtime` is added to `jvmBinary.runtime`
+- `configurations.testCompile` is added to `jvmBinary.test.compile`
+- `configurations.testRuntime` is added to `jvmBinary.test.runtime`
+- `configurations.providedCompile` is added to `jvmBinary.platform.compile`
+- `configurations.providedRuntime` is added to `jvmBinary.platform.runtime`
+
+Introduce IDEA scope model:
+
+    interface IdeaScope {
+        String name
+        List libraries // read-only, calculated on demand
+    }
+
+    interface IdeaScopes {
+        IdeaScope provided
+        IdeaScope compile
+        IdeaScope runtime
+        IdeaScope test
+    }

-```
-    ideaScope.TEST.build {
-        filtered(base: configurations.testCompile, excluded: configurations.compile)
-        filtered(base: configurations.testRuntime, excluded: configurations.runtime)
+    class IdeaModule { // this type already exists
+        ...
+        IdeaScopes ideaScopes
    }
-```
-... or to append to previously defined set of items for this scope like ...
+
-```
-    ideaScope.TEST.append {
-        configurations.integTestCompile
+The IDEA plugin calculates the contents of the scopes based on the `JvmBinary` model defined above:
+
+- scope `provided` should contain `jvmBinary.platform.compile`.
+- scope `compile` should contain `jvmBinary.compile` minus scope `provided`.
+- scope `runtime` should contain (`jvmBinary.runtime` union `jvmBinary.platform.runtime`) minus scope `compile`.
+- scope `test` should contain (`jvmBinary.test.compile` minus scope `compile`) union (`jvmBinary.test.runtime` minus scope `runtime`).
+
+An example customisation:
+
+    binaries {
+        jvm {
+            test.compile configurations.integTestCompile
+            test.runtime configurations.integTestRuntime
+
+            platform.compile configurations.myCustomProvided
+        }
    }
-```
+
+TODO: Example DSL
+
+The new DSL (and model defaults) will be used to configure an IDEA module when the current `scopes` map has not been altered by the user.
+Once the new DSL is stabilised we will deprecate and remove the `scopes` map.

#### Implementation

@@ -129,7 +175,7 @@ Initial proposal for the new DSL is to support complete scope definition ...

- When a java project has a dependency declared for `testRuntime` and `runtime`, the dependency appears with `runtime` scope only.
- When a java project has a dependency declared for `compile` and `testCompile`, the dependency appears with `compile` scope only.
- When a war project has a dependency declared for `providedCompile` and `compile`, the dependency appears with `provided` scope only.
-- Current defaults are used when the `scopes` maps is configured by user
+- Current defaults are used when the `scopes` map is configured by the user.
- User is informed of failure when both `scopes` map and new DSL are configured.

#### Open issues

@@ -148,6 +194,20 @@ Later stories incrementally add more graceful cancellation handling.

See [tooling-api-improvements.md](tooling-api-improvements.md#story-tooling-api-client-cancels-a-long-running-operation)

+## Feature - Expose dependency resolution problems
+
+- For the following kinds of failures:
+    - Missing or broken module version
+    - Missing or broken jar
+    - Missing or broken source and javadoc artifact
+- Change the IDE plugins to warn for each such problem they ignore, and fail on all others.
+- Change the tooling model to include the failure details for each such problem.
+
+### Test coverage
+
+- Change the existing IDE plugin int tests to verify the warning is produced in each of the above cases.
+- Add test coverage for the tooling API to cover the above cases.
+
## Feature - Expose build script compilation details

See [tooling-api-improvements.md](tooling-api-improvements.md#story-expose-the-compile-details-of-a-build-script):

@@ -204,12 +264,6 @@ Expose Scala language level and other details about a Scala component.

Expose the corresponding Eclipse and IDEA model.

-### Story - Expose the publications of a project
-
-See [tooling-api-improvements.md](tooling-api-improvements.md#story-expose-the-publications-of-a-project):
-
-Expose the publications of a project so that the IDE can wire together Gradle and Maven builds.
-
### Story - Expose Web components to the IDE

Expose Web content, servlet API version, web.xml descriptor, runtime and container classpaths, and other details about a web application.
diff --git a/design-docs/incremental-java-compilation.md b/design-docs/incremental-java-compilation.md
index b44268b36ed1..6e32ed04ab0e 100644
--- a/design-docs/incremental-java-compilation.md
+++ b/design-docs/incremental-java-compilation.md
@@ -2,14 +2,14 @@

Generally, we want developer cycles to get faster and overall build performance to improve.

-- faster java compilation where little input source java classes have changed - the compiler is instructed to compile a fixed set of input classes instead of all.
+- faster java compilation where few input java source files have changed - the compiler is instructed to compile a fixed set of input source files instead of all of them.
- faster multi-project build: upstream project contains changed java classes. Those classes are not used by downstream projects.
  Hence, some (or all) downstream projects may completely skip the compilation step (or at least, compile incrementally/selectively)
-- less output classes are changed. This way, tools like JRebel are faster as they listen to the changed output classes, the less classes changed, the faster JRebel reload is.
+- fewer output classes are changed. This way, tools like JRebel are faster as they listen to the changed output classes; the fewer classes changed, the faster the JRebel reload is.

# Story: basic incremental compilation within a single project

-Faster compilation when little input classes (leaf classes) changed. The selection of classes for compilation needs to be reliable.
+Faster compilation when few input source files have changed. The selection of classes for compilation needs to be reliable.
Only relevant output classes are changed, remaining output classes are untouched.

## Implementation notes for the initial story

@@ -30,16 +30,62 @@ Only relevant output classes are changed, remaining output classes are untouched

- serialize to disk after compile task run, deserialize before compilation
- use simplest possible serialization mechanism

-### Coverage
-
-- detects deletion of class
-- detects adding of a new class
-- detects change in a class
+## Test Coverage
+
+- detects deletion of a source file
+    - class that is not used by any other class, the output class file is removed. No other output files are changed.
+    - class is used by another class, compilation fails.
+    - removes output files for inner and anonymous classes.
+- detects adding of a new source file
+    - output files are added. No other output files are changed.
+- detects change in a source file
+    - class that is not used by any other class, the output class file has changed. No other output files are changed.
+    - removes output files for inner and anonymous classes that no longer exist.
+    - class that is used by another class, the output class files of both are changed. No other output files are changed.
- understands class dependencies, requires dependents to be rebuilt
-- anything on the classpath that is not originating from the java source will require full rebuild upon change.
-  Most importantly, any jar dependency changes, directories on classpath that are from source, assume full rebuild.
+    - transitive dependencies.
+    - cycles in class dependencies.
+    - class that is a source-only annotation is changed, all source files are recompiled.
+    - class that contains a constant is changed, all source files are recompiled.
+- anything on the compile classpath that does not originate from the java source will require a full rebuild upon change.
+    - classpath jar or directory added.
+    - classpath jar or directory removed.
+    - classpath jar or directory changed.
+- two compile tasks share the same source files but different classpaths
+
+# Story: Handle transitive source dependencies
+
+## Test coverage
+
+- given `class C extends B { }; class B extends A { }; class A`, when the source file for `A` is changed, the source files for `B` and `C` should be recompiled
+
+# Story: Handle duplicate source files
+
+- when two source files that define the same class are present in the source files, then only changes to the first source file should be considered. Changes to the
+second source file should be ignored.
+- possibly warn when duplicate source files are present in the inputs.
+
+## Test coverage
+
+TBD
+
+# Story: Incremental compilation ignores resources in compilation classpath
+
+Ignore changes to resources (e.g. a manifest or some other non-class file) in the compilation classpath.

-# Story: basic incremental compilation across multi-project build
+# Story: Incremental compilation in the presence of joint compilation
+
+Need to consider the classes implicitly available in the output directory, which are also included on the compile classpath.
+
+# Story: Incremental compilation in the presence of compile failures
+
+Don't switch to full compilation when the previous execution failed due to compilation failures.
+
+# Story: Performance tests for incremental compilation
+
+Need to measure the performance of incremental vs full compilation.
+
+# Story: Basic incremental compilation across tasks

### Coverage

@@ -57,9 +103,16 @@ Seems simpler and might be good enough as a starter.

This way, the incremental compilation knows what classes have changed in a given project dependency.
This approach should be reliable but it may be slower. We need to unzip the jar and hash the contents.

-# Other stories / ideas
+### Open issues

-## Don't compile a source file when the API of its compile dependencies has not changed
+- handle duplicate classes in the compilation classpath
+
+# Story: Improve performance for cached incremental compilation state
+
+- share cached state between tasks
+- cache state in-memory for the daemon
+
+# Story: Don't compile a source file when the API of its compile dependencies has not changed

Currently, changing the body of a method invalidates all class files that have been compiled against the method's class. Instead, only the method's class
should be recompiled. Similarly, changing a resource file invalidates all class files that included that resource file in the compile classpath. Instead, resource files should be ignored
@@ -67,4 +120,8 @@ when compiling.

We don't necessarily need a full incremental Java compilation to improve this. For example, the Java compilation task may consider the API of the compile classpath
- if it has changed, then compile all source files, and if it has not, skip the task (assuming everything else is up to date). This means that a change to a method body does not propagate
-through the dependency graph.
\ No newline at end of file
+through the dependency graph.
+
+# Story: Deprecate and remove Ant based dependency analysis
+
+- need to promote the `options.incremental` flag.
diff --git a/design-docs/publishing-and-sharing-plugins.md b/design-docs/publishing-and-sharing-plugins.md
index 52b4f09a2e79..38a4c2991545 100644
--- a/design-docs/publishing-and-sharing-plugins.md
+++ b/design-docs/publishing-and-sharing-plugins.md
@@ -49,361 +49,583 @@ community plugins and the dependencies of whatever I need to build.
Similarly, the installation image of Android Studio needs to bundle the Gradle runtime, some core plugins, the Android plugin and some dependencies.

-# Implementation plan
+# Goals / Rationale
+
+## Spec to implementation mapping
+
+Resolving plugins is more complicated than resolving Maven or Ivy dependencies (as currently implemented/practiced).
+Conceptually, an extra layer is necessary to resolve a “plugin spec” (i.e. id/version) into a description of how to use it (i.e. the software and metadata needed to 'instantiate' it).
+Potentially, the implementations of those components then need to be resolved as well.
+Decoupling the requirement (i.e. plugin X) from how to provide it (i.e. this jar in that repository) enables many benefits.
+
+## Forward declaration
+
+Additionally, the fact that a build uses a given plugin will be “hoisted” up.
+At the moment, we can't reliably identify what plugins are used by a build until we configure the whole build.
+This prevents us from using that information in helpful ways.
+For example, tooling could determine which plugins are in use without having to run full configuration and could provide better authoring assistance.
+It also allows more optimised classloading structures to be formed.
+
+Another important implication here is that it opens the door to better tooling support.
+Knowing which plugins are in use can potentially provide a lot of information relevant to editors in order to provide assistance (e.g. autocomplete).
+Because plugins are forward declared, we can know what plugins are in use without requiring the script to be well formed.
+
+## Isolation
+
+Plugins also need to be insulated from each other.
+Currently plugin implementations are pushed into a common classloader in many circumstances.
+This causes class versioning problems, and all of the other problems associated with loading arbitrary code into an unpartitioned class space.
+Plugins under the new mechanism will be isolated from each other, and collaboration will be made more formal.
+
+Plugins forward declare their dependencies on other JVM libraries and separately on other Gradle plugins.
+Plugins also declare which classes from their implementation and dependencies are exported (i.e. visible to consumers of the plugin).
+
+_The exact mechanism and semantics of this sharing are TBD._
+
+## Plugin dependencies
+
+Plugin dependencies of plugins are now forward declared, and this information is available as part of the plugin metadata.
+When a user declares a dependency on a plugin, Gradle manages transitive resolution of all plugin dependencies and guarantees that all plugin dependencies have been _applied_ before the plugin is applied.
+
+Therefore, ideally, plugins will no longer use project.apply() to apply plugins but will rely on Gradle applying the plugin because the dependency was declared.
+Because use of plugins, and the dependencies of those plugins, is forward declared we can understand which plugins are used by a build without executing any “configuration” code.
+
+This means that plugin application is never conditional.
+More fine grained mechanisms will be available to plugin implementations for implementing conditional logic (i.e. model configuration rules).
+
+## Plugin ids
+
+Plugin ids will now be namespaced.
+This avoids collision by partitioning the namespace.
+Importantly, it also allows us (Gradle core team) to provide an “official” implementation of a previously available plugin by creating our own version, or taking ownership of the existing one, and releasing it under our namespace.

-There are several main parts to the solution:
+Plugin ids…

-A [public repository](https://bintray.com/gradle/gradle-plugins) hosted at [bintray](http://bintray.com) will be created to make plugins
-available to the community. The bintray UI provides plugin authors with a simple way publish and share their plugin with the community.
-It also allows build authors with a simple way to discover plugins.
+1. may contain any alphanumeric ASCII character, '.', '_' and '-'
+1. must contain at least one '.' character, that separates the namespace from the name
+1. consist of a namespace (everything before the last '.') and a name (everything after the last '.')
+1. conventionally use a lowercase reverse domain name convention for the namespace component
+1. conventionally use only lowercase characters for the name component
+1. 'org.gradle' and 'com.gradleware' namespaces are reserved (users are actively prevented from using these namespaces)
+1. cannot start or end with '.'
+1. cannot contain '..'

-The second part of the solution will be to improve the plugin resolution mechanism in Gradle to resolve plugins from
-this repository in some convenient way.
+Plugin specs can be made of qualified («namespace.name») or unqualified («name») plugin ids.

-Implementation-wise, we plan to change the mechanism for resolving a plugin declaration to a plugin implementation class
-to add an extension point. This will allow any particular strategy to be implemented, and strategies to be combined.
+Qualified: `org.gradle.java`
+Unqualified: `java`

-Built on top of this extension point will be a resolver implementation that uses the package meta-data from the bintray
-repository to resolve a plugin reference to a bintray package, and then to an implementation class.
+Individual plugin resolvers are at liberty to implicitly qualify unqualified ids.
+For example, the `plugins.gradle.org` based plugin resolver implicitly qualifies all unqualified ids to the `'org.gradle'` namespace.

-A "plugin development" plugin will be added to help plugin authors to build, test and publish their plugin to a repository,
-with additional integration with bintray to add the appropriate meta-data.
+### Open Questions

-Some additional reporting will also be added to help build authors understand the plugins that are used in their build and
-to discover new versions of the plugins that they are using.
+- Should we support all unicode alphanums? (it seems there is less agreement about what this set is)
+- When there is more than one implicit namespace, collisions can change over time as new plugins get added to the earlier resolved namespaces

-## Declaring and applying plugins
+## plugins.gradle.org

-A new DSL block will be introduced to apply plugins from a Gradle script. The plugins declared in this block will be made
-available to the script's compile classpath:
+Gradle will ask questions of this service via a JSON-over-HTTP API that is tailored for Gradle.
+Initially the focus of the service will be to power plugin _use_; over time it will expand to include browsing/searching and publishing.
+The `plugins.gradle.org` service will use a [Gradle plugin specific bintray repository](https://bintray.com/gradle/gradle-plugins) (implicitly) as the source of data.
+Bintray provides hosting, browsing, searching, publishing etc.
+
+# Implementation plan
+
+## Declaring plugins
+
+A new DSL block will be introduced to apply plugins from a Gradle script.

    plugins {
-        // Apply the given plugin to the target of the script
-        apply plugin: 'some-plugin'
+        // Declare that the given plugin should be used
+        id "some-plugin"
+        id("some-plugin")
+
        // Apply the given version of the plugin to the target of the script
-        apply plugin: 'some-plugin', version: '1.2+'
-        // Apply the given script to the target of this script
-        apply from: 'some-script.gradle'
+        id 'some-plugin' version '1.2+'
+        id('some-plugin').version('1.2+')
    }

The block will be supported in build, settings and init scripts.

-Using a separate DSL block means that:
-
-- The block can be executed before the rest of the script is compiled. This way, the public API of each plugin can be made available to the script.
-- The block can be executed early in the build, to allow configuration-time dependencies on other projects to be declared.
-- The plugin DSL can be evolved without affecting the current DSL.
-
-The `plugins {} ` block is executed after the `buildscript {}` blocks, and must appear after it in the script. Script execution becomes:
-1. Parse the script to extract the `buildscript {}` and `plugins {}` blocks.
-2. Compile the blocks, using the Gradle API as compile classpath. This means that the classpath declared in the `buildscript {}` block
-   will not be available to the `plugins {}` block.
-2. Execute the blocks in the order they appear in the script.
-3. Resolve the script compile classpath. This means resolving the `classpath` declared in the `buildscript {}` block, plus the public API
-   declared for each plugin, and detecting and resolving conflicts.
-4. Compile the script.
-5. Execute the script.
-
-The `plugins {}` block does not delegate to the script's target object. Instead, each script has its own plugin handler. This handler represents
-a context for resolving plugin declarations. The plugin handler is responsible for taking a plugin declaration, resolving it, and then
-applying the resulting plugin implementation to the script's target object.
+1. Parse the script to extract the `buildscript {}` and `plugins {}` blocks
+1. Compile the blocks, using only the Gradle API as compile classpath
+1. Execute the blocks in the order they appear in the script
+1. Resolve the script compile classpath according to `buildscript {}`
+1. Resolve the plugins according to `plugins {}` (details in subsequent section)
+1. Merge the script compile classpath contributions from `plugins {}` with `buildscript {}`
+1. Compile the “rest” of the script
+1. Apply the plugins defined by `plugins {}` to the target (details in subsequent section)
+1. Execute the “rest” of the script as per normal

Note that plugins that are declared in the `plugins {}` block are not visible in any classpath outside the declaring script. This contrasts
to the classpath declared in a build script `buildscript {}` block, which is visible to the build scripts of child projects.

+The `plugins {}` block is a _heavily_ restricted DSL.
+The only allowed constructs are:
+
+1. Calls to the `id(String)` method with a `String` literal argument, optionally followed by a call to `version(String)`, again with a `String` literal argument, on its return value.
+
+Attempts to do _anything_ else will result in a _compile_ error.
+This guarantees that we can execute the `plugins {}` block at any time to understand what plugins are going to be used.
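+
+For example (illustrative; the commented-out lines show the kinds of constructs that would be rejected):
+
+    plugins {
+        id 'some-plugin' version '1.2'      // allowed
+        // def v = '1.2'                    // compile error: local variables are not allowed
+        // id "some-${'plugin'}"            // compile error: GString values are not allowed
+        // if (condition) { id 'x' }        // compile error: arbitrary logic is not allowed
+    }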
+
+The order of plugin declarations is insignificant.
+The natural ordering of plugin application is alphabetical based on plugin _name_ (not id), respecting plugin dependencies (i.e. depended-on plugins are guaranteed to be applied before their dependents).
+
+**Note:** `allprojects {}` and friends are not compatible with this new DSL.
+Targets cannot impose plugins on other targets.
+A separate mechanism will be available to perform this kind of generalised configuration in a more managed way (discussed later).
+
+**Note:** Eventually, the `plugins {}` mechanism as described will also support script plugins.
+Script plugins will be mapped to ids/versions elsewhere in the build, allowing them to be consumed via the same mechanism (discussed later).
+
+**Note:** Plugins applied through the new mechanism _cannot_ apply plugins using `project.apply()`.
+However, for backwards compatibility, they can apply plugins that have already been applied (discussed later).
+
### Open issues

-- Select the target to apply the plugin to, eg `allprojects { ... }`.
-- Allow scripts to be applied from this block, and make them visible to the script classpath.
-- What happens when the API is used to apply a plugin?
-- A hierarchy of plugin containers:
-    - Build-level plugin resolver (mutable)
-    - One for each init script
-    - One for each settings script
-    - One for each build script
-- How to make a plugin implementation visible to all scripts?
-- How to declare plugin repositories?
-- How to make classes visible without applying the plugin?
-- How apply a plugin that is built in the same project?
+- How practical is it to lock down the `plugins {}` DSL so tightly?

-## Declaring plugin repositories
+## Plugin spec to implementation mappings

-The `plugins {}` block will allow plugin repositories to be declared.
+Plugin clients declare plugin dependencies in terms of a specification.
+The settings.gradle file and init scripts provide the mappings from plugin specs to implementations.

-    plugins {
+    pluginMappings {
        repositories {
-            // uses the public bintray repo for plugin meta-data and modules, and jcenter for additional modules
-            gradlePlugins()
-            // uses the given bintray repo for plugin meta-data and modules
-            bintray {
-                url '...'
-            }
-            // uses the given repo for modules
-            maven {
-                url '...'
-            }
+            // DSL for declaring external sources of information about plugins
        }
    }

-If no repositories are declared, this will default to `gradlePlugins()`. Some mechanism (TBD) will be available to configure this default in some way.
+The default list of repositories will be:
+
+    repositories {
+        defaultScriptDir() // script plugins in `$rootDir/gradle/plugins`
+        gradlePlugins() // plugins.gradle.org
+    }
+
+The 'core plugin repository' is always implicitly the first repository and cannot be disabled.
+If any plugin repositories are declared, the `defaultScriptDir()` and `gradlePlugins()` defaults are removed.
+Some mechanism (TBD) will be available to configure the defaults (i.e. the repositories used when none are specified) in some way.
+
+### Potential repository types
+
+- Directory containing script plugins (other than the default convention)
+- HTTP “directory” containing script plugins
+- Remote directory available over SFTP
+- Other instance of the plugin portal

### Open issues

-- This means that a given plugin implementation can end up with different implementation classpaths in different scripts. Allow this? Fail?
-- Separate plugin meta-data and module repository declarations?
-- Give a name to the protocol used to resolve from bintray and use this name for bintray and artifactory repositories.
-- An init script should be able to define how to resolve plugins for all settings and build scripts and for API usage. Possibly for buildSrc as well.
-- An settings script should be able to define how to resolve plugins for all build scripts, and for API usage.
-- A root build script should be able to define how to resolve plugins for all build scripts (including self) and for API usage.
+- What does the repositories DSL look like?
+- Does the `pluginMappings` block apply to the settings/init plugins? If not, how does one specify the mappings there?
+- Does the `pluginMappings` block get extracted and executed in isolation like the `plugins` block? With similar strictness?
+- Can plugins contribute to the `pluginMappings` block in some way?
+- How do buildSrc plugins work with mapping?
+- How do `pluginMappings` blocks in multiple init scripts and then the settings script compose?
+- Should an individual build script have its own mapping overrides?
+- Could an `inline {}` DSL be used here to give the location of arbitrary detached Gradle plugin projects that need to be built? (i.e. buildSrc replacement)
+
+## Script plugins
+
+Script plugins and binary plugins will be unified from a consumption perspective.
+A script plugin is simply another way to implement a plugin.
+
+A convention will be established that maps plugin id to script implementation.
+
+`id("foo")` = `$rootDir/gradle/plugins/foo.gradle`
+`id("foo.bar")` = `$rootDir/gradle/plugins/foo.bar.gradle`
+`id("foo").version("1.0")` = `$rootDir/gradle/plugins/foo_1.0.gradle`
+`id("foo.bar").version("1.0")` = `$rootDir/gradle/plugins/foo.bar_1.0.gradle`

-## Examples
+Explicit mappings will also be possible via the `pluginMappings {}` DSL (details TBD).
+
+This requires that script plugins can express all things that binary plugins can in terms of usage requirements:
+
+1. Dependencies on other plugins - specified by the plugin script's `plugins {}` block
+1. Dependencies on JVM libraries - specified by the plugin script's `buildscript {}` block
+1. Entry point/implementation - script body
+1. Exported classes - the public classes declared in the script
+
+As new capabilities are added to plugins (particularly WRT the new configuration model), consideration should be given to how script plugins express the same thing.
+
+### Open questions
+
+- How are unqualified ids of plugin dependencies to be interpreted? (i.e. script plugins can be used across builds, with potentially different qualifying rules)
+- Do these 'new' script plugins need to declare that they are compatible with the new mechanism? Are there any differences WRT their implementation?

## Plugin resolution

-The general problem is, given a plugin declaration
+Each plugin spec is independently resolvable to an implementation.
+
+### Specs

-    apply plugin: 'some-plugin'
+A spec consists of:

-we need to resolve the plugin name `some-plugin` to an implementation class.
+* a plugin id (qualified or unqualified)
+* compatibility constraints

-The current mechanism searches the classpath declared by the script associated with the target object for a
-plugin with that name. So, in the case of a `Project` object, the classpath declared in the build script is searched.
-The setting script classpath is used in the case of a `Settings` object, and the init script classpath in the case of a `Gradle` object.
+Compatibility constraints consist of: -Plugin declarations will be generalised to become a kind of dependency declaration, so that: +* version constraints (may be empty) +* target Gradle runtime - apply +#### Open questions -means: 'apply a plugin implementation that meets the given criteria'. Initially, the criteria will be limited to plugin name and -version. +- Should the other plugins in play be considered part of the spec? (i.e. find the “best” version that works with everything else that is in play) -As for other kinds of dependency resolution in Gradle, there will be a number of resolvers that Gradle will use to -search for plugin implementations. A resolver may search some repository or other location for a plugin. There will be several such resolvers baked -into Gradle, but it will be possible to add custom implementations: +### Resolver types -1. A Gradle core plugin resolver. This will use a hard-coded set of mappings from plugin name to implementation module. It may use the - implementation module included the distribution, if present, or may resolve the implementation module from the public bintray repository. - This resolver allows the core Gradle plugins to be moved out of the Gradle distribution archives, - changing Gradle into a logical distribution of a small runtime and a collection of plugins that can be downloaded separately as - required, similar to, say, a Linux distribution. It further allows some plugins to be moved out of the distribution entirely, - via deprecation of a particular mapping. -1. The classpath resolver. This will use the appropriate search classpath to locate a plugin implementation. -1. The public bintray repository. This will resolve plugin implementations using meta-data and files from the pubic bintray repository. -1. Possibly also a resolver that uses mappings rules provided by the build. This would allow, for example, for a build to say things like: - map plugin name 'x' to this Gradle script, or this implementation module, or this implementation `Class` instance. +A spec is resolved into an implementation by iterating through the following resolvers, stopping at the first match. +Each resolver must respect `--offline`, in that if it needs to reach out over the network to perform the resolution and `--offline` has been specified then the resolution will fail. +This doesn't apply to loading implementations (e.g. local scripts) from disk. -Given a plugin declaration `apply plugin: $name`, search for an implementation in the following locations, stopping when a match is found: +#### Core plugin resolver -1. Search the Gradle runtime's (hard-coded) set of core plugins. -1. Search for a plugin with the given name in the search classpath. -1. If not found, fail. Possibly search bintray for candidate versions to include in the error message. +The core plugin resolver is responsible for resolving plugins that are considered to be core to Gradle and are versioned with Gradle. -Given a plugin declaration `apply plugin: $name, version: $version` +The list (and metadata) of core plugins is hardcoded within a Gradle release. +Core plugins are NOT necessarily distributed with the release. +The implementation components may be obtainable from jcenter, allowing the distribution to be thinned with components obtained on demand. -If `$version` is a static version selector, then search for a candidate implementation in the following locations, stopping when a match is found. 
-If `$version` is a dynamic version selector, then search for candidate implementation in all of the following locations, selecting the highest version found: +Core plugins are always in the `org.gradle` namespace. -1. Search for plugin with the given name in the Gradle runtime's mappings. If found, verify that the implementation meets the version criteria. -1. Search for plugin with the given name in the search classpath. If found, verify that the implementation meets the version criteria. -1. Attempt to resolve the plugin name using bintray, as described below. -1. If not found, fail. Possibly search bintray for candidate versions to include in the error message. +*Note:* If a Gradle release includes a new plugin in the core namespace, this needs to be advertised. +Technically, it's a breaking change. +If the to-be-upgraded build was using an unqualified id to depend on a plugin where there is now a core plugin with the same unqualified id, the build will fail because core plugin dependencies cannot contain version numbers. +The resolution is to fully qualify the plugin id. -### Examples +#### Script plugin resolver -Apply the core `java` plugin, the implementation is either bundled in the distribution or fetched from a repository: +Resolver for conventional script plugin locations (see above). - apply plugin: `java` +#### plugins.gradle.org resolver -Apply version >= 0.5 and < 0.6 of the `android` plugin fetched from the Gradle plugins bintray repository: +This resolver will ask the `plugins.gradle.org` web service to resolve plugin specs into implementation metadata, that Gradle can then use to obtain the implementation. - apply plugin: `android`, version: `0.5.+` +Plugin specs will be serialized into `plugins.gradle.org` URLs. +Requests to such URLs yield JSON documents that act as the plugin metadata, and provide information on how to obtain the implementation of the plugin. +Or, they may yield JSON documents that indicate the known versions of the requested plugin that meet the specification. -### Resolution of a plugin declaration using a plugin repository +#### User mapping resolver -Resolution of a plugin declaration using a plugin repository is made up of two steps: First, the plugin declaration is resolved to a plugin implementation component. -Second, the plugin component and its dependencies are resolved to a classpath and the plugin implementation class is loaded from this classpath. +This resolver uses the explicit rules defined by the build environment (i.e. init scripts, settings scripts) to map the spec to an implementation. -The provided repository may be used to perform one or both resolution steps. For example, the Gradle core plugin resolver does not use the repository to determine the -implementation component, but may use the repository resolve the component to a classpath. +### Dynamic versions -Step 1: Given a plugin name and version, use repository to resolve to a plugin component: +Version constraints may be dynamic. +In this case, each plugin resolver is asked for all of the versions of the plugin that it knows about that are otherwise compatible. +The best version available, considering all resolvers, will be used. -1. If the given name and version have already been resolved to a plugin component in this build, reuse the mapping. -1. If the given name and version to component mapping is present in the persistent cache and has not expired, reuse the mapping. -1. If running `--offline`, fail. -1. 
Fetch from the repository the list of packages that have the plugin name associated with the package. Select the highest version that - meets the version criteria and which is compatible with the current Gradle version. Fail if there are no such packages. -1. Cache the result. +Resolvers are responsible for providing the potential versions. +Selecting the actual version to use based on the version constraint is performed by Gradle. -Step 2: Given a plugin name and plugin component: +Dynamic versions are specified using the same syntax that is currently used… -1. If the component has been resolved in this build, reuse the mapping. -1. Resolve the component and its runtime dependencies from the repository to produce a runtime classpath -1. Load the runtime classpath into a `ClassLoader` whose parent is the Gradle API `ClassLoader` (see [ClassLoader graph](https://docs.google.com/drawings/d/1-hEaN0HDSbyw_QSuK8rUOqELohbufyl7osAQvCd7COk/edit?usp=sharing)). -1. Load the plugin implementation class from this `ClassLoader`. -1. Cache the result. + id("foo").version("0.5.+") -Note that no core plugins will be visible to the plugin implementation by default. These will be declared as explicit dependencies of the plugin (TBD). +## Plugin implementation backwards compatibility -# Stories +TBD. -## Story: Spike plugin resolution from bintray +# Milestone 1 - “usable” -Add some basic DSL and resolver infrastructure to demonstrate plugin resolution from the public plugin repository. +## Story: Introduce plugins DSL block -## Story: Introduce plugins DSL block (✓) +Adds the `plugins {}` DSL to build scripts (settings, init or arbitrary script not supported at this point). Plugin specs can be specified in the DSL, but they don't do anything yet. -Adds the initial DSL support and APIs. At this stage, can only be used to apply core plugins to the script's target object. Later stories make this more useful. +### Implementation + +1. Add a `PluginSpecDsl` service to all script service registries (i.e. “delegate” of `plugins {}`) +1. Add a compile transform that rewrites `plugins {}` to be `ConfigureUtil.configure(services.get(PluginSpecDsl), {})` or similar - we don't want to add a `plugins {}` method to any API + - This should probably be added to the existing transform that extracts `buildscript {}` +1. Add an `id(String)` method to `PluginSpecDsl` that returns `PluginSpec`, that has a `version(String)` method that returns `PluginSpecDsl` (self) +1. Update the `plugin {}` transform to disallow everything except calling `id(String)` and optionally `version(String)` on the result +1. Update the transform to error if encountering any statement other than a `buildscript {}` statement before a `plugins {}` statement +1. Update the transform to error if encountering a `plugins {}` top level statement in a script plugin +1. `PluginSpecDsl` should validate plugin ids (see format specification above) ### Test cases -- Script can use a `plugins { ... }` block to apply a core plugin. (✓) -- Can use both `buildscript { ... }` and `plugins { ... }` blocks in a script to apply plugins. (✓) -- Build author receives a nice error message when: - - A statement other than `buildscript { ... }` precedes the `plugins { ... }` statement. (✓) - - A `buildscript { ... }` statement follows any `plugins { ... }` statements. (✓) - - Attempting to apply an unknown plugin in a `plugins { ... }` block. - - Should provide information on how to find which plugins are available. 
(✓) - - Attempting to apply a core plugin with a version selector in a `plugins { ... }` block. (✓) - - Attempting to apply a plugin declared in the script's `buildscript { ... }` from the `plugins { ... }` block. (✓) - - Attempting to apply a plugin declared a parent project's build script `buildscript { ... }` from the `plugins { ... }` block. (✓) -- The script's delegate object is not visible to the `plugins { ... }` block. (✓) - -## Story: Resolve hard-coded set of plugins from public bintray repository - -Adds a basic mechanism to load plugins from a repository. Adds a plugin resolver that uses a hard-coded mapping from plugin name + version to implementation component, -then resolves the implementation from the public repository and `jcenter`. At this stage, the repository is used to resolve the plugin implementation, but the -plugin meta-data is not used. - -Cache the implementation ClassLoader within a single build invocation, so that if multiple scripts apply the same plugin, then the same implementation Class is used -in each location. The implementation ClassLoader should be wrapped in a filtering ClassLoader so that the plugin id resources `/META-INF/gradle-plugins/**` are not -visible. - -Change the construction of the script ClassLoaders so that: - -- Each script has a 'parent scope' ClassLoader. - - For the build script of a non-root project, this is the 'public scope' of the parent project's build script (for backwards compatibility). - - For all other scripts, this is the root ClassLoader, which exposes the Gradle API and core plugins. -- Each script has a 'public scope' ClassLoader: - - When the `buildscript { ... }` block does not declare any classpath, this is the same as the 'parent scope' ClassLoader. - - When the `buildscript { ... }` block declares a classpath, these classes are loaded a ClassLoader whose parent is the 'parent scope' ClassLoader. - This is 'public scope' ClassLoader for the script. -- The script classes are loaded in a ClassLoader whose parents are the 'public scope' ClassLoader plus and implementation ClassLoaders for any plugins declared - in the `plugins { ... }` block. -- The 'public scope' of a project's build script is used to find plugins by `Project.apply()` - -The Gradleware developers will select a small set of plugins to include in this hard-coded mapping. The mapping should ideally include the Android plugins. - -At this stage, dependencies on other plugins are not supported. Dependencies on other components are supported. +- ~~`plugins {}` block is available to build scripts~~ +- ~~`plugins {}` block in init, settings and arbitrary scripts yields suitable 'not supported' method~~ +- ~~Statement other than `buildscript {}` before `plugins {}` statement causes compile error, with correct line number of offending statement~~ +- ~~`buildscript {}` is allowed before `plugins {}` statement~~ +- ~~multiple `plugins {}` blocks in a single script causes compile error, with correct line number of first offending plugin statement~~ +- ~~`buildscript {}` after `plugins {}` statement causes compile error, with correct line number of offending buildscript statement~~ +- ~~Disallowed syntax/constructs cause compile errors, with correct line number of offending statement and suitable explanation of what is allowed (following list is not exhaustive)~~ + - ~~Cannot access `Script` api~~ + - ~~Cannot access script target API (e.g. 
`Gradle` for init scripts, `Settings` for settings script, `Project` for build)~~
+    - ~~Cannot use if statement~~
+    - ~~Cannot define local variable~~
+    - ~~Cannot use GString values as string arguments to `id()` or `version()`~~
+- ~~Plugin ids contain only valid characters~~
+- ~~Plugin id cannot begin or end with '.'~~
+- ~~Plugin id cannot be empty string~~
+- ~~Plugin version cannot be empty string~~
+
+## Story: Can use plugins {} in build script to use core plugin
+
+This story makes it possible for the user to use the new application mechanism to apply core plugins.
+At this point, there's no real advantage to the user or us in this, other than fleshing out the mechanics.
+
+1. Add an internal service that advertises the core plugins of the Gradle runtime (at this stage, all plugins shipped with the distribution)
+1. Change the implementation/use of `PluginSpecDsl` to make the specified plugins available
+1. After the execution of the plugins {} block, but before the “body” of the script, iterate through the specified plugins
+1. For each plugin specified, resolve the specification against the plugin resolvers - only the core plugin resolver at this stage
+1. If the plugin spec can't be satisfied (i.e. has a version constraint, or is not the _name_ of a core plugin), the build should fail, indicating that the plugin spec could not be satisfied by the available resolvers (future stories will address providing more information to users, e.g. a list of available core plugins)
+
+At this stage, applying a core plugin with this mechanism effectively has the same semantics as having `apply plugin: "«name»"` as the first line of the build script.
+
+Note: plugins from buildSrc are not core plugins.

### Test cases

-- The classes from plugins declared in a script's `plugins { ... }` block are visible:
-    - when compiling the script. (✓)
-- The classes from plugins declared in a script's `plugins { ... }` block are NOT visible:
-    - from classes declared in a script's `buildscript { ... }` block. (✓)
-- When a parent project's build script uses a `plugins { ... }` block to apply non-core plugins:
-    - The classes from plugins are not visible when compiling a child project's build script. (✓)
-    - The plugins are not visible via a child project's `Project.apply()` method. (✓)
-- Verify that a plugin applied using `plugins { ... }` block is not visible via the project's `Project.apply()` method. (✓)
-- When multiple scripts apply the same plugin to different targets, the plugin implementation is downloaded from remote repository once only and cached. (✓)
-- When multiple scripts apply the same plugin to different targets, the plugin classes are the same. (✓)
+- ~~`plugins { id "java" }` applies the java plugin to the project when used in a _build_ script (equally for any core plugin)~~
+- ~~`plugins { id "java" version "«anything»" }` produces error stating that core plugins cannot have version constraints~~
+- ~~`plugins { id "java"; id "java" }` produces error stating that the same plugin was specified twice~~
+- ~~`plugins { id "org.gradle.java" }` is equivalent to `plugins { id "java" }`~~
+- ~~plugins already on the classpath (buildscript, buildSrc) are not considered core, and cannot be applied using `plugins {}`~~
+- `plugins { id "«non core plugin»" }` produces a suitable 'not found' type error message
+- ~~Using project.apply() to apply a plugin that was already applied using the plugins {} mechanism works (i.e.
has no effect)~~
+- Plugins are applied alphabetically based on name

-### Open issues
+### Open questions

-- Which classes to make visible from a given plugin?
-- Should possibly allow `buildscript { }` classes to see `plugins { }` classes, so that a custom plugin can extend a public plugin.
+- Should a qualified plugin id of a namespace other than 'org.gradle', with no version constraint, yield a special error message? i.e. only 'org.gradle' plugins can omit the version

-## Story: Resolve plugins from public plugin repository
+## Story: User uses plugin “from” `plugins.gradle.org` of static version, with no plugin dependencies, with no exported classes

-Extend the above mechanism to use plugin meta-data from the public plugin repository to map a plugin name + version to implementation component.
+This story covers adding a plugin “resolver” that uses the plugins.gradle.org service to resolve a plugin spec into an implementation.

-Uses meta-data manually attached to each package in the repository. Again, the Gradleware developers will select a small set of plugins to include in the repository.
+Dynamic versions are not supported.
+Plugins obtained via this method must have no dependencies on any other plugin, including core plugins, and do not make any of their implementation classes available to the client project/scripts (i.e. no classes from the plugin can be used outside the plugin implementation).
+No resolution caching is performed; if multiple projects attempt to use the same plugin, it will be resolved each time and a separate classloader built from the implementation (addressed in later stories).

-Implementation should use `http://plugins.gradle.org` as the entry point to the public plugin repository.
+A new plugin resolver will be implemented that queries the plugin portal, talking JSON over HTTP.
+See the plugin portal spec for details of the protocol.
+This resolver will be appended to the list of resolvers used (i.e. currently only containing the core plugin resolver).

-### Test cases
+Plugin specs can be translated into metadata documents using URLs such as: `plugins.gradle.org/api/gradle/«gradle version»/plugin/use/«plugin id»/«version»`.

-- When multiple scripts apply the same plugin to different targets, the plugin resolution is done against the remote repository once only and cached.
-- Build author receives a nice error message when using the `plugins { ... }` block to:
-    - Attempt to apply a plugin from a remote repository without declaring a version selector. (✓)
-    - Attempt to apply an unknown plugin.
-        - Should list some candidates that are available, including those in the remote repositories.
-    - Attempting to apply an unknown version of a plugin.
-        - Should list some candidate versions that are available.
-    - Plugins with -SNAPSHOT versions are requested (Bintray does not allow snapshot versions)
-- Plugins can be resolved with status version numbers (e.g. latest.release)
-- Plugins can be resolved with version ranges (e.g. 2.+, ]1.0,2.0])
+There are 4 kinds of responses that need to be considered for this story:

-## Story: External plugins are usable when offline
+1. 3xx redirect
+1. 200 response with expected JSON payload (see plugin portal spec)
+1. 404 response with JSON payload indicating no plugin for that id/version found (see plugin portal spec)
+1. Anything else

-Cache the plugin mapping. Periodically check for new versions when a dynamic version selector is used. Reuse cached mapping when `--offline`.
+Subsequent stories refine the error handling.
This story encompasses the bare minimum.

-## Story: Plugins included in Gradle public repository are smoke tested
+The “plugin found” JSON response contains two vital pieces of data, among other information.

-For plugins to be listed in the public repository, there must be some external (i.e. not performed by plugin author) verification that the plugin is not completely broken.
-That is, the plugin should be:
+1. A “«group»:«artifact»:«version»” dependency notation string
+1. A URL to an m2 repo that is accessible without authentication

-1. Able to be applied via the new plugin mechanism
-2. Not produce errors after simply applying
+The m2 repository is known to contain the dependency denoted in the dependency notation string.
+The runtime usage resolution (i.e. module artifact + dependencies) of the dependency from the given repository is expected to form a classpath that contains a plugin implementation mapped to the qualified id (i.e. a `/META-INF/gradle-plugins/«qualified id».properties` file with an `implementation-class` property).

-This will (at least) need to be able to be performed _before_ the plugin is included in the public repository.
+The dependencies of the plugin implementation must also be available from the specified maven repository.
+That is, this is the only repository available for the resolve.

-### Open issues
+The plugin resolver will resolve the maven module as per typical Gradle maven dependency resolution.
+No configuration (e.g. username/password, exclude rules) of the resolve is possible.
+Anything other than successful resolution of the implementation module is fatal to the plugin resolution.

-1. Are existing plugins periodically tested? Or only upon submission (for each new version)?
-1. What action is taken if a plugin used to work but no longer does?
+The successfully resolved module forms an implementation classpath.
+A new classloader is created from this classpath, with the Gradle API classloader (_not_ the plugin classloader) as its parent.
+The `Plugin` implementation mapped to the plugin id from this classpath is applied to the project.
+No classes from the plugin implementation classpath are made available to scripts, other plugins, etc.
+
+As much of the HTTP infrastructure used in dependency resolution as possible should be used in communicating with the plugin portal.
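+
+As a rough sketch only (plain Groovy; the endpoint shape is taken from this spec, but the JSON field names and reduced error handling below are illustrative assumptions, not a definitive protocol description), the metadata lookup for a single plugin spec might look like:
+
+    import groovy.json.JsonSlurper
+
+    // Inputs that would come from the plugin spec and the running Gradle instance
+    def gradleVersion = "2.0"
+    def pluginId = "org.example.greeting"   // hypothetical plugin id
+    def pluginVersion = "1.0"
+
+    def url = new URL("http://plugins.gradle.org/api/gradle/${gradleVersion}/plugin/use/${pluginId}/${pluginVersion}")
+    def connection = url.openConnection() as HttpURLConnection   // follows 3xx redirects by default (case 1)
+
+    switch (connection.responseCode) {
+        case 200:
+            // Case 2: expected JSON payload; 'implementation.gav' and 'implementation.repo' are
+            // assumed field names for the dependency notation and the m2 repository URL
+            def payload = new JsonSlurper().parseText(connection.inputStream.text)
+            def dependencyNotation = payload.implementation.gav
+            def repositoryUrl = payload.implementation.repo
+            // Next: resolve the module from repositoryUrl only, build a classloader with the
+            // Gradle API classloader as its parent, and apply the mapped `Plugin` implementation
+            break
+        case 404:
+            // Case 3: 'not found' payload - not fatal, try the next resolver
+            break
+        default:
+            // Case 4: anything else is fatal to plugin resolution
+            throw new RuntimeException("Unexpected response from plugin portal: ${connection.responseCode}")
+    }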
+
+### Test Coverage
+
+- 404 responses that indicate that the plugin or plugin version does not exist are not fatal - try next resolver
+- generic 404 responses are considered fatal
+- If the plugin portal response indicates that the plugin is known, but not by that version (also a 404), the failure message to the user should include this information (later stories might include information about what versions are known about)
+- Attempt to use -SNAPSHOT or a dynamic version selector produces helpful 'not supported' error message
+    - As there is only the core resolver and the portal resolver at this point, this logic could potentially be hardcoded at the start of the resolver list
+- Success response document of incompatible schema produces error
+- Success response document of compatible schema, but with extra data elements, is ok
+- Failed resolution of module implementation from specified repository fails, with error message indicating why the resolve was happening
+- Successful resolution of module implementation, but no plugin with the id found in the resultant classpath, yields useful error message
+- Successful resolution of module implementation, but unexpected error encountered when loading `Plugin` implementation class, yields useful error message
+- Successful resolution of module implementation, but exception encountered when _applying_ plugin, yields useful error message
+- Plugin is available in build script via `PluginContainer` - incl. `withType()` and `withId()` methods (note: plugin class is not visible to build script, but could be obtained reflectively)
+- Plugin implementation classes are not visible to build script (or to anything else)
+- Plugin cannot access classes from core Gradle plugins
+- Plugin can access classes from the Gradle API
+- Plugin cannot access Gradle internal implementation classes
+- Plugin resolution fails when --offline is specified
+- Client follows redirect from server
+- Unicode characters in the response are interpreted correctly and don't cause strange behaviour
+- Plugin id and version numbers can contain URL meta chars and unicode chars (even though valid plugin ids are restricted to ASCII alphanumerics and `-`) - request URLs should be well formed
+- Reasonable error message on network failure talking to the plugin portal or to the repository containing the plugin implementation
+
+### Open questions
+
+- Is it worth validating the id/version returned by the service against what we asked for?

-## Story: Make plugin DSL public
+## Story: Structured error response from plugin portal (when resolving plugin spec) is “forwarded to user”
+
+The plugin portal has a standardised JSON payload for errors.
+This story adds understanding of this to Gradle's interactions with the portal, by way of extracting the error information and presenting it to the user instead of a generic failure message.
+
+Any request to the plugin portal may return a “structured error response”.
+In some cases this may be part of the standard protocol for that endpoint.
+For example, a request for plugin metadata that targets the plugin metadata endpoint but resolves to a non-existent plugin will yield a structured error response.
+The detail of the error response differentiates the response from a generic 404.
+
+### Test coverage
+
+- 4xx..500 response that isn't a structured error response (e.g.
HTML) is handled
+- Response advertised as structured error response is of incompatible schema
+- Response advertised as structured error response is malformed JSON
+- Response advertised as structured error response is of compatible schema, but has extra unexpected elements
+
+### Open questions
+
+- What to use to unmarshall JSON responses? Jackson? Should the API couple to a marshaller at this level?
+
+## Story: Gradle is routinely tested against real plugins.gradle.org codebase
+
+This story covers setting up continuous testing of Gradle against the real plugin portal code, but not the real instance.
+
+This provides some verification that the test double that is used in the Gradle build to simulate the plugin portal is representative, and that the inverse (i.e. plugin portal's assumptions about Gradle behaviour) holds.
+
+This does not replace the double-based tests in the Gradle codebase.
+
+## Story: Plugin author uses plugin development plugin to build a plugin
+
+This story adds a plugin development plugin to help plugin authors build a plugin. Later stories will add the ability to test and publish the plugin.
+
+## Story: Plugin author uses plugin development plugin to publish a plugin
+
+This story extends the plugin development plugin to generate the meta-data and publish a plugin.
+
+## Story: Plugins are able to declare exported classes
+
+This is the first story where we require changes to how plugins are published and/or implemented (i.e. exported class information is needed).
+
+The plugin development plugin should provide some mechanism to declare the exported classes of a plugin (possibly a DSL, possibly annotations in code, or something else).
+This should end up in the generated meta-data.
+
+Plugin authors should be able to write their plugin in such a way that it works with the new mechanism and the old project.apply() mechanism (as long as it has no dependency on any other, even core, plugin).
+
+## Story: Plugins are able to declare dependency on core plugin
+
+The plugin development plugin should provide some mechanism to declare a dependency on a core plugin (possibly a DSL, possibly annotations in code, or something else).
+This should end up in the generated meta-data.
+
+Plugin authors should be able to write their plugin in such a way that it works with the new mechanism and the old project.apply() mechanism (as long as it has no dependency on a non-core plugin).
+
+# Milestone 2 - “announceable”
+
+## Story: User is notified that Gradle version is no longer supported by plugin portal
+
+## Story: User is notified of use of 'deprecated' plugin
+
+## Story: Plugin resolution is cached across the entire build
+
+Don't make the same request to plugins.gradle.org in a single build; reuse implementation classloaders.
+
+## Story: Plugin resolution is cached between builds
+
+i.e. responses from plugins.gradle.org are cached to disk (`--offline` support)
+
+## Story: Build author searches for plugins using central Web UI
+
+## Story: New plugin mechanism can be used in external scripts to apply plugins to `Project`
+
+## Story: Plugins are able to depend on other non-core plugins
+
+Plugin dependencies cannot be dynamic.
+Plugin dependencies cannot be cyclic.
+
+## Story: Make new plugin resolution mechanism public
+
+Story is predicated on plugins.gradle.org providing a searchable interface for plugins.

 - Include new DSL in DSL reference.
+- Add link to further documentation in the relevant error message (at least the compile-time validation of `plugins {}` syntax)
 - Include types in the public API.
+- Add links to the user guide in the Javadoc of the `org.gradle.plugin.use` types
 - Add some material to the user guide discussion about using plugins.
 - Update website to replace references to the 'plugins' wiki page to instead reference `http://plugins.gradle.org`
 - Update the 'plugins' wiki page to direct build authors and plugin authors to `http://plugins.gradle.org` instead.
 - Announce in the release notes.

-## Story: Plugin author requests that plugin version be included in the Gradle plugins repository
+Note: Plugin authors cannot really contribute to plugins.gradle.org at this point. The content will be “hand curated”.

-For now, the set of plugins available via the public plugin repository will be curated by Gradleware, such that some manual action is required to add
-a new plugin (version) to the public repository.
+## Story: Plugin author submits plugin for inclusion in plugins.gradle.org

-TBD - perhaps implement this using the bintray 'contact' UI plus some kind of reference from the Gradle website.
+Includes:

-Retire the 'plugins' wiki page some point after this.
+- Generating all necessary metadata (e.g. exported classes, plugin dependencies)
+- Tooling support for publishing in a manner suitable for inclusion in plugins.gradle.org
+- Admin processes for including a plugin, including acceptance policies etc.
+- Prevention of use of 'org.gradle' and 'com.gradleware' namespaces
+
+# Milestone 3 - “parkable”
+
+## Story: Plugin author reasonably tests realistic use of plugin with dependencies
+
+Plugin authors need to be able to verify that their plugin works with the classloader structure it would have in a real build.

 ## Story: Build author searches for plugins using Gradle command-line

 Introduce a plugin and implicit task that allows a build author to search for plugins from the central plugin repository, using the Gradle command-line.

-## Story: Plugins declare dependencies on other plugins
+## Story: User specifies centrally that a plugin should be applied to multiple projects

-Should include dependencies on core plugins.
+## Story: New plugin mechanism can be used to apply `Gradle` plugin

-When two plugins declare a dependency on some other plugin, the same plugin implementation ClassLoader should be used in both cases. Similarly, when
-a build script and a plugin declare a dependency on the same plugin, the same implementation ClassLoader should be used in both cases.
+## Story: New plugin mechanism can be used to apply `Settings` plugin

-## Story: Plugin author publishes plugin to bintray
+## Story: All plugins are resolved before any plugin is applied to a target

-Add a basic plugin authoring plugin, that adds support for publishing to bintray with the appropriate meta-data.
+Before actually applying plugins (potentially expensive), all required plugins should be resolved in the spirit of fail fast.

-## Story: Plugin author can test use of plugin
+## Story: Plugin declares minimum Gradle version requirement

-Authors should be able to test that their plugins are compatible with the new mechanism.
+## Story: User specifies non-static plugin version constraint (i.e. dynamic plugin dependencies)

-- Provide mechanism to simulate plugin application at unit test level (new mechanism has functional differences at application time)
-- Provide mechanism to functionally test new plugin metadata (i.e.
correctly declared dependencies on other plugins) +## Story: Local script is used to provide implementation of plugin -(note: overlap with [design-docs/testing-user-build-logic.md](https://github.com/gradle/gradle/blob/master/design-docs/testing-user-build-logic.md)) +### Open questions -## Story: Build author searches for plugins using central Web UI +- Is it worth considering a testing mechanism for script plugins at this point? -Introduce a Web UI that allows a build author to search for and view basic details about available Gradle plugins. Backed by the meta-data hosted in the -public Bintray repository. +# Future work -TBD - where hosted, how implemented, tested and deployed +## Story: Pathological comms errors while resolving plugins produce reasonable error messages + +1. Non responsive server (accepts request but never responds) +1. Server responds extremely slowly (data is transferred frequently enough to avoid idle/response timeout, but is really too slow to let continue) +1. Server responds with inaccurate content length (lots of HTTP clients get badly confused by this) +1. Server responds with extremely large document (protect against blowing out memory trying to read the response) + +--- + +Stories below are still to be realigned after recent direction changes. There is some duplication with what is above, that needs to be folded in. In progress. + +## Story: Plugins included in Gradle public repository are smoke tested + +For plugins to be listed in the public repository, there must be some external (i.e. not performed by plugin author) verification that the plugin is not completely broken. +That is, the plugin should be: + +1. Able to be applied via the new plugin mechanism +2. Not produce errors after simply applying + +This will (at least) need to be able to be performed _before_ the plugin is included in the public repository. + +### Open issues + +1. Are existing plugins periodically tested? Or only upon submission (for each new version)? +1. What action is taken if a plugin used to work but no longer does? ## Story: Resolve plugins relative to Gradle distribution @@ -422,15 +644,6 @@ Allow the plugin repositories to use to be declared. Multiple plugin and module repositories can be declared. -## Story: Resolve local script plugins - -Add a resolver that uses some convention to map plugin id to a build script. - - plugins { - // Looks for a script in $rootDir/gradle/plugins/groovy-project.gradle - apply plugin: 'groovy-project' - } - ## Story: Daemon reuses plugin implementation across builds Cache the implementation ClassLoader across builds. More details in the [performance spec](performance.md). @@ -449,6 +662,7 @@ Deprecate the bin Gradle distribution some time after this. These are yet to be mixed into the above plan: +- Credentials and caching for script plugin repositories. - Build-init plugin custom build types by resolving build type name to plugin implementation using plugin repository - Resolve tooling model to provider plugin implementation using plugin repository - Plugin (script) declares public API diff --git a/design-docs/repository-extensibility.md b/design-docs/repository-extensibility.md new file mode 100644 index 000000000000..14f96e8e8d44 --- /dev/null +++ b/design-docs/repository-extensibility.md @@ -0,0 +1,63 @@ +Our current repository implementations are not particularly extensible, composable, or pluggable. In some cases users must resort +to using a custom Ivy DependencyResolver in order to handle a specific repository. 
This spec attempts to address these shortcomings.
+
+# Use cases
+
+- Enable caching for a file repository
+- Resolve files from a googlecode project
+- Resolve files from a Nuget repository
+- Resolve files from a GitHub repository
+- Resolve files from a SourceForge project
+- Use a custom module version listing protocol, such as the Artifactory API
+
+# Stories
+
+## Allow caching for a slow file-backed repository (GRADLE-1964)
+
+Currently, the only way to enable caching for a file-backed repository (eg when backed by a slow NFS share) is to set the local attribute to false for a FileSystemResolver.
+We need a way to prevent very slow resolution in these cases:
+
+* Add caching for file-backed repositories. Once per build, check for updates to a given resource and copy it into the cache. Should use the same
+up-to-date mechanism used for incremental builds.
+* Detect remote file systems and don't check for updates for these file systems when running with `--offline`.
+    * Use `native-platform` to detect remote file systems and cache these automatically.
+
+See [GRADLE-1964](http://issues.gradle.org/browse/GRADLE-1964)
+
+## Detect servers that do not correctly handle HTTP HEAD requests
+
+Provide an automatic workaround for https://code.google.com/p/support/issues/detail?id=660.
+
+- Hopefully this could detect bad servers due to something unique in the HTTP HEAD response.
+- Or we could maintain a list of servers that we know are badly behaved
+- Or we could probe by sending both GET and HEAD requests periodically
+
+One issue with probing is that this behaviour is non-deterministic. If the file has been recently retrieved, then HEAD will not return 404.
+
+## Resolve artifact files from a GoogleCode repository
+
+In this story, we add the ability to resolve files from a Google Code repository:
+
+* 'googlecode' repository type
+* version listing via googlecode api
+* no meta-data handling (file-only repository)
+
+See: https://github.com/Ullink/gradle-repositories-plugin/blob/master/src/main/groovy/com/ullink/RepositoriesPlugin.groovy#L122
+
+## Resolve artifact files from a Nuget repository
+
+In this story, we simply add the ability to resolve files in a Nuget repository:
+
+* 'nuget' repository type
+* version listing via Nuget api
+* No meta-data handling (file-only repository)
+
+See: https://github.com/Ullink/gradle-repositories-plugin/blob/master/src/main/groovy/com/ullink/RepositoriesPlugin.groovy#L150
+
+## Resolve artifact files from a GitHub repository
+
+## Allow meta-data file format to be specified for repositories
+
+This story would permit the metadata file format (ivy/pom/etc) to be specified for a googlecode, nuget or other repository.
+
+
diff --git a/design-docs/repository-transports.md b/design-docs/repository-transports.md
index 1aa792df93c7..38018ef819a3 100644
--- a/design-docs/repository-transports.md
+++ b/design-docs/repository-transports.md
@@ -89,9 +89,9 @@ The plan will be to include this coverage for HTTP transport in a later story.

 ### Open issues

-- Add an `AbstractMultiTestRunner` implementation to permit a test to be run with different repository tranports.
+- Add an `AbstractMultiTestRunner` implementation to permit a test to be run with different repository transports.
-## Support a maven repository declared with 'sftp' as the URL scheme, using password credentials
+## Support resolving from a maven repository declared with 'sftp' as the URL scheme, using password credentials

 ### User visible changes

@@ -112,13 +112,25 @@ Configuring a repository for sftp transport:

 In many cases, this may be a matter of adapting existing test coverage to run against multiple transports.

 - Resolve via 'sftp' from maven repository.
-- Publish via 'sftp' to maven repository (with maven-publish)
 - Resolve dynamic version from maven repository with 'sftp'
 - Reasonable error message produced when:
     - attempt to resolve missing module with valid credentials
-    - publish or resolve with invalid credentials
-    - publish or resolve where cannot connect to server
-    - publish or resolve where server throws exception
+    - resolve with invalid credentials
+    - resolve where cannot connect to server
+    - resolve where server throws exception
+
+## Support publishing to a maven repository declared with 'sftp' as the URL scheme, using password credentials
+
+### Test cases
+
+In many cases, this may be a matter of adapting existing test coverage to run against multiple transports.
+
+- Un `@Ignore` `MavenPublishSftpIntegrationTest`.
+- Publish via 'sftp' to maven repository (old and new plugins)
+- Reasonable error message produced when:
+    - publish with invalid credentials
+    - publish where cannot connect to server
+    - publish where server throws exception

 ## Run more remote publish and resolve integration tests against an sftp repository

@@ -130,6 +142,10 @@ Adapt more existing test coverage to execute against an 'sftp' repository:

 - `org.gradle.api.publish.ivy.IvyPublishMultipleRepositoriesIntegTest`
 - `org.gradle.api.publish.maven.MavenPublishHttpIntegTest`

+## All repository transports support using `sha1` resources to avoid downloads
+
+Currently, only the HTTP transports support using a `.sha1` resource.
+
 ## Support 'scp' scheme for ivy and maven repository URL

 ## Use public key authentication when accessing sftp/scp/https repository

@@ -158,3 +174,4 @@ use a remote resource that requires authentication, and where no credentials hav

 ## Prompt IDE user for credentials when not provided

 Allow tooling API clients to provide a credentials provider. This will allow IDE integrations to prompt the user for and manage their credentials.
+
diff --git a/design-docs/task-configuration-from-command-line.md b/design-docs/task-configuration-from-command-line.md
index bb5eda6c6212..983a0427bf61 100644
--- a/design-docs/task-configuration-from-command-line.md
+++ b/design-docs/task-configuration-from-command-line.md
@@ -75,106 +75,22 @@ TBD

 * Annotation is missing 'options' value.
 * Annotation is missing 'description' value.

-## Help task shows basic details about a task
-
-Add some command line interface for discovering details about a task (name, type, path, description)
-
-### User visible changes
-
-Running `gradle help --task test` shows a usage message for the `test` task.
-
-If multiple tasks match, details of the matching tasks are shown
-
-* all matched tasks have the same type
-    * print one report of the task type and include all matching paths in the report
-
-* matched tasks have different types
-    * print one detail output for each different task type including all available paths
-
-### Test coverage
-
-* integration tests
-    * `gradle help --task` on simple task
-    * `gradle help --task` on task referenced by full path (e.g.
`:someProj:dependencies`)
-    * `gradle help --task` on implicit task task
-    * `gradle help --task` on task defined via placeholder
-    * `gradle help --task` on non existing task displays reasonable error message, including candidate matches
-    * `gradle help --task` on multiple matching tasks
-    * `gradle help --task` using camel-case matching to select task
-
-### Implementation approach
-
-- Change the `help` task:
-    - add `--task` commandline property
-    - change displayHelp implementation to print task details when `--task` is set
-    - lookup project tasks and implicit tasks using the task selector
-    - throw decent error message when requested task cannot be found
-    - task details (task name, task type, path)
-    - the default message informs the user about using `gradle help --task n`
-
-- Update the 'using Gradle from the command-line' user guide chapter to mention the help task.
-
-## Help task shows command-line options for a task
-
-Commandline options of the task passed to help are listed including a description. The legal values for each property are not shown - this
-is added in a later story.
+## Add task validator for task options

 ### User visible changes

-The usage message of running `gradle help --task ` lists commandline options of the selected tasks.
-
-### Test coverage
-
-* integration tests
-    * `gradle help` on task with no commandline properties
-    * `gradle help` on task with commandline properties
-    * `gradle help` on implicit task no commandline properties
-    * `gradle help` on implicit task with no commandline properties
-    * `gradle help --tassk help` (should print hint to `gradle help --task help`)
+When a task option is assigned an unsupported value, an exception is thrown that points to the wrongly assigned option value and hints at
+what values are supported.

 ### Implementation approach

-- Change configuration error message in `CommandLineTaskConfigurer` to suggest that the user run `gradle help --task `.
-- Update the 'using Gradle from the command-line' user guide chapter.
-
-## Help task shows legal values for each command-line option
-
-### User visible changes
-
-The usage message of running `gradle help --task init` includes the available values for the task command line options (e.g --type)
-
-### Test coverage
+- Add a task validator that validates a string property has a legal value at execution time.

-* integration tests
-    * `gradle help` on task with enum property type mapped to commandline option
-    * `gradle help` on task with boolean property type mapped to commandline option
-    * `gradle help` on task with String property mapped to commandline option
-    * `gradle help --task init` shows all available init types
+### Test cases

 - A reasonable error message is provided when user specified an illegal value for an enum property from the command-line.
 - A reasonable error message is provided when user specified an illegal value for an string property from the command-line.

-### Implementation approach
-
-- Introduce marker annotation `Option("optionName")` to mark a task property mapped to a commandline option.
-`@Option` with not provided "optionName" is mapped to option with same name as the annotated field
-`@Option("optionName")` annotated on Enums includes enum values as possible option values
-`@Option("optionName")` annotated on boolean includes true/false as possible option values
-`@Option("optionName")` annotated on a setter method evaluates the available options from the parameter type)
-Introduce marker annotation `OptionValues("optionName")` to to allow a dynamic value lookup in the task implementation itself.
-Adapt InitBuild task to use `@OptionValues` to map values for the `--type` command line option.
-Update the 'using Gradle from the command-line' user guide chapter.
-
-## Add task validator for task options
-
-### User visible changes
-
-When task options that have unsupported option values, will throw an Exception pointing to the wrong assigned option value and hints
-what values are supported.
-
-### Implementation approach
-- Add a task validator that validates a string property has a legal value at execution time.
-
 ## Support camel-case matching for task commandline property values

 ### Test coverage

@@ -220,10 +136,12 @@ TBD

 # Open issues

-1. Figure out what to do if multiple tasks of different types are selected *and* there are clashing command line options.
+- Figure out what to do if multiple tasks of different types are selected *and* there are clashing command line options.
   For example, 'foo' option that requires a string value in one task type but is a boolean flag in some other task type.
   This is not a blocker because we have very little command line options, yet.
-1. Decide on precedence order if task is configured from the command line and in the build script. Add coverage, etc.
-1. If a method marked with `@Option` accepts varargs or a Collection type as parameter, allow the command-line option to be specified multiple
-   time on the command-line.
-1. Output of `gradle help --task x` provides link to task documentation.
+- Decide on precedence order if task is configured from the command line and in the build script. Add coverage, etc.
+- If a method marked with `@Option` accepts varargs or a Collection type as parameter, allow the command-line option to be specified multiple
+  times on the command-line.
+- Output of `gradle help --task x` provides link to task documentation.
+- Remove the 'chrome' from the output of `gradle help` and other command-line tasks.
+- Remove the `implicitTasks` container from Project.

diff --git a/design-docs/tooling-api-improvements.md b/design-docs/tooling-api-improvements.md
index ff4abc024b75..f7e4f79f0b99 100644
--- a/design-docs/tooling-api-improvements.md
+++ b/design-docs/tooling-api-improvements.md
@@ -42,30 +42,6 @@ to use this same mechanism is one step in this direction.

 # Implementation plan

-## Story: Expose the build script of a project
-
-This story exposes via the tooling API some basic information about the build script of a project.
-
-1. Add a `GradleScript` type with the following properties:
-    1. A `file` property with type `File`.
-2. Add a `buildScript` property to `GradleProject` with type `GradleScript`.
-3. Include an `@since` javadoc tag and an `@Incubating` annotation on the new types and methods.
-4. Change `GradleProjectBuilder` to populate the model.
- -An example usage: - - GradleProject project = connection.getModel(GradleProject.class); - System.out.println("project " + project.getPath() + " uses script " + project.getBuildScript().getFile()); - -### Test coverage - -- Add a new `ToolingApiSpecification` integration test class that covers: - - A project with standard build script location - - A project with customized build script location -- Verify that a decent error message is received when using a Gradle version that does not expose the build scripts. - - Request `GradleProject` directly. - - Using `GradleProject` via an `EclipseProject` or `IdeaModule`. - ## Story: Expose the compile details of a build script This story exposes some information about how a build script will be compiled. This information can be used by an @@ -74,9 +50,12 @@ IDE to provide some basic content assistance for a build script. 1. Introduce a new hierarchy to represent a classpath element. Retrofit the IDEA and Eclipse models to use this. - Should expose a set of files, a set of source archives and a set of API docs. 2. Add `compileClasspath` property to `GradleScript` to expose the build script classpath. -3. Include the Gradle API and core plugins in the script classpath. +3. Script classpath includes the Gradle API and core plugins - Should include the source and Javadoc -4. Add a `groovyVersion` property to `GradleScript` to expose the Groovy version that is used. +4. Script classpath includes the libraries declared in the `buildscript { }` block. +5. Script classpath includes the plugins declared in the `plugins { }` block. +6. Script classpath includes the libraries inherited from parent project. +7. Add a `groovyVersion` property to `GradleScript` to expose the Groovy version that is used. ### Open issues @@ -93,44 +72,6 @@ IDE to provide some basic content assistance for a build script. - Source and Javadoc artifacts for the above are included in the classpath. - Verify that a decent error message is received when using a Gradle version that does not expose the build script classpath. -## Story: Expose the publications of a project - -This story allows an IDE to map dependencies between different Gradle builds and and between Gradle and non-Gradle builds. -For incoming dependencies, the Gradle coordinates of a given library are already exposed through `ExternalDependency`. This -story exposes the outgoing publications of a Gradle project. - -1. Add a `GradlePublication` type with the following properties: - 1. An `id` property with type `GradleModuleVersion`. -2. Add a `publications` property to `GradleProject` with type `DomainObjectSet`. -3. Include an `@since` javadoc tag and an `@Incubating` annotation on the new types and methods. -4. Introduce a project-scoped internal service which provides some detail about the publications of a project. - This service will also be used during dependency resolution. See [dependency-management.md](dependency-management.md#story-dependency-resolution-result-exposes-local-component-instances-that-are-not-module-versions) - 1. The `publishing` plugin registers each publication defined in the `publishing.publications` container. - For an instance of type `IvyPublicationInternal`, use the publication's `identity` property to determine the publication identifier to use. - For an instance of type `MavenPublicationInternal`, use the publication's `mavenProjectIdentity` property. - 2. For each `MavenResolver` defined for the `uploadArchives` task, register a publication. 
Use the resolver's `pom` property to determine the - publication identifier to use. Will need to deal with duplicate values. - 3. When the `uploadArchives` task has any other type of repository defined, then register a publication that uses the `uploadArchives.configuration.module` - property to determine the publication identifier to use. -5. Change `GradleProjectBuilder` to use this service to populate the tooling model. - -An example usage: - - GradleProject project = connection.getModel(GradleProject.class); - for (GradlePublication publication: project.getPublications()) { - System.out.println("project " + project.getPath() + " produces " + publication.getId()); - } - -### Test coverage - -- Add a new `ToolingApiSpecification` integration test class that covers: - - For a project that does not configure `uploadArchives` or use the publishing plugins, verify that the tooling model does not include any publications. - - A project that uses the `ivy-publish` plugin and defines a single Ivy publication. - - A project that uses the `maven-publish` plugin and defines a single Maven publication. - - A project that uses the `maven` plugin and defines a single remote `mavenDeployer` repository on the `uploadArchives` task. - - A project that defines a single Ivy repository on the `uploadArchives` task. -- Verify that a decent error message is received when using a Gradle version that does not expose the publications. - ## Story: Gradle plugin provides a custom tooling model to the tooling API client This story allows a custom plugin to expose a tooling model to any tooling API client that shares compatible model classes. @@ -248,28 +189,6 @@ Note: there is a breaking change here. - Requests a model from an unknown project. - Requests an unknown model. -## Story: Tooling API client requests build model for old Gradle version (DONE) - -This story adds support for the `GradleBuild` model for older target Gradle versions. - -### Implementation - -Change the implementations of `ConsumerConnection.run(type, parameters)` so that when asked for a `GradleBuild` model, they instead -request the `GradleProject` model and then convert it to a `DefaultGradleBuild` instance. See `ConnectionVersion4BackedConsumerConnection.doGetModel()` -for an example of this kind of thing. - -For the `ModelBuilderBackedConsumerConnection` implementation, if the provider Gradle version supports the `GradleBuild` model (is >= 1.8-rc-1) then -forward to the provider, as it does now. - -To implement this cleanly, one option might be to introduce some chain of model producers into the `ConsumerConnection` subclasses, so that each producer is -asked in turn whether it can produce the requested model. The last producer can delegate to the provider connection. Stop at the first producer that can -produce the model. - -### Test cases - -- For all Gradle versions, can request the `GradleBuild` model via `ProjectConnection`. This basically means removing the `@TargetGradleVersion` from - the test case in `GradleBuildModelCrossVersionSpec`. - ## Story: Gradle provider builds build model efficiently When the `GradleBuild` model is requested, execute only the settings script, and don't configure any of the projects. @@ -292,88 +211,70 @@ This story adds support for conditionally requesting a model, if it is available Fix the `ClassLoader` caching in the tooling API so that it can deal with changing implementations. 
-## Story: GRADLE-2434 - Expose the aggregate tasks for a project +## Story: Tooling API client launches a build using task selectors from different projects -This story allows an IDE to implement a way to select the tasks to execute based on their name, similar to the Gradle command-line. - -1. Add an `EntryPoint` model interface, which represents some arbitrary entry point to the build. -2. Add a `TaskSelector` model interface, which represents an entry point that uses a task name to select the tasks to execute. -3. Change `GradleTask` to extend `EntryPoint`, so that each task can be used as an entry point. -4. Add a method to `GradleProject` to expose the task selectors for the project. - - For new target Gradle versions, delegate to the provider. - - For older target Gradle versions, use a client-side mix-in that assembles the task selectors using the information available in `GradleProject`. -5. Add methods to `BuildLauncher` to allow a sequence of entry points to be used to specify what the build should execute. -6. Add `@since` and `@Incubating` to the new types and methods. - -Here are the above types: - - interface EntryPoint { - } +TBD - interface TaskSelector extends EntryPoint { - String getName(); // A display name - } +### Test cases - interface GradleTask extends EntryPoint { - ... - } +- Can execute task selectors from multiple projects, for all target Gradle versions +- Can execute overlapping task selectors. - interface GradleProject { - DomainObjectSet getTaskSelectors(); - ... - } +## Story: Tooling API exposes project's implicit tasks as launchable - interface BuildLauncher { - BuildLauncher forTasks(Iterable tasks); - BuildLauncher forTasks(EntryPoint... tasks); - ... - } +Change the building of the `BuildInvocations` model so that: -TBD - maybe don't change `forTasks()` but instead add an `execute(Iterable tasks)` method. +- `getTasks()` includes the implicit tasks of the project. +- `getTaskSelectors()` includes the implicit tasks of the project and all its subprojects. ### Test cases -- Can request the entry points for a given project hierarchy - - Task is present in some subprojects but not the target project - - Task is present in target project but no subprojects - - Task is present in target project and some subprojects -- Executing a task selector when task is also present in subprojects runs all the matching tasks, for the above cases. -- Executing a task (as an `EntryPoint`) when task is also present in subprojects run the specified task only and nothing from subprojects. -- Can request the entry points for all target Gradle versions. +- `BuildInvocations.getTasks()` includes `help` and other implicit tasks. + - Launching a build using one of these task instances runs the appropriate task. +- `BuildInvocations.getTaskSelectors()` includes the `help` and other implicit tasks. + - Launching a build using the `dependencies` selector runs the task in the default project only (this is the behaviour on the command-line). +- A project defines a task placeholder. This should be visible in the `BuildInvocations` model for the project and for the parent of the project. + - Launching a build using the selector runs the task. ## Story: Expose information about the visibility of a task This story allows the IDE to hide those tasks that are part of the implementation details of a build. -## Story: Deprecate support for Tooling API clients earlier than Gradle 1.2 (DONE) +- Add a `visibility` property to `Launchable`. 
+
+- A task is considered `public` when it has a non-empty `group` property, otherwise it is considered `private`.
+- A task selector is considered `public` when any task it selects is `public`, otherwise it is considered `private`.

-When any of the following methods are called on the provider connection treat the client version as deprecated:
-
-- `ConnectionVersion4.getModel()` and `executeBuild()`.
-- `InternalConnection.getTheModel()`.
-- `configureLogging(boolean)`.
+### Test cases

-Whenever an operation is invoked on the provider connection by a deprecated client version, the connection implementation should report to
-the user that the client version is deprecated and support for it will be removed in Gradle 2.0.
-The logging output should be received through the stream attached to `LongRunningOperation.setStandardOutput()`.
+- A project defines a public and a private task.
+    - The `BuildInvocations` model for the project includes task instances with the correct visibility.
+    - The `BuildInvocations` model for the project includes task selectors with the correct visibility.
+    - The `BuildInvocations` model for the parent project includes task selectors with the correct visibility.

-### Test cases
+## Story: Allow options to be specified for tasks

-- Running a build generates a warning when using a client < 1.2, and does not generate a warning when using a client >= 1.2.
-- Fetching a model generates a warning when using a client < 1.2, and does not generate a warning when using a client >= 1.2.
+For example, allow something similar to `gradle test --tests SomePattern`.

-## Story: Deprecate support for Gradle versions earlier than Gradle 1.0-milestone-8 (DONE)
+## Story: Tooling API build action requests a tooling model for a Gradle build

-When the provider connection does not implement `InternalConnection` then treat the provider version as deprecated.
+This story adds support for building models that are scoped to a whole Gradle build (not just a project).

-Whenever an operation is invoked on a deprecated provider version, the client implementation should report to the user that the provider
-version is deprecated and support for it will be removed in Gradle 2.0.
-The logging output should be received through the stream attached to `LongRunningOperation.setStandardOutput()`.
+1. Add a new `GradleBuildToolingModelBuilder` similar to `ToolingModelBuilder`. Possibly an abstract class since this is an SPI.
+2. Extend `ToolingModelBuilderRegistry` to allow registration of such a builder.
+3. Change the way models are queried from `ProjectConnection` so that these new builders are used (there is no project context passed).
+   The only special case is the EclipseModel, which is actually built from the default project instead of the root project, so we'd need a specific implementation for that.
+4. Extend `BuildController.getModel()` to support `GradleBuild` as a model parameter or add `BuildController.getBuildModel(Class)`.
+   These would use the Gradle build model builders rather than the project model builders.

 ### Test cases

-- Running a build generates a warning when using a provider version < 1.0-milestone-8, and does not generate a warning when using a provider version >= 1.0-milestone-8.
-- Fetching a model generates a warning when using a provider version < 1.0-milestone-8, and does not generate a warning when using a provider version >= 1.0-milestone-8.
+- Can register a new model builder and
+    - query it from client via `ProjectConnection`.
+    - query it from client via build action.
+- Can request a model via build action:
+    - And get a result from `GradleBuildToolingModelBuilder` for the Gradle build scope if passing `GradleBuild` as the target.
+    - And get a result from `ToolingModelBuilder` for the project scope if passing one of the parameters describing a project.
+- Client receives decent feedback when requesting an unknown model.

 ## Story: Tooling API client cancels a long running operation

@@ -386,20 +287,21 @@ Represent the execution of a long running operation using a `Future`. This `Futu
     }

     interface ModelBuilder {
-        BuildInvocation fetch(); // starts building the model, does not block
+        BuildFuture fetch(); // starts building the model, does not block
         ...
     }

     interface BuildLauncher {
-        BuildInvocation start(); // starts running the build, does not block
+        BuildFuture start(); // starts running the build, does not block
         ...
     }

     interface BuildActionExecuter {
-        BuildInvocation start(); // starts running the build, does not block
+        BuildFuture start(); // starts running the build, does not block
         ...
     }

+    // TBD - fetch() should be called start() as well?
     BuildFuture model = connection.model(GradleProject.class).fetch();
     model.cancel(true);

@@ -416,7 +318,7 @@ to the build:

 Use futures to represent the existing asynchronous behaviour:

-1. Change internal class `BlockingResultHandler` to implement `BuildInvocation` and reuse this type to implement the futures.
+1. Change internal class `BlockingResultHandler` to implement `BuildFuture` and reuse this type to implement the futures.
 2. Implementation should discard handlers once they have been notified, so they can be garbage collected.

 Push asynchronous execution down to the provider:

@@ -444,10 +346,12 @@ For target versions that do not support cancellation, `Future.cancel()` always r

 - Client receives failure when using `get()` and operation fails
 - Client receives timeout exception when blocking with timeout
 - Client can cancel operation
-    - Stops the operation for all target versions that support cancellation
-    - Returns `false` for all older target versions
-- Client is notified when result is available
-- Client is notified when operation fails
+    - Stops the operation for all target versions that support cancellation. Does not block.
+    - Returns `false` for all older target versions.
+    - Client listener added to future is notified that operation failed due to cancellation.
+    - When a thread is blocked on `get()`, a call to `cancel()` will unblock the thread and the call to `get()` will fail with an exception.
+- Client listener added to future is notified when result is available.
+- Client listener added to future is notified when operation fails.

 ## Story: Expose the IDE output directories

@@ -492,24 +396,5 @@ Need to allow a debug port to be specified, as hard-coded port 5005 can conflict

 * Replace `LongRunningOperation.standardOutput` and `standardError` with overloads that take a `Writer`, and (later) deprecate the `OutputStream` variants.
 * Handle cancellation during the Gradle distribution download.
 * Daemon cleanly stops the build when cancellation is requested.
-
-## Story: Tooling API build action requests a tooling model for a Gradle build
-
-This story adds support to build models that have a scope of a whole Gradle build (not just a project)
-
-1. Add a new `GradleBuildToolingModelBuilder` similar to `ToolingModelBuilder`. Possibly an abstract class since this is an SPI.
-2. Extend `ToolingModelBuilderRegistry` to allow registration of such a builder.
-3.
Change the way how models are queried from `ProjectConnection` to use this new builders (there is no project context passed). - The only special case is the EclipseModel, which is actually built from the default project instead of the root project, so we'd need a specific implementation for that. -4. Extend `BuildController.getModel()` to support `GradleBuild` as model parameter or add `BuildController.getBuildModel(Class)`. - Those would be using gradle model builder rather than project model builders - -### Test cases - -- Can register new model builder and - - query it from client via `ProjectConnection`. - - query it from client via build action. -- Can request a model via build action: - - And get result from `GradleBuildToolingModelBuilder` for gradle build scope if passing `GradleBuild` as target. - - And get result from `ToolingModelBuilder` for project scope if passing one of parameters describing project. -- Client receives decent feedback when requests an unknown model. +* Test fixtures should stop daemons at end of test when custom user home dir is used. +* Introduce a `GradleExecutor` implementation backed by the tooling API. diff --git a/design-docs/unified-configuration-and-task-model.md b/design-docs/unified-configuration-and-task-model.md index 99dfdb102470..53c1fdabff1d 100644 --- a/design-docs/unified-configuration-and-task-model.md +++ b/design-docs/unified-configuration-and-task-model.md @@ -319,6 +319,7 @@ provided by the build logic, and are visible to build logic with uses the tasks. - Integration with old DSL. - Enforcement of model element lifecycle (eg detect attempts to mutate a model element that has been closed). - Configure model elements on demand. +- Native plugins: allow configuration of a default toolchain, without replacing the default toolchains. e.g. tweak `clang` without discarding `gcc`. - Error handling and reporting. - IDE integration. - Flesh out the JVM language plugins. @@ -459,6 +460,7 @@ Reuse the domain object lifecycle mechanism to warn when: # Open issues - Fire events before and after configuration of domain objects. +- Allow task outputs to be used as inputs to rules. eg a `Report` produced by a task. # Spike diff --git a/subprojects/core/src/main/groovy/org/gradle/plugin/internal/NonPluggableTargetPluginHandler.java b/gradle/analytics.gradle similarity index 54% rename from subprojects/core/src/main/groovy/org/gradle/plugin/internal/NonPluggableTargetPluginHandler.java rename to gradle/analytics.gradle index 068023fdbb75..2b965182d416 100644 --- a/subprojects/core/src/main/groovy/org/gradle/plugin/internal/NonPluggableTargetPluginHandler.java +++ b/gradle/analytics.gradle @@ -14,25 +14,21 @@ * limitations under the License. 
*/ -package org.gradle.plugin.internal; - -import org.gradle.plugin.PluginHandler; - -import java.util.Map; - -public class NonPluggableTargetPluginHandler implements PluginHandler { - - private final Object target; - - public NonPluggableTargetPluginHandler(Object target) { - this.target = target; +buildscript { + repositories { + maven { url 'http://repo.gradle.org/gradle/libs' } + ivy { url 'http://repo.gradle.org/gradle/gradleware' } } - public void apply(Map attributes) { - throw fail(); + dependencies { + classpath group: 'com.gradleware', name: 'analytics-plugin', version: '+' } +} - private RuntimeException fail() { - return new UnsupportedOperationException("Script target " + target + " cannot have plugins applied to it"); - } +//apply plugin: 'gradleware-analytics' +// apparently it is a known issue that you can't use the plugin id here +apply plugin: com.gradleware.monitoring.plugin.AnalyticsPlugin +analytics { + serverUrl = "http://discovery-prod.herokuapp.com" + key = "1234567890" } diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index e8ff4465702c..8000b35f8ae8 100755 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -25,13 +25,14 @@ libraries.ant = dependencies.module('org.apache.ant:ant:1.9.3') { dependency 'org.apache.ant:ant-launcher:1.9.3@jar' } -libraries.asm = 'org.ow2.asm:asm-all:5.0_BETA@jar' +libraries.asm = 'org.ow2.asm:asm-all:5.0.2' libraries.commons_cli = 'commons-cli:commons-cli:1.2@jar' libraries.commons_io = dependencies.module(versions.commons_io) libraries.commons_lang = 'commons-lang:commons-lang:2.6@jar' libraries.commons_collections = 'commons-collections:commons-collections:3.2.1@jar' +libraries.jsch = "com.jcraft:jsch:0.1.51" libraries.ivy = dependencies.module('org.apache.ivy:ivy:2.2.0'){ - dependency "com.jcraft:jsch:0.1.51" + dependency libraries.jsch } libraries.jcip = "net.jcip:jcip-annotations:1.0@jar" libraries.inject = dependencies.module('javax.inject:javax.inject:1') @@ -79,9 +80,9 @@ libraries += [ ant_antlr: 'org.apache.ant:ant-antlr:1.9.3@jar', antlr: 'antlr:antlr:2.7.7@jar', dom4j: 'dom4j:dom4j:1.6.1@jar', - guava: 'com.google.guava:guava-jdk5:14.0.1@jar', + guava: 'com.google.guava:guava-jdk5:17.0@jar', jsr305: 'com.google.code.findbugs:jsr305:1.3.9@jar', - groovy: 'org.codehaus.groovy:groovy-all:1.8.6@jar', + groovy: 'org.codehaus.groovy:groovy-all:2.2.2', jaxen: 'jaxen:jaxen:1.1@jar', jcip: "net.jcip:jcip-annotations:1.0@jar", jna: 'net.java.dev.jna:jna:3.2.7@jar', @@ -135,7 +136,7 @@ libraries.maven3 = dependencies.module("org.apache.maven:maven-core:3.0.4") { } libraries.spock = [ - 'org.spockframework:spock-core:0.7-groovy-1.8@jar', + 'org.spockframework:spock-core:0.7-groovy-2.0@jar', libraries.groovy, libraries.objenesis, 'cglib:cglib-nodep:2.2.2' @@ -149,4 +150,17 @@ libraries.jmock = [ libraries.objenesis, 'cglib:cglib-nodep:2.2' ] -libraries.gson = "com.google.code.gson:gson:2.2.4" \ No newline at end of file +libraries.gson = "com.google.code.gson:gson:2.2.4" +libraries.sshd = "org.apache.sshd:sshd-core:0.10.0" + +allprojects { + configurations.all { + resolutionStrategy.eachDependency { details -> + if (details.requested.group == 'org.ow2.asm') { + details.useTarget(libraries.asm) + } else if (details.requested.group == 'org.codehaus.groovy') { + details.useTarget(libraries.groovy) + } + } + } +} \ No newline at end of file diff --git a/gradle/idea.gradle b/gradle/idea.gradle index dbfd2a6ce097..68e2833db491 100644 --- a/gradle/idea.gradle +++ b/gradle/idea.gradle @@ -81,15 +81,15 @@ 
idea { copyrightManager.@default = "ASL2" def aslCopyright = copyrightManager.copyright.find { it.option.find { it.@name == "myName" }?.@value == "ASL2" } if (aslCopyright == null) { - copyrightManager.append(new XmlParser().parseText(''' + copyrightManager.append(new XmlParser().parseText(""" - - ''')) + """)) } // Code formatting options @@ -202,6 +202,7 @@ idea { +