Skip to content

Commit

Permalink
Merge pull request #315 from aloubyansky/domino-quarkus-extra-artifacts
Browse files Browse the repository at this point in the history
Add quarkus-junit5 and quarkus-junit5-mockito as extra artifacts to process as part of the quarkus command
  • Loading branch information
aloubyansky authored Jan 9, 2024
2 parents 1084f1b + fe0c4d1 commit db698e9
Show file tree
Hide file tree
Showing 6 changed files with 90 additions and 52 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver;
import io.quarkus.bootstrap.resolver.maven.workspace.LocalProject;
import io.quarkus.devtools.messagewriter.MessageWriter;
import io.quarkus.domino.scm.ScmRevision;
import io.quarkus.domino.scm.ScmRevisionResolver;
import io.quarkus.maven.dependency.ArtifactCoords;
import java.nio.file.Path;
Expand All @@ -18,14 +19,14 @@
import java.util.ServiceLoader;
import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.Model;
import org.apache.maven.model.resolution.UnresolvableModelException;
import org.apache.maven.project.ProjectBuildingRequest;
import org.apache.maven.project.ProjectModelResolver;
import org.eclipse.aether.RequestTrace;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.resolution.ArtifactDescriptorResult;
import org.eclipse.aether.resolution.ArtifactResult;

public class BomDecomposer {

Expand Down Expand Up @@ -76,15 +77,16 @@ public BomDecomposerConfig bomFile(Path bom) {

final BootstrapMavenContext mvnCtx = underlyingResolver.getMavenContext();
final LocalProject bomProject = mvnCtx.getCurrentProject();
bomArtifact = new DefaultArtifact(bomProject.getGroupId(), bomProject.getArtifactId(), "", "pom",
bomProject.getVersion());
bomArtifact = new DefaultArtifact(bomProject.getGroupId(), bomProject.getArtifactId(),
ArtifactCoords.DEFAULT_CLASSIFIER, ArtifactCoords.TYPE_POM, bomProject.getVersion());
bomArtifact = bomArtifact.setFile(bom.toFile());
bomSource = PomSource.of(bom);
return this;
}

public BomDecomposerConfig bomArtifact(String groupId, String artifactId, String version) {
return bomArtifact(new DefaultArtifact(groupId, artifactId, "", "pom", version));
return bomArtifact(new DefaultArtifact(groupId, artifactId, ArtifactCoords.DEFAULT_CLASSIFIER,
ArtifactCoords.TYPE_POM, version));
}

public BomDecomposerConfig bomArtifact(Artifact artifact) {
Expand Down Expand Up @@ -163,57 +165,67 @@ private DecomposedBom decompose() throws BomDecomposerException {
//bomBuilder.bomSource(PomSource.of(resolve(bomArtifact).getFile().toPath()));
final Iterable<Dependency> artifacts = this.artifacts == null ? bomManagedDeps() : this.artifacts;
for (Dependency dep : artifacts) {
try {
// filter out dependencies that can't be resolved
// if an artifact has a classifier we resolve the artifact itself
// if an artifact does not have a classifier we will try resolving its pom
final Artifact artifact = dep.getArtifact();
final String classifier = artifact.getClassifier();
if (!classifier.isEmpty() &&
!classifier.equals("sources") &&
!classifier.equals("javadoc")) {
addDependency(bomBuilder, dep);
}
return transformer == null ? bomBuilder.build() : transformer.transform(bomBuilder.build());
}

/**
 * Validates and registers a single managed dependency from the BOM with the
 * decomposed-BOM builder.
 * <p>
 * The dependency's artifact is first validated for resolvability (see
 * {@code validateArtifact}), then its SCM revision is resolved and the pair is
 * recorded via {@link DecomposedBomBuilder#bomDependency}. Dependencies whose
 * artifacts cannot be found are silently skipped (logged at debug level only),
 * since BOMs commonly manage artifacts that were never published.
 *
 * @param bomBuilder collector of the decomposed BOM content
 * @param dep        a managed dependency taken from the BOM
 * @throws BomDecomposerException on revision-resolution or validation failures
 *                                other than a missing artifact
 */
private void addDependency(DecomposedBomBuilder bomBuilder, Dependency dep) throws BomDecomposerException {
    try {
        // filter out dependencies that can't be resolved
        // if an artifact has a classifier we resolve the artifact itself
        // if an artifact does not have a classifier we will try resolving its pom
        validateArtifact(dep.getArtifact());
        ScmRevision revision = resolveRevision(dep.getArtifact());
        bomBuilder.bomDependency(revision, dep);
    } catch (ArtifactNotFoundException e) {
        // there are plenty of BOMs that include artifacts that don't exist
        logger().debug("Failed to resolve %s", dep);
    }
}

/**
 * Resolves the SCM (source-control) revision the given artifact was released
 * from, using only the resolver's default repositories (no extra remote
 * repositories are supplied).
 *
 * @param artifact the artifact whose origin revision should be determined
 * @return the resolved SCM revision
 * @throws BomDecomposerException if the revision cannot be determined
 */
private ScmRevision resolveRevision(Artifact artifact) throws BomDecomposerException {
    return revisionResolver.resolveRevision(artifact, List.of());
}

private void validateArtifact(Artifact artifact) throws BomDecomposerException {
final String classifier = artifact.getClassifier();
if (!classifier.isEmpty() &&
!classifier.equals("sources") &&
!classifier.equals("javadoc")) {
resolve(artifact);
} else if (ArtifactCoords.TYPE_JAR.equals(artifact.getExtension())) {
final Model model = revisionResolver.readPom(artifact);
if (ArtifactCoords.TYPE_POM.equals(model.getPackaging())) {
// if an artifact has type JAR but the packaging is POM then check whether the artifact is resolvable
try {
resolve(artifact);
} else if (ArtifactCoords.TYPE_JAR.equals(artifact.getExtension())) {
final Model model = revisionResolver.readPom(artifact);
if (ArtifactCoords.TYPE_POM.equals(model.getPackaging())) {
// if an artifact has type JAR but the packaging is POM then check whether the artifact is resolvable
try {
resolve(artifact);
} catch (BomDecomposerException | ArtifactNotFoundException e) {
final DistributionManagement distr = model.getDistributionManagement();
if (distr == null || distr.getRelocation() == null) {
// there is no relocation, so it can be removed
throw e;
}
logger().debug("Found relocation for %s", artifact);
}
} catch (ArtifactNotFoundException e) {
final DistributionManagement distr = model.getDistributionManagement();
if (distr == null || distr.getRelocation() == null) {
// there is no relocation, so it can be removed
throw e;
}
logger().debug("Found relocation for %s", artifact);
}
bomBuilder.bomDependency(revisionResolver.resolveRevision(artifact, List.of()), dep);
} catch (BomDecomposerException e) {
throw e;
} catch (ArtifactNotFoundException | UnresolvableModelException e) {
// there are plenty of BOMs that include artifacts that don't exist
logger().debug("Failed to resolve %s", dep);
}
}
return transformer == null ? bomBuilder.build() : transformer.transform(bomBuilder.build());
}

private Iterable<Dependency> bomManagedDeps() throws BomDecomposerException {
private Iterable<Dependency> bomManagedDeps() {
return describe(bomArtifact).getManagedDependencies();
}

/**
 * Returns the message writer used for logging, lazily initializing it to a
 * debug-level {@link MessageWriter} on first access.
 *
 * @return the logger, never null
 */
public MessageWriter logger() {
    if (logger == null) {
        logger = MessageWriter.debug();
    }
    return logger;
}

private ArtifactDescriptorResult describe(Artifact artifact) throws BomDecomposerException {
private ArtifactDescriptorResult describe(Artifact artifact) {
return artifactResolver().describe(artifact);
}

private Artifact resolve(Artifact artifact) throws BomDecomposerException {
return artifactResolver().resolve(artifact).getArtifact();
private ArtifactResult resolve(Artifact artifact) {
return artifactResolver().resolve(artifact);
}

private ProjectModelResolver getModelResolver() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ public interface DecomposedBomBuilder {

void bomArtifact(Artifact bomArtifact);

void bomDependency(ScmRevision releaseId, Dependency dep) throws BomDecomposerException;
void bomDependency(ScmRevision revision, Dependency dep) throws BomDecomposerException;

DecomposedBom build() throws BomDecomposerException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,16 @@

import io.quarkus.bom.PomResolver;
import io.quarkus.domino.scm.ScmRevision;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.graph.Dependency;

public class DefaultDecomposedBomBuilder implements DecomposedBomBuilder {

private PomResolver bomSource;
private Artifact bomArtifact;
private final Map<ScmRevision, ProjectRelease.Builder> releases = new HashMap<>();
private final Map<ScmRevision, ProjectRelease.Builder> releases = new ConcurrentHashMap<>();

@Override
public void bomSource(PomResolver bomSource) {
Expand All @@ -24,8 +24,8 @@ public void bomArtifact(Artifact bomArtifact) {
}

@Override
public void bomDependency(ScmRevision releaseId, Dependency artifact) throws BomDecomposerException {
releases.computeIfAbsent(releaseId, ProjectRelease::builder).add(artifact);
/**
 * Records a managed dependency under the project release associated with the
 * given SCM revision, creating the release builder on first use.
 *
 * @param revision the SCM revision the dependency's artifact was released from
 * @param artifact the managed dependency to record
 */
public void bomDependency(ScmRevision revision, Dependency artifact) {
    final ProjectRelease.Builder releaseBuilder = releases.computeIfAbsent(revision, ProjectRelease::builder);
    releaseBuilder.add(artifact);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,13 @@
import java.nio.file.Paths;
import java.time.Duration;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.maven.model.Model;
import org.apache.maven.model.resolution.UnresolvableModelException;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.repository.RemoteRepository;

Expand Down Expand Up @@ -68,7 +66,7 @@ public ScmRevisionResolver(ArtifactResolver resolver, Collection<ReleaseIdDetect
}

public ScmRevision resolveRevision(Artifact artifact, List<RemoteRepository> repos)
throws BomDecomposerException, UnresolvableModelException {
throws BomDecomposerException {
var gav = new GAV(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion());
var revision = cache.get(gav);
if (revision == null) {
Expand Down Expand Up @@ -221,16 +219,18 @@ public Model readPom(Artifact artifact, List<RemoteRepository> repos) throws Bom
}

private static class ScmRevisionCache {
private final Map<GAV, ScmRevision> gavRevisions = new WeakHashMap<>();
private final Map<String, ScmRevision> groupIdRevisions = new HashMap<>();
private final Map<GAV, ScmRevision> gavRevisions = new ConcurrentHashMap<>();
private final Map<String, ScmRevision> groupIdRevisions = new ConcurrentHashMap<>();

/**
 * Looks up the cached SCM revision for the given GAV.
 *
 * @param gav groupId:artifactId:version key
 * @return the cached revision, or {@code null} if none has been recorded
 */
ScmRevision get(GAV gav) {
    return gavRevisions.get(gav);
}

void put(GAV gav, ScmRevision revision) {
gavRevisions.put(gav, revision);
groupIdRevisions.put(gav.getGroupId(), revision);
if (revision.getRepository().isUrl()) {
groupIdRevisions.put(gav.getGroupId(), revision);
}
}
}
}
2 changes: 1 addition & 1 deletion domino/api/src/main/java/io/quarkus/domino/BuildTool.java
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,6 @@ public static BuildTool forProjectDir(Path projectDir) {
}
}
}
throw new IllegalArgumentException("Unable to find a determine the build tool for " + projectDir);
throw new IllegalArgumentException("Unable to determine the build tool for " + projectDir);
}
}
28 changes: 27 additions & 1 deletion domino/app/src/main/java/io/quarkus/domino/cli/Quarkus.java
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,14 @@
import io.quarkus.devtools.messagewriter.MessageWriter;
import io.quarkus.domino.ArtifactCoordsPattern;
import io.quarkus.domino.ArtifactSet;
import io.quarkus.domino.RhVersionPattern;
import io.quarkus.domino.inspect.DependencyTreeError;
import io.quarkus.domino.inspect.DependencyTreeInspector;
import io.quarkus.domino.inspect.DependencyTreeVisitor;
import io.quarkus.domino.inspect.quarkus.QuarkusPlatformInfo;
import io.quarkus.domino.inspect.quarkus.QuarkusPlatformInfoReader;
import io.quarkus.maven.dependency.ArtifactCoords;
import io.quarkus.maven.dependency.ArtifactKey;
import io.quarkus.paths.PathTree;
import io.quarkus.util.GlobUtil;
import java.io.BufferedReader;
Expand All @@ -40,6 +42,10 @@
+ "Various options to analyze dependencies of a Quarkus platform release.")
public class Quarkus implements Callable<Integer> {

private static final List<ArtifactKey> EXTRA_CORE_ARTIFACTS = List.of(
ArtifactKey.of("io.quarkus", "quarkus-junit5", ArtifactCoords.DEFAULT_CLASSIFIER, ArtifactCoords.TYPE_JAR),
ArtifactKey.of("io.quarkus", "quarkus-junit5-mockito", ArtifactCoords.DEFAULT_CLASSIFIER, ArtifactCoords.TYPE_JAR));

@CommandLine.Option(names = {
"--settings",
"-s" }, description = "A path to Maven settings that should be used when initializing the Maven resolver")
Expand Down Expand Up @@ -202,6 +208,26 @@ public void handleResolutionFailures(Collection<DependencyTreeError> requests) {
rootsToMembers.computeIfAbsent(pluginCoords, k -> new ArrayList<>(1)).add(m);
}
}
for (var extraKey : EXTRA_CORE_ARTIFACTS) {
var extraArtifact = ArtifactCoords.of(extraKey.getGroupId(), extraKey.getArtifactId(),
extraKey.getClassifier(), extraKey.getType(), platform.getCore().getQuarkusCoreVersion());
var d = m.bomConstraints.get(extraArtifact);
if (d == null && RhVersionPattern.isRhVersion(platform.getCore().getQuarkusCoreVersion())) {
extraArtifact = ArtifactCoords.of(extraKey.getGroupId(), extraKey.getArtifactId(),
extraKey.getClassifier(), extraKey.getType(),
RhVersionPattern.ensureNoRhQualifier(platform.getCore().getQuarkusCoreVersion()));
d = m.bomConstraints.get(extraArtifact);
}
if (d == null) {
log.warn("Failed to locate " + extraArtifact + " among "
+ platform.getCore().getBom().toCompactCoords() + " constraints");
} else if (isVersionSelected(d.getArtifact().getVersion())) {
if (tracePattern != null) {
rootsToMembers.computeIfAbsent(extraArtifact, k -> new ArrayList<>(1)).add(m);
}
treeProcessor.inspectAsDependency(d.getArtifact(), effectiveConstraints, d.getExclusions());
}
}
} else {
var tmp = readBomConstraints(m.metadata.getBom(), resolver);
effectiveConstraints = new ArrayList<>(coreConstraints.size() + tmp.size());
Expand Down Expand Up @@ -570,7 +596,7 @@ private void append(StringBuilder out) {
if (!artifact.getClassifier().isEmpty()) {
out.append(artifact.getClassifier()).append(':');
}
if (!"jar".equals(artifact.getExtension())) {
if (!ArtifactCoords.TYPE_JAR.equals(artifact.getExtension())) {
if (artifact.getClassifier().isEmpty()) {
out.append(':');
}
Expand Down

0 comments on commit db698e9

Please sign in to comment.