diff --git a/.mvn/gradle-enterprise-custom-user-data.groovy b/.mvn/gradle-enterprise-custom-user-data.groovy
index 8e78dcf99e597..0f9b416591ea7 100644
--- a/.mvn/gradle-enterprise-custom-user-data.groovy
+++ b/.mvn/gradle-enterprise-custom-user-data.groovy
@@ -12,6 +12,17 @@ if(session?.getRequest()?.getBaseDirectory() != null) {
     if(!publish) {
         // do not publish a build scan for test builds
         log.debug("Disabling build scan publication for " + session.getRequest().getBaseDirectory())
+
+        // change storage location on CI to avoid Develocity scan dumps with disabled publication to be captured for republication
+        if (System.env.GITHUB_ACTIONS) {
+            try {
+                def storageLocationTmpDir = java.nio.file.Files.createTempDirectory(java.nio.file.Paths.get(System.env.RUNNER_TEMP), "buildScanTmp").toAbsolutePath()
+                log.debug('Update storage location to ' + storageLocationTmpDir)
+                gradleEnterprise.setStorageDirectory(storageLocationTmpDir)
+            } catch (IOException e) {
+                log.error('Temporary storage location directory cannot be created, the Build Scan will be published', e)
+            }
+        }
     }
 }
 buildScan.publishAlwaysIf(publish)
diff --git a/bom/application/pom.xml b/bom/application/pom.xml
index 47c37c313737c..894ac585a5537 100644
--- a/bom/application/pom.xml
+++ b/bom/application/pom.xml
@@ -35,7 +35,7 @@
         1.32.0-alpha
         1.21.0-alpha
         5.1.0.Final
-        1.11.5
+        1.12.2
         2.1.12
         0.22.0
         21.1
@@ -51,18 +51,18 @@
         2.0
         3.1.1
         2.2.0
-        3.5.1
-        4.0.4
+        3.5.2
+        4.1.0
         4.0.0
         3.8.0
-        2.6.1
+        2.7.0
         6.2.6
         4.4.0
         2.1.0
         1.0.13
         3.0.1
         3.8.0
-        4.14.0
+        4.15.0
         2.5.0
         2.1.2
         2.1.1
@@ -100,17 +100,17 @@
         bytebuddy.version (just below), hibernate-orm.version-for-documentation (in docs/pom.xml)
         and both hibernate-orm.version and antlr.version in build-parent/pom.xml
         WARNING again for diffs that don't provide enough context: when updating, see above -->
-        6.4.0.Final
+        6.4.1.Final
         1.14.7
         6.0.6.Final
-        2.2.0.Final
+        2.2.1.Final
         8.0.1.Final
         7.0.0.Final
         7.0.0.Final
         2.1
         8.0.0.Final
-        8.11.1
+        8.11.3
         2.2.21
         2.2.5.Final
         2.2.2.Final
@@ -202,10 +202,10 @@
         1.11.0
         2.10.1
         1.1.2.Final
-        2.22.0
+        2.22.1
         1.3.0.Final
         1.11.3
-        2.5.7.Final
+        2.5.8.Final
         0.1.18.Final
         1.19.3
         3.3.4
diff --git a/build-parent/pom.xml b/build-parent/pom.xml
index f7ec7b9e90255..416242a1b73f3 100644
--- a/build-parent/pom.xml
+++ b/build-parent/pom.xml
@@ -111,7 +111,7 @@
         6.0.11
-        3.24.2
+        3.25.1
         3.3.1
         7.3.0
diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java
index 4e616696cbedb..a38ed23b1ecd7 100644
--- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java
+++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java
@@ -103,6 +103,13 @@ public static BuiltInType fromString(String value) {
     @ConfigItem(defaultValue = "jar")
     public String type;
 
+    /**
+     * Whether the created jar will be compressed. This setting is not used when building a native image
+     */
+    @ConfigItem
+    @ConfigDocDefault("false")
+    public Optional<Boolean> compressJar;
+
     /**
      * Manifest configuration of the runner jar.
      */
diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java
index 1b212a0369006..b581c38415e98 100644
--- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java
+++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java
@@ -18,6 +18,8 @@ static final class VersionParseHelper {
 
         private static final String JVMCI_BUILD_PREFIX = "jvmci-";
         private static final String MANDREL_VERS_PREFIX = "Mandrel-";
+        private static final String LIBERICA_NIK_VERS_PREFIX = "Liberica-NIK-";
+
         // Java version info (suitable for Runtime.Version.parse()). See java.lang.VersionProps
         private static final String VNUM = "(?<VNUM>[1-9][0-9]*(?:(?:\\.0)*\\.[1-9][0-9]*)*)";
         private static final String PRE = "(?:-(?<PRE>[a-zA-Z0-9]+))?";
@@ -68,19 +70,47 @@ static Version parse(List<String> lines) {
                 if (vendorVersion.contains("-dev")) {
                     graalVersion = graalVersion + "-dev";
                 }
-                String mandrelVersion = mandrelVersion(vendorVersion);
-                Distribution dist = isMandrel(vendorVersion) ? Distribution.MANDREL : Distribution.GRAALVM;
-                String versNum = (dist == Distribution.MANDREL ? mandrelVersion : graalVersion);
+                String versNum;
+                Distribution dist;
+                if (isMandrel(vendorVersion)) {
+                    dist = Distribution.MANDREL;
+                    versNum = mandrelVersion(vendorVersion);
+                } else if (isLiberica(vendorVersion)) {
+                    dist = Distribution.LIBERICA;
+                    versNum = libericaVersion(vendorVersion);
+                } else {
+                    dist = Distribution.GRAALVM;
+                    versNum = graalVersion;
+                }
                 if (versNum == null) {
                     return UNKNOWN_VERSION;
                 }
-                return new Version(lines.stream().collect(Collectors.joining("\n")),
+                return new Version(String.join("\n", lines),
                         versNum, v, dist);
             } else {
                 return UNKNOWN_VERSION;
             }
         }
 
+        private static boolean isLiberica(String vendorVersion) {
+            if (vendorVersion == null) {
+                return false;
+            }
+            return !vendorVersion.isBlank() && vendorVersion.startsWith(LIBERICA_NIK_VERS_PREFIX);
+        }
+
+        private static String libericaVersion(String vendorVersion) {
+            if (vendorVersion == null) {
+                return null;
+            }
+            int idx = vendorVersion.indexOf(LIBERICA_NIK_VERS_PREFIX);
+            if (idx < 0) {
+                return null;
+            }
+            String version = vendorVersion.substring(idx + LIBERICA_NIK_VERS_PREFIX.length());
+            return matchVersion(version);
+        }
+
         private static boolean isMandrel(String vendorVersion) {
             if (vendorVersion == null) {
                 return false;
@@ -244,7 +274,7 @@ public static Version of(Stream<String> output) {
             String stringOutput = output.collect(Collectors.joining("\n"));
             List<String> lines = stringOutput.lines()
                     .dropWhile(l -> !l.startsWith("GraalVM") && !l.startsWith("native-image"))
-                    .collect(Collectors.toUnmodifiableList());
+                    .toList();
 
             if (lines.size() == 3) {
                 // Attempt to parse the new 3-line version scheme first.
@@ -322,6 +352,7 @@ public boolean isJava17() {
 
     enum Distribution {
         GRAALVM,
+        LIBERICA,
         MANDREL;
     }
 }
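
For context on the new Liberica branch above: the vendor string reported by Liberica NIK (see the test added in GraalVMTest further down) carries the NIK version right after the `Liberica-NIK-` prefix, and the patch reuses the existing `matchVersion()` helper to drop the trailing build qualifier. Below is a minimal standalone sketch of that extraction; the class name and the regex standing in for `matchVersion()` are illustrative assumptions, not the patch's code.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class LibericaVersionSketch {
    // Stand-in for VersionParseHelper.matchVersion(): keep the leading dotted-numeric
    // part of the vendor version, e.g. "23.1.1-1" -> "23.1.1".
    private static final Pattern VERS_PATTERN = Pattern.compile("^(\\d+(?:\\.\\d+)*)");

    static String libericaVersion(String vendorVersion) {
        final String prefix = "Liberica-NIK-";
        int idx = vendorVersion.indexOf(prefix);
        if (idx < 0) {
            return null;
        }
        Matcher m = VERS_PATTERN.matcher(vendorVersion.substring(idx + prefix.length()));
        return m.find() ? m.group(1) : null;
    }

    public static void main(String[] args) {
        // Vendor version as it appears in the new GraalVMTest case
        System.out.println(libericaVersion("Liberica-NIK-23.1.1-1")); // prints 23.1.1
    }
}
```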
diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java
index 166562b4c1d2d..fdcbe78e85e25 100644
--- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java
+++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java
@@ -329,7 +329,7 @@ private void buildUberJar0(CurateOutcomeBuildItem curateOutcomeBuildItem,
             MainClassBuildItem mainClassBuildItem,
             ClassLoadingConfig classLoadingConfig,
             Path runnerJar) throws Exception {
-        try (FileSystem runnerZipFs = ZipUtils.newZip(runnerJar)) {
+        try (FileSystem runnerZipFs = createNewZip(runnerJar, packageConfig)) {
 
             log.info("Building uber jar: " + runnerJar);
 
@@ -530,7 +530,7 @@ private JarBuildItem buildLegacyThinJar(CurateOutcomeBuildItem curateOutcomeBuil
         Files.deleteIfExists(runnerJar);
         IoUtils.createOrEmptyDir(libDir);
 
-        try (FileSystem runnerZipFs = ZipUtils.newZip(runnerJar)) {
+        try (FileSystem runnerZipFs = createNewZip(runnerJar, packageConfig)) {
 
             log.info("Building thin jar: " + runnerJar);
 
@@ -629,7 +629,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
         if (!transformedClasses.getTransformedClassesByJar().isEmpty()) {
             Path transformedZip = quarkus.resolve(TRANSFORMED_BYTECODE_JAR);
             fastJarJarsBuilder.setTransformed(transformedZip);
-            try (FileSystem out = ZipUtils.newZip(transformedZip)) {
+            try (FileSystem out = createNewZip(transformedZip, packageConfig)) {
                 for (Set<TransformedClassesBuildItem.TransformedClass> transformedSet : transformedClasses
                         .getTransformedClassesByJar().values()) {
                     for (TransformedClassesBuildItem.TransformedClass transformed : transformedSet) {
@@ -650,7 +650,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
         //now generated classes and resources
         Path generatedZip = quarkus.resolve(GENERATED_BYTECODE_JAR);
         fastJarJarsBuilder.setGenerated(generatedZip);
-        try (FileSystem out = ZipUtils.newZip(generatedZip)) {
+        try (FileSystem out = createNewZip(generatedZip, packageConfig)) {
             for (GeneratedClassBuildItem i : generatedClasses) {
                 String fileName = i.getName().replace('.', '/') + ".class";
                 Path target = out.getPath(fileName);
@@ -683,7 +683,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
         if (!rebuild) {
             Predicate<String> ignoredEntriesPredicate = getThinJarIgnoredEntriesPredicate(packageConfig);
 
-            try (FileSystem runnerZipFs = ZipUtils.newZip(runnerJar)) {
+            try (FileSystem runnerZipFs = createNewZip(runnerJar, packageConfig)) {
                 copyFiles(applicationArchivesBuildItem.getRootArchive(), runnerZipFs, null, ignoredEntriesPredicate);
             }
         }
@@ -695,7 +695,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
             if (!rebuild) {
                 copyDependency(parentFirstKeys, outputTargetBuildItem, copiedArtifacts, mainLib, baseLib,
                         fastJarJarsBuilder::addDep, true,
-                        classPath, appDep, transformedClasses, removed);
+                        classPath, appDep, transformedClasses, removed, packageConfig);
             } else if (includeAppDep(appDep, outputTargetBuildItem.getIncludedOptionalDependencies(), removed)) {
                 appDep.getResolvedPaths().forEach(fastJarJarsBuilder::addDep);
             }
@@ -768,7 +768,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
             }
         }
         if (!rebuild) {
-            try (FileSystem runnerZipFs = ZipUtils.newZip(initJar)) {
+            try (FileSystem runnerZipFs = createNewZip(initJar, packageConfig)) {
                 ResolvedDependency appArtifact = curateOutcomeBuildItem.getApplicationModel().getAppArtifact();
                 generateManifest(runnerZipFs, classPath.toString(), packageConfig, appArtifact,
                         QuarkusEntryPoint.class.getName(),
@@ -783,7 +783,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
                     copyDependency(parentFirstKeys, outputTargetBuildItem, copiedArtifacts, deploymentLib, baseLib, (p) -> {
                     },
                             false, classPath,
-                            appDep, new TransformedClassesBuildItem(Map.of()), removed); //we don't care about transformation here, so just pass in an empty item
+                            appDep, new TransformedClassesBuildItem(Map.of()), removed, packageConfig); //we don't care about transformation here, so just pass in an empty item
                 }
                 Map> relativePaths = new HashMap<>();
                 for (Map.Entry> e : copiedArtifacts.entrySet()) {
@@ -884,7 +884,8 @@ private Set getRemovedKeys(ClassLoadingConfig classLoadingConfig) {
     private void copyDependency(Set parentFirstArtifacts, OutputTargetBuildItem outputTargetBuildItem,
             Map> runtimeArtifacts, Path libDir, Path baseLib, Consumer targetPathConsumer,
             boolean allowParentFirst, StringBuilder classPath, ResolvedDependency appDep,
-            TransformedClassesBuildItem transformedClasses, Set removedDeps)
+            TransformedClassesBuildItem transformedClasses, Set removedDeps,
+            PackageConfig packageConfig)
             throws IOException {
 
         // Exclude files that are not jars (typically, we can have XML files here, see https://github.com/quarkusio/quarkus/issues/2852)
@@ -912,7 +913,7 @@ private void copyDependency(Set parentFirstArtifacts, OutputTargetB
                 // This case can happen when we are building a jar from inside the Quarkus repository
                 // and Quarkus Bootstrap's localProjectDiscovery has been set to true. In such a case
                 // the non-jar dependencies are the Quarkus dependencies picked up on the file system
-                packageClasses(resolvedDep, targetPath);
+                packageClasses(resolvedDep, targetPath, packageConfig);
             } else {
                 Set<TransformedClassesBuildItem.TransformedClass> transformedFromThisArchive = transformedClasses
                         .getTransformedClassesByJar().get(resolvedDep);
@@ -934,8 +935,8 @@ private void copyDependency(Set parentFirstArtifacts, OutputTargetB
         }
     }
 
-    private void packageClasses(Path resolvedDep, final Path targetPath) throws IOException {
-        try (FileSystem runnerZipFs = ZipUtils.newZip(targetPath)) {
+    private void packageClasses(Path resolvedDep, final Path targetPath, PackageConfig packageConfig) throws IOException {
+        try (FileSystem runnerZipFs = createNewZip(targetPath, packageConfig)) {
             Files.walkFileTree(resolvedDep, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE,
                     new SimpleFileVisitor<Path>() {
                         @Override
@@ -1649,4 +1650,12 @@ public boolean decompile(Path jarToDecompile) {
         }
     }
 
+    private static FileSystem createNewZip(Path runnerJar, PackageConfig config) throws IOException {
+        boolean useUncompressedJar = config.compressJar.map(o -> !o).orElse(false);
+        if (useUncompressedJar) {
+            return ZipUtils.newZip(runnerJar, Map.of("compressionMethod", "STORED"));
+        }
+        return ZipUtils.newZip(runnerJar);
+    }
+
 }
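
The `createNewZip()` helper added above defers to the JDK zip file system provider: passing `compressionMethod=STORED` in the provider's env map writes entries uncompressed, while the default stays DEFLATED. A self-contained sketch of that mechanism, independent of `ZipUtils` (the archive path is illustrative):

```java
import java.net.URI;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;

class StoredZipSketch {
    public static void main(String[] args) throws Exception {
        Path jar = Path.of("example-runner.jar"); // illustrative output path
        // "create" builds the archive if missing; "compressionMethod" = "STORED"
        // writes uncompressed entries, which is what the patch selects when
        // compressJar is explicitly set to false.
        Map<String, String> env = Map.of("create", "true", "compressionMethod", "STORED");
        try (FileSystem zipFs = FileSystems.newFileSystem(URI.create("jar:" + jar.toUri()), env)) {
            Files.writeString(zipFs.getPath("hello.txt"), "stored, not deflated");
        }
    }
}
```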
diff --git a/core/deployment/src/main/java/io/quarkus/deployment/recording/PropertyUtils.java b/core/deployment/src/main/java/io/quarkus/deployment/recording/PropertyUtils.java
index 02d3cf4dd5153..25959c196c0a2 100644
--- a/core/deployment/src/main/java/io/quarkus/deployment/recording/PropertyUtils.java
+++ b/core/deployment/src/main/java/io/quarkus/deployment/recording/PropertyUtils.java
@@ -2,7 +2,9 @@
 package io.quarkus.deployment.recording;
 
 import java.lang.reflect.Method;
+import java.lang.reflect.RecordComponent;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -19,6 +21,12 @@ final class PropertyUtils {
     private static final Function<Class<?>, Property[]> FUNCTION = new Function<Class<?>, Property[]>() {
         @Override
         public Property[] apply(Class<?> type) {
+            if (type.isRecord()) {
+                RecordComponent[] recordComponents = type.getRecordComponents();
+                return Arrays.stream(recordComponents)
+                        .map(rc -> new Property(rc.getName(), rc.getAccessor(), null, rc.getType())).toArray(Property[]::new);
+            }
+
             List<Property> ret = new ArrayList<>();
             Method[] methods = type.getMethods();
 
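
The record branch added to `PropertyUtils` above maps each record component to a property backed by its accessor method. A small throwaway example of the same reflection calls; the `Endpoint` record is made up for illustration:

```java
import java.lang.reflect.RecordComponent;

class RecordPropertySketch {
    record Endpoint(String host, int port) {
    }

    public static void main(String[] args) throws Exception {
        Endpoint endpoint = new Endpoint("localhost", 8080);
        for (RecordComponent rc : Endpoint.class.getRecordComponents()) {
            // rc.getAccessor() is the generated component accessor, e.g. Endpoint::host
            Object value = rc.getAccessor().invoke(endpoint);
            System.out.println(rc.getName() + " (" + rc.getType().getSimpleName() + ") = " + value);
        }
    }
}
```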
diff --git a/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/GraalVMTest.java b/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/GraalVMTest.java
index 2914dfe0ee7cb..9af2755056560 100644
--- a/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/GraalVMTest.java
+++ b/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/GraalVMTest.java
@@ -104,6 +104,18 @@ static void assertVersion(Version graalVmVersion, Distribution distro, Version v
         }
     }
 
+    @Test
+    public void testGraalVM21LibericaVersionParser() {
+        Version graalVM21Dev = Version.of(Stream.of(("native-image 21.0.1 2023-10-17\n"
+                + "GraalVM Runtime Environment Liberica-NIK-23.1.1-1 (build 21.0.1+12-LTS)\n"
+                + "Substrate VM Liberica-NIK-23.1.1-1 (build 21.0.1+12-LTS, serial gc)").split("\\n")));
+        assertThat(graalVM21Dev.distribution.name()).isEqualTo("LIBERICA");
+        assertThat(graalVM21Dev.getVersionAsString()).isEqualTo("23.1.1");
+        assertThat(graalVM21Dev.javaVersion.toString()).isEqualTo("21.0.1+12-LTS");
+        assertThat(graalVM21Dev.javaVersion.feature()).isEqualTo(21);
+        assertThat(graalVM21Dev.javaVersion.update()).isEqualTo(1);
+    }
+
     @Test
     public void testGraalVM21VersionParser() {
         Version graalVM21Dev = Version.of(Stream.of(("native-image 21 2023-09-19\n"
diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/PackageAppTestBase.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/PackageAppTestBase.java
index 3e99369b1c29d..402f09d526284 100644
--- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/PackageAppTestBase.java
+++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/PackageAppTestBase.java
@@ -120,9 +120,15 @@ public static Collection<Dependency> getDeploymentOnlyDeps(ApplicationModel mode
     public static Collection<Dependency> getDependenciesWithFlag(ApplicationModel model, int flag) {
         var set = new HashSet<Dependency>();
         for (var d : model.getDependencies(flag)) {
-            if (d.isFlagSet(flag)) {
-                set.add(new ArtifactDependency(d));
-            }
+            set.add(new ArtifactDependency(d));
+        }
+        return set;
+    }
+
+    public static Collection<Dependency> getDependenciesWithAnyFlag(ApplicationModel model, int... flags) {
+        var set = new HashSet<Dependency>();
+        for (var d : model.getDependenciesWithAnyFlag(flags)) {
+            set.add(new ArtifactDependency(d));
         }
         return set;
     }
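
The new `getDependenciesWithAnyFlag()` helper differs from `getDependenciesWithFlag()` in that it accepts a dependency when any one of the requested flags is set, rather than requiring the single given flag. A tiny sketch of that bitmask distinction; the flag constants below are made-up placeholders, not the real `DependencyFlags` values:

```java
class FlagSemanticsSketch {
    // Placeholder values; the real constants live in io.quarkus.maven.dependency.DependencyFlags
    static final int RUNTIME_CP = 1 << 0;
    static final int COMPILE_ONLY = 1 << 1;

    static boolean hasFlag(int flags, int flag) {
        return (flags & flag) == flag; // the single requested flag is fully set
    }

    static boolean hasAnyFlag(int flags, int... candidates) {
        for (int candidate : candidates) {
            if (hasFlag(flags, candidate)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        int flags = RUNTIME_CP;
        System.out.println(hasFlag(flags, COMPILE_ONLY));                // false
        System.out.println(hasAnyFlag(flags, RUNTIME_CP, COMPILE_ONLY)); // true
    }
}
```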
diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java
index bb1789475f99e..adaca8f5ead22 100644
--- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java
+++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java
@@ -5,6 +5,8 @@
 import java.util.HashSet;
 import java.util.Set;
 
+import org.eclipse.aether.util.artifact.JavaScopes;
+
 import io.quarkus.bootstrap.model.ApplicationModel;
 import io.quarkus.bootstrap.resolver.TsArtifact;
 import io.quarkus.bootstrap.resolver.TsDependency;
@@ -35,16 +37,20 @@ protected TsArtifact composeApplication() {
         addToExpectedLib(extA.getRuntime());
         extA.getRuntime()
                 .addDependency(extADep)
-                .addDependency(new TsDependency(extAProvidedDep, "provided"));
+                .addDependency(new TsDependency(extAProvidedDep, JavaScopes.PROVIDED));
         extA.getDeployment()
                 .addDependency(extADeploymentDep)
-                .addDependency(new TsDependency(extAOptionalDeploymentDep, "provided"));
+                .addDependency(new TsDependency(extAOptionalDeploymentDep, JavaScopes.PROVIDED));
 
         final TsQuarkusExt extB = new TsQuarkusExt("ext-b");
         this.install(extB);
 
         final TsArtifact directProvidedDep = TsArtifact.jar("direct-provided-dep");
 
+        final TsArtifact depC2 = TsArtifact.jar("dep-c", "2");
+        // make sure provided dependencies don't override compile/runtime dependencies
+        directProvidedDep.addDependency(depC2);
+
         final TsArtifact transitiveProvidedDep = TsArtifact.jar("transitive-provided-dep");
         directProvidedDep.addDependency(transitiveProvidedDep);
 
@@ -52,8 +58,8 @@ protected TsArtifact composeApplication() {
                 .addManagedDependency(platformDescriptor())
                 .addManagedDependency(platformProperties())
                 .addDependency(extA)
-                .addDependency(extB, "provided")
-                .addDependency(new TsDependency(directProvidedDep, "provided"));
+                .addDependency(extB, JavaScopes.PROVIDED)
+                .addDependency(new TsDependency(directProvidedDep, JavaScopes.PROVIDED));
     }
 
     @Override
@@ -64,5 +70,44 @@ protected void assertAppModel(ApplicationModel model) throws Exception {
         expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-deployment-dep", "1"),
                 DependencyFlags.DEPLOYMENT_CP));
         assertEquals(expected, getDeploymentOnlyDeps(model));
+
+        final Set<Dependency> expectedRuntime = new HashSet<>();
+        expectedRuntime.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a", "1"),
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP,
+                DependencyFlags.DIRECT,
+                DependencyFlags.RUNTIME_EXTENSION_ARTIFACT,
+                DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT));
+        expectedRuntime.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-dep", "1"),
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP));
+        expectedRuntime.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "dep-c", "1"),
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP));
+        assertEquals(expectedRuntime, getDependenciesWithFlag(model, DependencyFlags.RUNTIME_CP));
+
+        final Set<Dependency> expectedCompileOnly = new HashSet<>();
+        expectedCompileOnly.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-b", "1"),
+                JavaScopes.PROVIDED,
+                DependencyFlags.RUNTIME_EXTENSION_ARTIFACT,
+                DependencyFlags.DIRECT,
+                DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT,
+                DependencyFlags.COMPILE_ONLY));
+        expectedCompileOnly
+                .add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "direct-provided-dep", "1"),
+                        JavaScopes.PROVIDED,
+                        DependencyFlags.DIRECT,
+                        DependencyFlags.COMPILE_ONLY));
+        expectedCompileOnly
+                .add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "transitive-provided-dep", "1"),
+                        JavaScopes.PROVIDED,
+                        DependencyFlags.COMPILE_ONLY));
+        assertEquals(expectedCompileOnly, getDependenciesWithFlag(model, DependencyFlags.COMPILE_ONLY));
+
+        final Set<Dependency> compileOnlyPlusRuntime = new HashSet<>();
+        compileOnlyPlusRuntime.addAll(expectedRuntime);
+        compileOnlyPlusRuntime.addAll(expectedCompileOnly);
+        assertEquals(compileOnlyPlusRuntime,
+                getDependenciesWithAnyFlag(model, DependencyFlags.RUNTIME_CP, DependencyFlags.COMPILE_ONLY));
     }
 }
diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTestModeTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTestModeTest.java
new file mode 100644
index 0000000000000..26b8f66d583c2
--- /dev/null
+++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTestModeTest.java
@@ -0,0 +1,142 @@
+package io.quarkus.deployment.runnerjar;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.eclipse.aether.util.artifact.JavaScopes;
+
+import io.quarkus.bootstrap.model.ApplicationModel;
+import io.quarkus.bootstrap.resolver.TsArtifact;
+import io.quarkus.bootstrap.resolver.TsDependency;
+import io.quarkus.bootstrap.resolver.TsQuarkusExt;
+import io.quarkus.maven.dependency.ArtifactCoords;
+import io.quarkus.maven.dependency.ArtifactDependency;
+import io.quarkus.maven.dependency.Dependency;
+import io.quarkus.maven.dependency.DependencyFlags;
+
+public class ProvidedExtensionDepsTestModeTest extends BootstrapFromOriginalJarTestBase {
+
+    @Override
+    protected boolean isBootstrapForTestMode() {
+        return true;
+    }
+
+    @Override
+    protected TsArtifact composeApplication() {
+
+        final TsArtifact extADep = TsArtifact.jar("ext-a-dep");
+        addToExpectedLib(extADep);
+
+        final TsArtifact depC1 = TsArtifact.jar("dep-c");
+        //addToExpectedLib(depC1);
+        extADep.addDependency(depC1);
+
+        final TsArtifact extAProvidedDep = TsArtifact.jar("ext-a-provided-dep");
+
+        final TsArtifact extADeploymentDep = TsArtifact.jar("ext-a-deployment-dep");
+        final TsArtifact extAOptionalDeploymentDep = TsArtifact.jar("ext-a-provided-deployment-dep");
+
+        final TsQuarkusExt extA = new TsQuarkusExt("ext-a");
+        addToExpectedLib(extA.getRuntime());
+        extA.getRuntime()
+                .addDependency(extADep)
+                .addDependency(new TsDependency(extAProvidedDep, JavaScopes.PROVIDED));
+        extA.getDeployment()
+                .addDependency(extADeploymentDep)
+                .addDependency(new TsDependency(extAOptionalDeploymentDep, JavaScopes.PROVIDED));
+
+        final TsQuarkusExt extB = new TsQuarkusExt("ext-b");
+        addToExpectedLib(extB.getRuntime());
+        this.install(extB);
+
+        final TsArtifact directProvidedDep = TsArtifact.jar("direct-provided-dep");
+        addToExpectedLib(directProvidedDep);
+
+        final TsArtifact depC2 = TsArtifact.jar("dep-c", "2");
+        // here provided dependencies will override compile/runtime ones during version convergence
+        addToExpectedLib(depC2);
+        directProvidedDep.addDependency(depC2);
+
+        final TsArtifact transitiveProvidedDep = TsArtifact.jar("transitive-provided-dep");
+        addToExpectedLib(transitiveProvidedDep);
+        directProvidedDep.addDependency(transitiveProvidedDep);
+
+        return TsArtifact.jar("app")
+                .addManagedDependency(platformDescriptor())
+                .addManagedDependency(platformProperties())
+                .addDependency(extA)
+                .addDependency(extB, JavaScopes.PROVIDED)
+                .addDependency(new TsDependency(directProvidedDep, JavaScopes.PROVIDED));
+    }
+
+    @Override
+    protected void assertAppModel(ApplicationModel model) throws Exception {
+        Set<Dependency> expected = new HashSet<>();
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-deployment", "1"),
+                DependencyFlags.DEPLOYMENT_CP));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-deployment-dep", "1"),
+                DependencyFlags.DEPLOYMENT_CP));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-b-deployment", "1"),
+                JavaScopes.PROVIDED,
+                DependencyFlags.DEPLOYMENT_CP));
+        assertEquals(expected, getDeploymentOnlyDeps(model));
+
+        expected = new HashSet<>();
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a", "1"),
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP,
+                DependencyFlags.DIRECT,
+                DependencyFlags.RUNTIME_EXTENSION_ARTIFACT,
+                DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-dep", "1"),
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "dep-c", "2"),
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-b", "1"),
+                JavaScopes.PROVIDED,
+                DependencyFlags.RUNTIME_EXTENSION_ARTIFACT,
+                DependencyFlags.DIRECT,
+                DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT,
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP,
+                DependencyFlags.COMPILE_ONLY));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "direct-provided-dep", "1"),
+                JavaScopes.PROVIDED,
+                DependencyFlags.DIRECT,
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP,
+                DependencyFlags.COMPILE_ONLY));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "transitive-provided-dep", "1"),
+                JavaScopes.PROVIDED,
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP,
+                DependencyFlags.COMPILE_ONLY));
+        assertEquals(expected, getDependenciesWithFlag(model, DependencyFlags.RUNTIME_CP));
+
+        expected = new HashSet<>();
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-b", "1"),
+                JavaScopes.PROVIDED,
+                DependencyFlags.RUNTIME_EXTENSION_ARTIFACT,
+                DependencyFlags.DIRECT,
+                DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT,
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP,
+                DependencyFlags.COMPILE_ONLY));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "direct-provided-dep", "1"),
+                JavaScopes.PROVIDED,
+                DependencyFlags.DIRECT,
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP,
+                DependencyFlags.COMPILE_ONLY));
+        expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "transitive-provided-dep", "1"),
+                JavaScopes.PROVIDED,
+                DependencyFlags.RUNTIME_CP,
+                DependencyFlags.DEPLOYMENT_CP,
+                DependencyFlags.COMPILE_ONLY));
+        assertEquals(expected, getDependenciesWithFlag(model, DependencyFlags.COMPILE_ONLY));
+    }
+}
diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigurationException.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigurationException.java
index a020bedb02852..d9042f9ee9173 100644
--- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigurationException.java
+++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigurationException.java
@@ -1,6 +1,7 @@
 package io.quarkus.runtime.configuration;
 
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Set;
 
 import io.quarkus.dev.config.ConfigurationProblem;
@@ -55,7 +56,7 @@ public ConfigurationException(final String msg, Set<String> configKeys) {
      */
     public ConfigurationException(final Throwable cause, Set<String> configKeys) {
         super(cause);
-        this.configKeys = configKeys;
+        this.configKeys = forwardCauseConfigKeys(configKeys, cause);
     }
 
     /**
@@ -77,7 +78,7 @@ public ConfigurationException(final String msg, final Throwable cause) {
      */
     public ConfigurationException(final String msg, final Throwable cause, Set<String> configKeys) {
         super(msg, cause);
-        this.configKeys = configKeys;
+        this.configKeys = forwardCauseConfigKeys(configKeys, cause);
     }
 
     public ConfigurationException(Throwable cause) {
@@ -88,4 +89,13 @@ public ConfigurationException(Throwable cause) {
     public Set<String> getConfigKeys() {
         return configKeys;
     }
+
+    private static Set<String> forwardCauseConfigKeys(Set<String> configKeys, Throwable cause) {
+        if (cause instanceof ConfigurationProblem) {
+            var merged = new HashSet<>(configKeys);
+            merged.addAll(((ConfigurationProblem) cause).getConfigKeys());
+            return merged;
+        }
+        return configKeys;
+    }
 }
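
With the `forwardCauseConfigKeys()` merge above, wrapping a configuration failure that itself exposes config keys yields the union of both key sets. A hedged usage sketch, assuming (as the `instanceof` check suggests) that `ConfigurationException` implements `ConfigurationProblem`; the property names are invented for illustration:

```java
import java.util.Set;

import io.quarkus.runtime.configuration.ConfigurationException;

class ConfigKeyMergeSketch {
    public static void main(String[] args) {
        ConfigurationException cause = new ConfigurationException("datasource URL is missing",
                Set.of("quarkus.datasource.jdbc.url"));
        ConfigurationException wrapper = new ConfigurationException("startup failed", cause,
                Set.of("quarkus.datasource.db-kind"));
        // Expected to report both keys once the cause's keys are forwarded
        System.out.println(wrapper.getConfigKeys());
    }
}
```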
diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/QuarkusPlugin.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/QuarkusPlugin.java
index a44fcfd3d8780..56a64e35c0dae 100644
--- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/QuarkusPlugin.java
+++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/QuarkusPlugin.java
@@ -18,6 +18,7 @@
 import org.gradle.api.UnknownTaskException;
 import org.gradle.api.artifacts.Configuration;
 import org.gradle.api.artifacts.ConfigurationContainer;
+import org.gradle.api.artifacts.ExternalModuleDependency;
 import org.gradle.api.artifacts.ProjectDependency;
 import org.gradle.api.file.FileCollection;
 import org.gradle.api.plugins.BasePlugin;
@@ -59,6 +60,10 @@
 import io.quarkus.gradle.tasks.QuarkusTestConfig;
 import io.quarkus.gradle.tasks.QuarkusUpdate;
 import io.quarkus.gradle.tooling.GradleApplicationModelBuilder;
+import io.quarkus.gradle.tooling.ToolingUtils;
+import io.quarkus.gradle.tooling.dependency.DependencyUtils;
+import io.quarkus.gradle.tooling.dependency.ExtensionDependency;
+import io.quarkus.gradle.tooling.dependency.ProjectExtensionDependency;
 import io.quarkus.runtime.LaunchMode;
 
 public class QuarkusPlugin implements Plugin<Project> {
@@ -508,16 +513,15 @@ private void visitProjectDep(Project project, Project dep, Set<String> visited)
         if (dep.getState().getExecuted()) {
             setupQuarkusBuildTaskDeps(project, dep, visited);
         } else {
-            dep.afterEvaluate(p -> {
-                setupQuarkusBuildTaskDeps(project, p, visited);
-            });
+            dep.afterEvaluate(p -> setupQuarkusBuildTaskDeps(project, p, visited));
         }
     }
 
     private void setupQuarkusBuildTaskDeps(Project project, Project dep, Set<String> visited) {
-        if (!visited.add(dep.getPath())) {
+        if (!visited.add(dep.getGroup() + ":" + dep.getName())) {
             return;
         }
+
         project.getLogger().debug("Configuring {} task dependencies on {} tasks", project, dep);
 
         getLazyTask(project, QUARKUS_BUILD_TASK_NAME)
@@ -555,13 +559,40 @@ protected void visitProjectDependencies(Project project, Project dep, Set {
+                        Project depProject = null;
+
                         if (d instanceof ProjectDependency) {
-                            visitProjectDep(project, ((ProjectDependency) d).getDependencyProject(), visited);
+                            depProject = ((ProjectDependency) d).getDependencyProject();
+                        } else if (d instanceof ExternalModuleDependency) {
+                            depProject = ToolingUtils.findIncludedProject(project, (ExternalModuleDependency) d);
+                        }
+
+                        if (depProject == null) {
+                            return;
+                        }
+
+                        if (depProject.getState().getExecuted()) {
+                            visitLocalProject(project, depProject, visited);
+                        } else {
+                            depProject.afterEvaluate(p -> visitLocalProject(project, p, visited));
                         }
                     });
         }
     }
 
+    private void visitLocalProject(Project project, Project localProject, Set visited) {
+        // local dependency, so we collect also its dependencies
+        visitProjectDep(project, localProject, visited);
+
+        ExtensionDependency extensionDependency = DependencyUtils
+                .getExtensionInfoOrNull(project, localProject);
+
+        if (extensionDependency instanceof ProjectExtensionDependency) {
+            visitProjectDep(project,
+                    ((ProjectExtensionDependency) extensionDependency).getDeploymentModule(), visited);
+        }
+    }
+
     private Optional> getLazyTask(Project project, String name) {
         try {
             return Optional.of(project.getTasks().named(name));
diff --git a/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/QuarkusExtensionPlugin.java b/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/QuarkusExtensionPlugin.java
index cd65914f2a6df..10b44971e697c 100644
--- a/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/QuarkusExtensionPlugin.java
+++ b/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/QuarkusExtensionPlugin.java
@@ -24,6 +24,7 @@
 import io.quarkus.extension.gradle.tasks.ExtensionDescriptorTask;
 import io.quarkus.extension.gradle.tasks.ValidateExtensionTask;
 import io.quarkus.gradle.dependency.ApplicationDeploymentClasspathBuilder;
+import io.quarkus.gradle.extension.ExtensionConstants;
 import io.quarkus.gradle.tooling.ToolingUtils;
 import io.quarkus.gradle.tooling.dependency.DependencyUtils;
 import io.quarkus.runtime.LaunchMode;
@@ -31,7 +32,7 @@
 public class QuarkusExtensionPlugin implements Plugin<Project> {
 
     public static final String DEFAULT_DEPLOYMENT_PROJECT_NAME = "deployment";
-    public static final String EXTENSION_CONFIGURATION_NAME = "quarkusExtension";
+    public static final String EXTENSION_CONFIGURATION_NAME = ExtensionConstants.EXTENSION_CONFIGURATION_NAME;
 
     public static final String EXTENSION_DESCRIPTOR_TASK_NAME = "extensionDescriptor";
     public static final String VALIDATE_EXTENSION_TASK_NAME = "validateExtension";
@@ -42,6 +43,7 @@ public class QuarkusExtensionPlugin implements Plugin<Project> {
     public void apply(Project project) {
         final QuarkusExtensionConfiguration quarkusExt = project.getExtensions().create(EXTENSION_CONFIGURATION_NAME,
                 QuarkusExtensionConfiguration.class);
+
         project.getPluginManager().apply(JavaPlugin.class);
         registerTasks(project, quarkusExt);
     }
@@ -141,17 +143,12 @@ private Project findDeploymentProject(Project project, QuarkusExtensionConfigura
             deploymentProjectName = DEFAULT_DEPLOYMENT_PROJECT_NAME;
         }
 
-        Project deploymentProject = project.getRootProject().findProject(deploymentProjectName);
+        Project deploymentProject = ToolingUtils.findLocalProject(project, deploymentProjectName);
         if (deploymentProject == null) {
-            if (project.getParent() != null) {
-                deploymentProject = project.getParent().findProject(deploymentProjectName);
-            }
-            if (deploymentProject == null) {
-                project.getLogger().warn("Unable to find deployment project with name: " + deploymentProjectName
-                        + ". You can configure the deployment project name by setting the 'deploymentModule' property in the plugin extension.");
-            }
+            project.getLogger().warn("Unable to find deployment project with name: " + deploymentProjectName
+                    + ". You can configure the deployment project name by setting the 'deploymentModule' property in the plugin extension.");
         }
+
         return deploymentProject;
     }
-
 }
diff --git a/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/dependency/DeploymentClasspathBuilder.java b/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/dependency/DeploymentClasspathBuilder.java
index 5508c2e8e1a7f..f7fef45daf664 100644
--- a/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/dependency/DeploymentClasspathBuilder.java
+++ b/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/dependency/DeploymentClasspathBuilder.java
@@ -15,7 +15,6 @@
 import io.quarkus.gradle.tooling.ToolingUtils;
 import io.quarkus.gradle.tooling.dependency.DependencyUtils;
 import io.quarkus.gradle.tooling.dependency.ExtensionDependency;
-import io.quarkus.gradle.tooling.dependency.LocalExtensionDependency;
 
 public class DeploymentClasspathBuilder {
 
@@ -32,27 +31,23 @@ public void exportDeploymentClasspath(String configurationName) {
         project.getConfigurations().create(deploymentConfigurationName, config -> {
             Configuration configuration = DependencyUtils.duplicateConfiguration(project,
                     project.getConfigurations().getByName(configurationName));
-            Set<ExtensionDependency> extensionDependencies = collectFirstMetQuarkusExtensions(configuration);
+            Set<ExtensionDependency<?>> extensionDependencies = collectFirstMetQuarkusExtensions(configuration);
 
             DependencyHandler dependencies = project.getDependencies();
 
-            for (ExtensionDependency extension : extensionDependencies) {
-                if (extension instanceof LocalExtensionDependency) {
-                    DependencyUtils.addLocalDeploymentDependency(deploymentConfigurationName,
-                            (LocalExtensionDependency) extension,
-                            dependencies);
-                } else {
-                    DependencyUtils.requireDeploymentDependency(deploymentConfigurationName, extension, dependencies);
-                    if (!alreadyProcessed.add(extension.getExtensionId())) {
-                        continue;
-                    }
+            for (ExtensionDependency<?> extension : extensionDependencies) {
+                if (!alreadyProcessed.add(extension.getExtensionId())) {
+                    continue;
                 }
+
+                dependencies.add(deploymentConfigurationName,
+                        DependencyUtils.createDeploymentDependency(dependencies, extension));
             }
         });
     }
 
-    private Set<ExtensionDependency> collectFirstMetQuarkusExtensions(Configuration configuration) {
-        Set<ExtensionDependency> firstLevelExtensions = new HashSet<>();
+    private Set<ExtensionDependency<?>> collectFirstMetQuarkusExtensions(Configuration configuration) {
+        Set<ExtensionDependency<?>> firstLevelExtensions = new HashSet<>();
         Set<ResolvedDependency> firstLevelModuleDependencies = configuration.getResolvedConfiguration()
                 .getFirstLevelModuleDependencies();
 
@@ -64,16 +59,16 @@ private Set collectFirstMetQuarkusExtensions(Configuration
         return firstLevelExtensions;
     }
 
-    private Set<ExtensionDependency> collectQuarkusExtensions(ResolvedDependency dependency,
+    private Set<ExtensionDependency<?>> collectQuarkusExtensions(ResolvedDependency dependency,
             Set visitedArtifacts) {
         if (visitedArtifacts.contains(dependency.getModule().getId())) {
             return Collections.emptySet();
         } else {
             visitedArtifacts.add(dependency.getModule().getId());
         }
-        Set<ExtensionDependency> extensions = new LinkedHashSet<>();
+        Set<ExtensionDependency<?>> extensions = new LinkedHashSet<>();
         for (ResolvedArtifact moduleArtifact : dependency.getModuleArtifacts()) {
-            ExtensionDependency extension = DependencyUtils.getExtensionInfoOrNull(project, moduleArtifact);
+            ExtensionDependency<?> extension = DependencyUtils.getExtensionInfoOrNull(project, moduleArtifact);
             if (extension != null) {
                 extensions.add(extension);
                 return extensions;
@@ -83,7 +78,7 @@ private Set collectQuarkusExtensions(ResolvedDependency dep
         for (ResolvedDependency child : dependency.getChildren()) {
             extensions.addAll(collectQuarkusExtensions(child, visitedArtifacts));
         }
+
         return extensions;
     }
-
 }
diff --git a/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/tasks/ValidateExtensionTask.java b/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/tasks/ValidateExtensionTask.java
index d80433615d424..172a88e778d5c 100644
--- a/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/tasks/ValidateExtensionTask.java
+++ b/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/tasks/ValidateExtensionTask.java
@@ -17,8 +17,10 @@
 
 import io.quarkus.bootstrap.model.AppArtifactKey;
 import io.quarkus.extension.gradle.QuarkusExtensionConfiguration;
+import io.quarkus.gradle.tooling.dependency.ArtifactExtensionDependency;
 import io.quarkus.gradle.tooling.dependency.DependencyUtils;
 import io.quarkus.gradle.tooling.dependency.ExtensionDependency;
+import io.quarkus.gradle.tooling.dependency.ProjectExtensionDependency;
 
 public class ValidateExtensionTask extends DefaultTask {
 
@@ -82,10 +84,20 @@ public void validateExtension() {
     private List<AppArtifactKey> collectRuntimeExtensionsDeploymentKeys(Set<ResolvedArtifact> runtimeArtifacts) {
         List<AppArtifactKey> runtimeExtensions = new ArrayList<>();
         for (ResolvedArtifact resolvedArtifact : runtimeArtifacts) {
-            ExtensionDependency extension = DependencyUtils.getExtensionInfoOrNull(getProject(), resolvedArtifact);
+            ExtensionDependency<?> extension = DependencyUtils.getExtensionInfoOrNull(getProject(), resolvedArtifact);
             if (extension != null) {
-                runtimeExtensions.add(new AppArtifactKey(extension.getDeploymentModule().getGroupId(),
-                        extension.getDeploymentModule().getArtifactId()));
+                if (extension instanceof ProjectExtensionDependency) {
+                    final ProjectExtensionDependency ped = (ProjectExtensionDependency) extension;
+
+                    runtimeExtensions
+                            .add(new AppArtifactKey(ped.getDeploymentModule().getGroup().toString(),
+                                    ped.getDeploymentModule().getName()));
+                } else if (extension instanceof ArtifactExtensionDependency) {
+                    final ArtifactExtensionDependency aed = (ArtifactExtensionDependency) extension;
+
+                    runtimeExtensions.add(new AppArtifactKey(aed.getDeploymentModule().getGroupId(),
+                            aed.getDeploymentModule().getArtifactId()));
+                }
             }
         }
         return runtimeExtensions;
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ApplicationDeploymentClasspathBuilder.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ApplicationDeploymentClasspathBuilder.java
index 5d4c076161efb..5282259ec4582 100644
--- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ApplicationDeploymentClasspathBuilder.java
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ApplicationDeploymentClasspathBuilder.java
@@ -31,22 +31,23 @@
 import io.quarkus.gradle.tooling.ToolingUtils;
 import io.quarkus.gradle.tooling.dependency.DependencyUtils;
 import io.quarkus.gradle.tooling.dependency.ExtensionDependency;
-import io.quarkus.gradle.tooling.dependency.IncludedBuildExtensionDependency;
-import io.quarkus.gradle.tooling.dependency.LocalExtensionDependency;
 import io.quarkus.runtime.LaunchMode;
 
 public class ApplicationDeploymentClasspathBuilder {
 
-    private static String getRuntimeConfigName(LaunchMode mode, boolean base) {
-        final StringBuilder sb = new StringBuilder();
-        sb.append("quarkus");
+    private static String getLaunchModeAlias(LaunchMode mode) {
         if (mode == LaunchMode.DEVELOPMENT) {
-            sb.append("Dev");
-        } else if (mode == LaunchMode.TEST) {
-            sb.append("Test");
-        } else {
-            sb.append("Prod");
+            return "Dev";
         }
+        if (mode == LaunchMode.TEST) {
+            return "Test";
+        }
+        return "Prod";
+    }
+
+    private static String getRuntimeConfigName(LaunchMode mode, boolean base) {
+        final StringBuilder sb = new StringBuilder();
+        sb.append("quarkus").append(getLaunchModeAlias(mode));
         if (base) {
             sb.append("Base");
         }
@@ -118,6 +119,8 @@ public static void initConfigurations(Project project) {
     private final String runtimeConfigurationName;
     private final String platformConfigurationName;
     private final String deploymentConfigurationName;
+    private final String compileOnlyConfigurationName;
+
     /**
      * The platform configuration updates the PlatformImports, but since the PlatformImports don't
      * have a place to be stored in the project, they're stored here. The way that extensions are
@@ -136,10 +139,12 @@ public ApplicationDeploymentClasspathBuilder(Project project, LaunchMode mode) {
         this.platformConfigurationName = ToolingUtils.toPlatformConfigurationName(this.runtimeConfigurationName);
         this.deploymentConfigurationName = ToolingUtils.toDeploymentConfigurationName(this.runtimeConfigurationName);
         this.platformImportName = project.getPath() + ":" + this.platformConfigurationName;
+        this.compileOnlyConfigurationName = "quarkus" + getLaunchModeAlias(mode) + "CompileOnlyConfiguration";
 
         setUpPlatformConfiguration();
         setUpRuntimeConfiguration();
         setUpDeploymentConfiguration();
+        setUpCompileOnlyConfiguration();
     }
 
     private void setUpPlatformConfiguration() {
@@ -217,11 +222,11 @@ private void setUpDeploymentConfiguration() {
                 configuration.getDependencies().addAllLater(dependencyListProperty.value(project.provider(() -> {
                     ConditionalDependenciesEnabler cdEnabler = new ConditionalDependenciesEnabler(project, mode,
                             enforcedPlatforms);
-                    final Collection<ExtensionDependency> allExtensions = cdEnabler.getAllExtensions();
-                    Set<ExtensionDependency> extensions = collectFirstMetQuarkusExtensions(getRawRuntimeConfiguration(),
+                    final Collection<ExtensionDependency<?>> allExtensions = cdEnabler.getAllExtensions();
+                    Set<ExtensionDependency<?>> extensions = collectFirstMetQuarkusExtensions(getRawRuntimeConfiguration(),
                             allExtensions);
                     // Add conditional extensions
-                    for (ExtensionDependency knownExtension : allExtensions) {
+                    for (ExtensionDependency<?> knownExtension : allExtensions) {
                         if (knownExtension.isConditional()) {
                             extensions.add(knownExtension);
                         }
@@ -230,23 +235,13 @@ private void setUpDeploymentConfiguration() {
                     final Set alreadyProcessed = new HashSet<>(extensions.size());
                     final DependencyHandler dependencies = project.getDependencies();
                     final Set deploymentDependencies = new HashSet<>();
-                    for (ExtensionDependency extension : extensions) {
-                        if (extension instanceof IncludedBuildExtensionDependency) {
-                            deploymentDependencies.add(((IncludedBuildExtensionDependency) extension).getDeployment());
-                        } else if (extension instanceof LocalExtensionDependency) {
-                            LocalExtensionDependency localExtensionDependency = (LocalExtensionDependency) extension;
-                            deploymentDependencies.add(
-                                    dependencies.project(Collections.singletonMap("path",
-                                            localExtensionDependency.findDeploymentModulePath())));
-                        } else {
-                            if (!alreadyProcessed.add(extension.getExtensionId())) {
-                                continue;
-                            }
-                            deploymentDependencies.add(dependencies.create(
-                                    extension.getDeploymentModule().getGroupId() + ":"
-                                            + extension.getDeploymentModule().getArtifactId() + ":"
-                                            + extension.getDeploymentModule().getVersion()));
+                    for (ExtensionDependency<?> extension : extensions) {
+                        if (!alreadyProcessed.add(extension.getExtensionId())) {
+                            continue;
                         }
+
+                        deploymentDependencies.add(
+                                DependencyUtils.createDeploymentDependency(dependencies, extension));
                     }
                     return deploymentDependencies;
                 })));
@@ -254,6 +249,16 @@ private void setUpDeploymentConfiguration() {
         }
     }
 
+    private void setUpCompileOnlyConfiguration() {
+        if (!project.getConfigurations().getNames().contains(compileOnlyConfigurationName)) {
+            project.getConfigurations().register(compileOnlyConfigurationName, config -> {
+                config.extendsFrom(project.getConfigurations().getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME));
+                config.shouldResolveConsistentlyWith(getDeploymentConfiguration());
+                config.setCanBeConsumed(false);
+            });
+        }
+    }
+
     public Configuration getPlatformConfiguration() {
         return project.getConfigurations().getByName(this.platformConfigurationName);
     }
@@ -274,6 +279,14 @@ public Configuration getDeploymentConfiguration() {
         return project.getConfigurations().getByName(this.deploymentConfigurationName);
     }
 
+    /**
+     * Compile-only configuration which is consistent with the deployment one
+     */
+    public Configuration getCompileOnly() {
+        this.getDeploymentConfiguration().resolve();
+        return project.getConfigurations().getByName(compileOnlyConfigurationName);
+    }
+
     /**
      * Forces the platform configuration to resolve and then uses that to populate platform imports.
      */
@@ -282,10 +295,10 @@ public PlatformImports getPlatformImports() {
         return platformImports.get(this.platformImportName);
     }
 
-    private Set<ExtensionDependency> collectFirstMetQuarkusExtensions(Configuration configuration,
-            Collection<ExtensionDependency> knownExtensions) {
+    private Set<ExtensionDependency<?>> collectFirstMetQuarkusExtensions(Configuration configuration,
+            Collection<ExtensionDependency<?>> knownExtensions) {
 
-        Set<ExtensionDependency> firstLevelExtensions = new HashSet<>();
+        Set<ExtensionDependency<?>> firstLevelExtensions = new HashSet<>();
         Set<ResolvedDependency> firstLevelModuleDependencies = configuration.getResolvedConfiguration()
                 .getFirstLevelModuleDependencies();
 
@@ -297,15 +310,15 @@ private Set collectFirstMetQuarkusExtensions(Configuration
         return firstLevelExtensions;
     }
 
-    private Set<ExtensionDependency> collectQuarkusExtensions(ResolvedDependency dependency, Set<String> visitedArtifacts,
-            Collection<ExtensionDependency> knownExtensions) {
+    private Set<ExtensionDependency<?>> collectQuarkusExtensions(ResolvedDependency dependency, Set<String> visitedArtifacts,
+            Collection<ExtensionDependency<?>> knownExtensions) {
         String artifactKey = String.format("%s:%s", dependency.getModuleGroup(), dependency.getModuleName());
         if (!visitedArtifacts.add(artifactKey)) {
             return Collections.emptySet();
         }
 
-        Set<ExtensionDependency> extensions = new LinkedHashSet<>();
-        ExtensionDependency extension = getExtensionOrNull(dependency.getModuleGroup(), dependency.getModuleName(),
+        Set<ExtensionDependency<?>> extensions = new LinkedHashSet<>();
+        ExtensionDependency<?> extension = getExtensionOrNull(dependency.getModuleGroup(), dependency.getModuleName(),
                 dependency.getModuleVersion(), knownExtensions);
         if (extension != null) {
             extensions.add(extension);
@@ -317,9 +330,9 @@ private Set collectQuarkusExtensions(ResolvedDependency dep
         return extensions;
     }
 
-    private ExtensionDependency getExtensionOrNull(String group, String artifact, String version,
-            Collection<ExtensionDependency> knownExtensions) {
-        for (ExtensionDependency knownExtension : knownExtensions) {
+    private ExtensionDependency<?> getExtensionOrNull(String group, String artifact, String version,
+            Collection<ExtensionDependency<?>> knownExtensions) {
+        for (ExtensionDependency<?> knownExtension : knownExtensions) {
             if (group.equals(knownExtension.getGroup()) && artifact.equals(knownExtension.getName())
                     && version.equals(knownExtension.getVersion())) {
                 return knownExtension;
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ConditionalDependenciesEnabler.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ConditionalDependenciesEnabler.java
index 6ab5650d7b5a9..4d00b1055fcdd 100644
--- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ConditionalDependenciesEnabler.java
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ConditionalDependenciesEnabler.java
@@ -26,12 +26,12 @@ public class ConditionalDependenciesEnabler {
     /**
      * Links dependencies to extensions
      */
-    private final Map<GACT, Set<ExtensionDependency>> featureVariants = new HashMap<>();
+    private final Map<GACT, Set<ExtensionDependency<?>>> featureVariants = new HashMap<>();
     /**
      * Despite its name, only contains extensions which have no conditional dependencies, or have
      * resolved their conditional dependencies.
      */
-    private final Map<ModuleVersionIdentifier, ExtensionDependency> allExtensions = new HashMap<>();
+    private final Map<ModuleVersionIdentifier, ExtensionDependency<?>> allExtensions = new HashMap<>();
     private final Project project;
     private final Configuration enforcedPlatforms;
     private final Set<ArtifactKey> existingArtifacts = new HashSet<>();
@@ -74,10 +74,9 @@ public ConditionalDependenciesEnabler(Project project, LaunchMode mode,
             }
             reset();
         }
-
     }
 
-    public Collection<ExtensionDependency> getAllExtensions() {
+    public Collection<ExtensionDependency<?>> getAllExtensions() {
         return allExtensions.values();
     }
 
@@ -92,7 +91,7 @@ private void collectConditionalDependencies(Set runtimeArtifac
         for (ResolvedArtifact artifact : runtimeArtifacts) {
             // Add to master list of artifacts:
             existingArtifacts.add(getKey(artifact));
-            ExtensionDependency extension = DependencyUtils.getExtensionInfoOrNull(project, artifact);
+            ExtensionDependency<?> extension = DependencyUtils.getExtensionInfoOrNull(project, artifact);
             // If this artifact represents an extension:
             if (extension != null) {
                 // Add to master list of accepted extensions:
@@ -103,6 +102,12 @@ private void collectConditionalDependencies(Set runtimeArtifac
                         queueConditionalDependency(extension, conditionalDep);
                     }
                 }
+
+                // If the extension doesn't have any conditions we just enable it by default
+                if (extension.getDependencyConditions().isEmpty()) {
+                    extension.setConditional(true);
+                    enableConditionalDependency(extension.getExtensionId());
+                }
             }
         }
     }
@@ -121,7 +126,7 @@ private boolean resolveConditionalDependency(Dependency conditionalDep) {
                     && conditionalDep.getVersion().equals(artifact.getModuleVersion().getId().getVersion())
                     && artifact.getModuleVersion().getId().getGroup().equals(conditionalDep.getGroup())) {
                 // Once the dependency is found, reload the extension info from within
-                final ExtensionDependency extensionDependency = DependencyUtils.getExtensionInfoOrNull(project, artifact);
+                final ExtensionDependency<?> extensionDependency = DependencyUtils.getExtensionInfoOrNull(project, artifact);
                 // Now check if this conditional dependency is resolved given the latest graph evolution
                 if (extensionDependency != null && (extensionDependency.getDependencyConditions().isEmpty()
                         || exist(extensionDependency.getDependencyConditions()))) {
@@ -141,7 +146,7 @@ private boolean resolveConditionalDependency(Dependency conditionalDep) {
         for (ResolvedArtifact artifact : resolvedArtifacts) {
             // First add the artifact to the master list
             existingArtifacts.add(getKey(artifact));
-            ExtensionDependency extensionDependency = DependencyUtils.getExtensionInfoOrNull(project, artifact);
+            ExtensionDependency<?> extensionDependency = DependencyUtils.getExtensionInfoOrNull(project, artifact);
             if (extensionDependency == null) {
                 continue;
             }
@@ -159,7 +164,7 @@ private boolean resolveConditionalDependency(Dependency conditionalDep) {
         return satisfied;
     }
 
-    private void queueConditionalDependency(ExtensionDependency extension, Dependency conditionalDep) {
+    private void queueConditionalDependency(ExtensionDependency<?> extension, Dependency conditionalDep) {
         // 1. Add to master list of unresolved/unsatisfied dependencies
         // 2. Add map entry to link dependency to extension
         featureVariants.computeIfAbsent(getFeatureKey(conditionalDep), k -> {
@@ -177,7 +182,7 @@ private Configuration createConditionalDependenciesConfiguration(Project project
     }
 
     private void enableConditionalDependency(ModuleVersionIdentifier dependency) {
-        final Set extensions = featureVariants.remove(getFeatureKey(dependency));
+        final Set<ExtensionDependency<?>> extensions = featureVariants.remove(getFeatureKey(dependency));
         if (extensions == null) {
             return;
         }
@@ -193,7 +198,7 @@ private boolean exists(Dependency dependency) {
                 .contains(ArtifactKey.of(dependency.getGroup(), dependency.getName(), null, ArtifactCoords.TYPE_JAR));
     }
 
-    public boolean exists(ExtensionDependency dependency) {
+    public boolean exists(ExtensionDependency<?> dependency) {
         return existingArtifacts
                 .contains(ArtifactKey.of(dependency.getGroup(), dependency.getName(), null, ArtifactCoords.TYPE_JAR));
     }
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/extension/ConfigurationUtils.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/extension/ConfigurationUtils.java
new file mode 100644
index 0000000000000..5645a94bee2ee
--- /dev/null
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/extension/ConfigurationUtils.java
@@ -0,0 +1,47 @@
+package io.quarkus.gradle.extension;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+import org.gradle.api.GradleException;
+import org.gradle.api.provider.ListProperty;
+import org.gradle.api.provider.Property;
+import org.jetbrains.annotations.NotNull;
+
+// This is necessary because, for included builds, the returned `Project` instance and its
+// `QuarkusExtensionConfiguration` extension are provided by a different class loader, which
+// prevents us from declaring a shared interface or casting to the extension type directly.
+public class ConfigurationUtils {
+    private static Object callGetter(@NotNull Object extensionConfiguration, String getterName) {
+        final Method getterMethod;
+
+        try {
+            getterMethod = extensionConfiguration.getClass().getMethod(getterName);
+        } catch (NoSuchMethodException e) {
+            throw new GradleException(
+                    "Didn't find method " + getterName + " on class " + extensionConfiguration.getClass().getName(), e);
+        }
+
+        try {
+            return getterMethod.invoke(extensionConfiguration);
+        } catch (IllegalAccessException | InvocationTargetException e) {
+            throw new GradleException(
+                    "Failed to call method " + getterName + " on class " + extensionConfiguration.getClass().getName(), e);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    public static Property<String> getDeploymentModule(@NotNull Object extensionConfiguration) {
+        return (Property<String>) callGetter(extensionConfiguration, "getDeploymentModule");
+    }
+
+    @SuppressWarnings("unchecked")
+    public static ListProperty<String> getConditionalDependencies(@NotNull Object extensionConfiguration) {
+        return (ListProperty<String>) callGetter(extensionConfiguration, "getConditionalDependencies");
+    }
+
+    @SuppressWarnings("unchecked")
+    public static ListProperty<String> getDependencyConditions(@NotNull Object extensionConfiguration) {
+        return (ListProperty<String>) callGetter(extensionConfiguration, "getDependencyConditions");
+    }
+}
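
A minimal, illustrative usage sketch of the reflective accessors above (not part of the patch): the extension object is looked up by name via `ExtensionConstants.EXTENSION_CONFIGURATION_NAME` and then read through `ConfigurationUtils`, mirroring what the `DependencyUtils` changes further down do. The wrapper class and variable names here are invented for illustration.

[source,java]
----
import java.util.List;

import org.gradle.api.Project;

import io.quarkus.gradle.extension.ConfigurationUtils;
import io.quarkus.gradle.extension.ExtensionConstants;

class ExtensionConfigurationReader {

    // Reads the quarkusExtension settings from a project that may come from an included build
    // (and therefore from a different class loader), which is why plain casts are not an option.
    static void read(Project extensionProject) {
        Object extensionConfiguration = extensionProject.getExtensions()
                .findByName(ExtensionConstants.EXTENSION_CONFIGURATION_NAME);
        if (extensionConfiguration == null) {
            return; // the project does not declare a quarkusExtension block
        }
        String deploymentModulePath = ConfigurationUtils.getDeploymentModule(extensionConfiguration).getOrNull();
        List<String> conditionalDependencies = ConfigurationUtils.getConditionalDependencies(extensionConfiguration)
                .getOrElse(List.of());
        List<String> dependencyConditions = ConfigurationUtils.getDependencyConditions(extensionConfiguration)
                .getOrElse(List.of());
        extensionProject.getLogger().lifecycle("deployment module: {}, conditional deps: {}, conditions: {}",
                deploymentModulePath, conditionalDependencies, dependencyConditions);
    }
}
----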
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/extension/ExtensionConstants.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/extension/ExtensionConstants.java
new file mode 100644
index 0000000000000..51d57d92dc2e5
--- /dev/null
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/extension/ExtensionConstants.java
@@ -0,0 +1,5 @@
+package io.quarkus.gradle.extension;
+
+public interface ExtensionConstants {
+    String EXTENSION_CONFIGURATION_NAME = "quarkusExtension";
+}
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/GradleApplicationModelBuilder.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/GradleApplicationModelBuilder.java
index 378a06566f018..f5b0c7451ee81 100644
--- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/GradleApplicationModelBuilder.java
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/GradleApplicationModelBuilder.java
@@ -123,10 +123,42 @@ public Object buildAll(String modelName, ModelParameter parameter, Project proje
         collectDependencies(classpathConfig.getResolvedConfiguration(), workspaceDiscovery,
                 project, modelBuilder, appArtifact.getWorkspaceModule().mutable());
         collectExtensionDependencies(project, deploymentConfig, modelBuilder);
+        addCompileOnly(project, classpathBuilder, modelBuilder);
 
         return modelBuilder.build();
     }
 
+    private static void addCompileOnly(Project project, ApplicationDeploymentClasspathBuilder classpathBuilder,
+            ApplicationModelBuilder modelBuilder) {
+        var compileOnlyConfig = classpathBuilder.getCompileOnly();
+        final List<org.gradle.api.artifacts.ResolvedDependency> queue = new ArrayList<>(
+                compileOnlyConfig.getResolvedConfiguration().getFirstLevelModuleDependencies());
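+        // Breadth-first walk of the compile-only graph: children are only queued when at least one artifact of
+        // the current node was newly flagged COMPILE_ONLY, so already-processed subtrees are not revisited.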
+        for (int i = 0; i < queue.size(); ++i) {
+            var d = queue.get(i);
+            boolean skip = true;
+            for (var a : d.getModuleArtifacts()) {
+                if (!isDependency(a)) {
+                    continue;
+                }
+                var moduleId = a.getModuleVersion().getId();
+                var key = ArtifactKey.of(moduleId.getGroup(), moduleId.getName(), a.getClassifier(), a.getType());
+                var appDep = modelBuilder.getDependency(key);
+                if (appDep == null) {
+                    addArtifactDependency(project, modelBuilder, a);
+                    appDep = modelBuilder.getDependency(key);
+                    appDep.clearFlag(DependencyFlags.DEPLOYMENT_CP);
+                }
+                if (!appDep.isFlagSet(DependencyFlags.COMPILE_ONLY)) {
+                    skip = false;
+                    appDep.setFlags(DependencyFlags.COMPILE_ONLY);
+                }
+            }
+            if (!skip) {
+                queue.addAll(d.getChildren());
+            }
+        }
+    }
+
     public static ResolvedDependency getProjectArtifact(Project project, boolean workspaceDiscovery) {
         final ResolvedDependencyBuilder appArtifact = ResolvedDependencyBuilder.newInstance()
                 .setGroupId(project.getGroup().toString())
@@ -191,39 +223,44 @@ private void collectExtensionDependencies(Project project, Configuration deploym
             ApplicationModelBuilder modelBuilder) {
         final ResolvedConfiguration rc = deploymentConfiguration.getResolvedConfiguration();
         for (ResolvedArtifact a : rc.getResolvedArtifacts()) {
-            if (a.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) {
-                ProjectComponentIdentifier projectComponentIdentifier = (ProjectComponentIdentifier) a.getId()
-                        .getComponentIdentifier();
-                var includedBuild = ToolingUtils.includedBuild(project, projectComponentIdentifier);
-                Project projectDep = null;
-                if (includedBuild != null) {
-                    projectDep = ToolingUtils.includedBuildProject((IncludedBuildInternal) includedBuild,
-                            projectComponentIdentifier);
-                } else {
-                    projectDep = project.getRootProject().findProject(projectComponentIdentifier.getProjectPath());
-                }
-                Objects.requireNonNull(projectDep, "project " + projectComponentIdentifier.getProjectPath() + " should exist");
-                SourceSetContainer sourceSets = projectDep.getExtensions().getByType(SourceSetContainer.class);
-
-                SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
-                ResolvedDependencyBuilder dep = modelBuilder.getDependency(
-                        toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(), a.getClassifier()));
-                if (dep == null) {
-                    dep = toDependency(a, mainSourceSet);
-                    modelBuilder.addDependency(dep);
-                }
-                dep.setDeploymentCp();
-                dep.clearFlag(DependencyFlags.RELOADABLE);
-            } else if (isDependency(a)) {
-                ResolvedDependencyBuilder dep = modelBuilder.getDependency(
-                        toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(), a.getClassifier()));
-                if (dep == null) {
-                    dep = toDependency(a);
-                    modelBuilder.addDependency(dep);
-                }
-                dep.setDeploymentCp();
-                dep.clearFlag(DependencyFlags.RELOADABLE);
+            addArtifactDependency(project, modelBuilder, a);
+        }
+    }
+
+    private static void addArtifactDependency(Project project, ApplicationModelBuilder modelBuilder, ResolvedArtifact a) {
+        if (a.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) {
+            ProjectComponentIdentifier projectComponentIdentifier = (ProjectComponentIdentifier) a.getId()
+                    .getComponentIdentifier();
+            var includedBuild = ToolingUtils.includedBuild(project, projectComponentIdentifier.getBuild().getName());
+            final Project projectDep;
+            if (includedBuild != null) {
+                projectDep = ToolingUtils.includedBuildProject((IncludedBuildInternal) includedBuild,
+                        projectComponentIdentifier.getProjectPath());
+            } else {
+                projectDep = project.getRootProject().findProject(projectComponentIdentifier.getProjectPath());
             }
+            Objects.requireNonNull(projectDep,
+                    () -> "project " + projectComponentIdentifier.getProjectPath() + " should exist");
+            SourceSetContainer sourceSets = projectDep.getExtensions().getByType(SourceSetContainer.class);
+
+            SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
+            ResolvedDependencyBuilder dep = modelBuilder.getDependency(
+                    toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(), a.getClassifier()));
+            if (dep == null) {
+                dep = toDependency(a, mainSourceSet);
+                modelBuilder.addDependency(dep);
+            }
+            dep.setDeploymentCp();
+            dep.clearFlag(DependencyFlags.RELOADABLE);
+        } else if (isDependency(a)) {
+            ResolvedDependencyBuilder dep = modelBuilder.getDependency(
+                    toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(), a.getClassifier()));
+            if (dep == null) {
+                dep = toDependency(a);
+                modelBuilder.addDependency(dep);
+            }
+            dep.setDeploymentCp();
+            dep.clearFlag(DependencyFlags.RELOADABLE);
         }
     }
 
@@ -291,11 +328,18 @@ private void collectDependencies(org.gradle.api.artifacts.ResolvedDependency res
         for (ResolvedArtifact a : resolvedDep.getModuleArtifacts()) {
             final ArtifactKey artifactKey = toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(),
                     a.getClassifier());
-            if (!isDependency(a) || modelBuilder.getDependency(artifactKey) != null) {
+            if (!isDependency(a)) {
+                continue;
+            }
+            var depBuilder = modelBuilder.getDependency(artifactKey);
+            if (depBuilder != null) {
+                if (isFlagOn(flags, COLLECT_DIRECT_DEPS)) {
+                    depBuilder.setDirect(true);
+                }
                 continue;
             }
             final ArtifactCoords depCoords = toArtifactCoords(a);
-            final ResolvedDependencyBuilder depBuilder = ResolvedDependencyBuilder.newInstance()
+            depBuilder = ResolvedDependencyBuilder.newInstance()
                     .setCoords(depCoords)
                     .setRuntimeCp()
                     .setDeploymentCp();
@@ -318,13 +362,13 @@ private void collectDependencies(org.gradle.api.artifacts.ResolvedDependency res
                 final String classifier = a.getClassifier();
                 if (classifier == null || classifier.isEmpty()) {
                     final IncludedBuild includedBuild = ToolingUtils.includedBuild(project.getRootProject(),
-                            (ProjectComponentIdentifier) a.getId().getComponentIdentifier());
+                            ((ProjectComponentIdentifier) a.getId().getComponentIdentifier()).getBuild().getName());
                     if (includedBuild != null) {
                         final PathList.Builder pathBuilder = PathList.builder();
 
                         if (includedBuild instanceof IncludedBuildInternal) {
                             projectDep = ToolingUtils.includedBuildProject((IncludedBuildInternal) includedBuild,
-                                    (ProjectComponentIdentifier) a.getId().getComponentIdentifier());
+                                    ((ProjectComponentIdentifier) a.getId().getComponentIdentifier()).getProjectPath());
                         }
                         if (projectDep != null) {
                             projectModule = initProjectModuleAndBuildPaths(projectDep, a, modelBuilder, depBuilder,
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/ToolingUtils.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/ToolingUtils.java
index 00d4568e739de..d2a945aa69ef1 100644
--- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/ToolingUtils.java
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/ToolingUtils.java
@@ -4,18 +4,23 @@
 import java.io.ObjectOutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.Objects;
 
 import org.gradle.api.Project;
 import org.gradle.api.Task;
+import org.gradle.api.artifacts.ExternalModuleDependency;
 import org.gradle.api.artifacts.ModuleDependency;
-import org.gradle.api.artifacts.component.ProjectComponentIdentifier;
 import org.gradle.api.attributes.Category;
 import org.gradle.api.initialization.IncludedBuild;
+import org.gradle.api.invocation.Gradle;
+import org.gradle.composite.internal.DefaultIncludedBuild;
 import org.gradle.internal.composite.IncludedBuildInternal;
+import org.gradle.internal.composite.IncludedRootBuild;
 
 import io.quarkus.bootstrap.model.ApplicationModel;
 import io.quarkus.bootstrap.model.gradle.ModelParameter;
 import io.quarkus.bootstrap.model.gradle.impl.ModelParameterImpl;
+import io.quarkus.maven.dependency.ArtifactCoords;
 import io.quarkus.runtime.LaunchMode;
 
 public class ToolingUtils {
@@ -38,21 +43,100 @@ public static boolean isEnforcedPlatform(ModuleDependency module) {
                 || Category.REGULAR_PLATFORM.equals(category.getName()));
     }
 
-    public static IncludedBuild includedBuild(final Project project,
-            final ProjectComponentIdentifier projectComponentIdentifier) {
-        final String name = projectComponentIdentifier.getBuild().getName();
+    public static IncludedBuild includedBuild(final Project project, final String buildName) {
+        Gradle currentGradle = project.getRootProject().getGradle();
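+        // Walk up the chain of Gradle invocations: in a composite, the requested build may be registered on a
+        // parent invocation, while the root build itself (IncludedRootBuild) is never a match.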
+        while (null != currentGradle) {
+            for (IncludedBuild ib : currentGradle.getIncludedBuilds()) {
+                if (ib instanceof IncludedRootBuild) {
+                    continue;
+                }
+
+                if (ib.getName().equals(buildName)) {
+                    return ib;
+                }
+            }
+
+            currentGradle = currentGradle.getParent();
+        }
+
+        return null;
+    }
+
+    public static Project includedBuildProject(IncludedBuildInternal includedBuild, final String projectPath) {
+        return includedBuild.getTarget().getMutableModel().getRootProject().findProject(projectPath);
+    }
+
+    public static Project findLocalProject(final Project project, final String projectPath) {
+        if (projectPath.startsWith(":")) {
+            return project.getRootProject().findProject(projectPath);
+        } else {
+            Project currentProject = project;
+            while (currentProject != null) {
+                final Project foundProject = currentProject.findProject(projectPath);
+                if (foundProject != null) {
+                    return foundProject;
+                }
+
+                currentProject = currentProject.getParent();
+            }
+
+            return null;
+        }
+    }
+
+    public static Project findLocalProject(final Project project, final ArtifactCoords artifactCoords) {
+        for (Project subproject : project.getRootProject().getSubprojects()) {
+            if (subproject.getGroup().equals(artifactCoords.getGroupId()) &&
+                    subproject.getName().equals(artifactCoords.getArtifactId()) &&
+                    (artifactCoords.getVersion() == null || subproject.getVersion().equals(artifactCoords.getVersion()))) {
+                return subproject;
+            }
+        }
+
+        return null;
+    }
+
+    public static Project findIncludedProject(Project project, ExternalModuleDependency dependency) {
         for (IncludedBuild ib : project.getRootProject().getGradle().getIncludedBuilds()) {
-            if (ib.getName().equals(name)) {
-                return ib;
+            if (ib instanceof IncludedRootBuild) {
+                continue;
             }
+
+            final Project includedBuildProject = findIncludedBuildProject(ib, dependency);
+            if (includedBuildProject != null) {
+                return includedBuildProject;
+            }
+        }
+
+        final Gradle parentGradle = project.getRootProject().getGradle().getParent();
+        if (parentGradle != null) {
+            return findIncludedProject(parentGradle.getRootProject(), dependency);
+        } else {
+            return null;
         }
+    }
+
+    private static Project findLocalProject(Project project, ExternalModuleDependency dependency) {
+        for (Project p : project.getRootProject().getSubprojects()) {
+            if (Objects.equals(p.getGroup(), dependency.getGroup())
+                    && Objects.equals(p.getName(), dependency.getName())
+                    && (dependency.getVersion() == null || Objects.equals(p.getVersion(), dependency.getVersion()))) {
+                return p;
+            }
+        }
+
         return null;
     }
 
-    public static Project includedBuildProject(IncludedBuildInternal includedBuild,
-            final ProjectComponentIdentifier componentIdentifier) {
-        return includedBuild.getTarget().getMutableModel().getRootProject().findProject(
-                componentIdentifier.getProjectPath());
+    private static Project findIncludedBuildProject(IncludedBuild ib, ExternalModuleDependency dependency) {
+        if (!(ib instanceof DefaultIncludedBuild.IncludedBuildImpl)) {
+            return null;
+        }
+
+        final DefaultIncludedBuild.IncludedBuildImpl dib = (DefaultIncludedBuild.IncludedBuildImpl) ib;
+        final Project rootProject = dib.getTarget().getMutableModel().getRootProject();
+
+        return findLocalProject(rootProject, dependency);
     }
 
     public static Path serializeAppModel(ApplicationModel appModel, Task context, boolean test) throws IOException {
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ArtifactExtensionDependency.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ArtifactExtensionDependency.java
new file mode 100644
index 0000000000000..a9c48817bb80f
--- /dev/null
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ArtifactExtensionDependency.java
@@ -0,0 +1,18 @@
+package io.quarkus.gradle.tooling.dependency;
+
+import java.util.List;
+
+import org.gradle.api.artifacts.Dependency;
+import org.gradle.api.artifacts.ModuleVersionIdentifier;
+
+import io.quarkus.maven.dependency.ArtifactCoords;
+import io.quarkus.maven.dependency.ArtifactKey;
+
+public class ArtifactExtensionDependency extends ExtensionDependency<ArtifactCoords> {
+    public ArtifactExtensionDependency(ModuleVersionIdentifier extensionId,
+            ArtifactCoords deploymentModule,
+            List<Dependency> conditionalDependencies,
+            List<ArtifactKey> dependencyConditions) {
+        super(extensionId, deploymentModule, conditionalDependencies, dependencyConditions);
+    }
+}
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/DependencyUtils.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/DependencyUtils.java
index a669c6fd7b1ee..15682e835114d 100644
--- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/DependencyUtils.java
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/DependencyUtils.java
@@ -1,8 +1,8 @@
 package io.quarkus.gradle.tooling.dependency;
 
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.nio.file.FileSystem;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -11,6 +11,7 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
+import java.util.Set;
 
 import org.gradle.api.GradleException;
 import org.gradle.api.Project;
@@ -22,16 +23,26 @@
 import org.gradle.api.artifacts.component.ProjectComponentIdentifier;
 import org.gradle.api.artifacts.dsl.DependencyHandler;
 import org.gradle.api.capabilities.Capability;
+import org.gradle.api.initialization.IncludedBuild;
+import org.gradle.api.internal.artifacts.DefaultModuleVersionIdentifier;
+import org.gradle.api.internal.artifacts.dependencies.DefaultExternalModuleDependency;
+import org.gradle.api.internal.artifacts.dependencies.DefaultProjectDependency;
+import org.gradle.api.internal.project.ProjectInternal;
+import org.gradle.api.provider.ListProperty;
 import org.gradle.api.tasks.SourceSet;
 import org.gradle.api.tasks.SourceSetContainer;
 import org.gradle.internal.composite.IncludedBuildInternal;
+import org.jetbrains.annotations.Nullable;
 
 import io.quarkus.bootstrap.BootstrapConstants;
 import io.quarkus.bootstrap.util.BootstrapUtils;
 import io.quarkus.fs.util.ZipUtils;
+import io.quarkus.gradle.extension.ConfigurationUtils;
+import io.quarkus.gradle.extension.ExtensionConstants;
 import io.quarkus.gradle.tooling.ToolingUtils;
 import io.quarkus.maven.dependency.ArtifactCoords;
 import io.quarkus.maven.dependency.ArtifactKey;
+import io.quarkus.maven.dependency.GACT;
 import io.quarkus.maven.dependency.GACTV;
 
 public class DependencyUtils {
@@ -75,96 +86,261 @@ public static String asDependencyNotation(ArtifactCoords artifactCoords) {
         return String.join(":", artifactCoords.getGroupId(), artifactCoords.getArtifactId(), artifactCoords.getVersion());
     }
 
-    public static ExtensionDependency getExtensionInfoOrNull(Project project, ResolvedArtifact artifact) {
+    public static ExtensionDependency<?> getExtensionInfoOrNull(Project project, ResolvedArtifact artifact) {
         ModuleVersionIdentifier artifactId = artifact.getModuleVersion().getId();
-        File artifactFile = artifact.getFile();
+
+        ExtensionDependency<?> projectDependency;
 
         if (artifact.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) {
-            ProjectComponentIdentifier componentIdentifier = ((ProjectComponentIdentifier) artifact.getId()
-                    .getComponentIdentifier());
-            Project projectDep = project.getRootProject().findProject(
-                    componentIdentifier.getProjectPath());
-            SourceSetContainer sourceSets = projectDep == null ? null
-                    : projectDep.getExtensions().findByType(SourceSetContainer.class);
-            final String classifier = artifact.getClassifier();
-            boolean isIncludedBuild = false;
-            if ((!componentIdentifier.getBuild().isCurrentBuild() || sourceSets == null)
-                    && (classifier == null || classifier.isEmpty())) {
-                var includedBuild = ToolingUtils.includedBuild(project, componentIdentifier);
-                if (includedBuild instanceof IncludedBuildInternal) {
-                    projectDep = ToolingUtils.includedBuildProject((IncludedBuildInternal) includedBuild, componentIdentifier);
-                    sourceSets = projectDep == null ? null : projectDep.getExtensions().findByType(SourceSetContainer.class);
-                    isIncludedBuild = true;
-                }
-            }
-            if (sourceSets != null) {
-                SourceSet mainSourceSet = sourceSets.findByName(SourceSet.MAIN_SOURCE_SET_NAME);
-                if (mainSourceSet == null) {
-                    return null;
-                }
-                File resourcesDir = mainSourceSet.getOutput().getResourcesDir();
-                Path descriptorPath = resourcesDir.toPath().resolve(BootstrapConstants.DESCRIPTOR_PATH);
-                if (Files.exists(descriptorPath)) {
-                    return loadExtensionInfo(project, descriptorPath, artifactId, projectDep, isIncludedBuild);
-                }
-            }
+            ProjectComponentIdentifier componentId = (ProjectComponentIdentifier) artifact.getId().getComponentIdentifier();
+
+            projectDependency = getProjectExtensionDependencyOrNull(
+                    project,
+                    componentId.getProjectPath(),
+                    componentId.getBuild().getName());
+
+            if (projectDependency != null)
+                return projectDependency;
+        }
+
+        Project localExtensionProject = ToolingUtils.findLocalProject(
+                project,
+                ArtifactCoords.of(artifactId.getGroup(), artifactId.getName(), null, null, artifactId.getVersion()));
+
+        if (localExtensionProject != null) {
+            projectDependency = getExtensionInfoOrNull(project, localExtensionProject);
+
+            if (projectDependency != null)
+                return projectDependency;
         }
 
+        File artifactFile = artifact.getFile();
         if (!artifactFile.exists()) {
             return null;
         }
+
         if (artifactFile.isDirectory()) {
             Path descriptorPath = artifactFile.toPath().resolve(BootstrapConstants.DESCRIPTOR_PATH);
-            if (Files.exists(descriptorPath)) {
-                return loadExtensionInfo(project, descriptorPath, artifactId, null, false);
+            if (Files.isRegularFile(descriptorPath)) {
+                return createExtensionDependency(project, artifactId, descriptorPath);
             }
         } else if (ArtifactCoords.TYPE_JAR.equals(artifact.getExtension())) {
             try (FileSystem artifactFs = ZipUtils.newFileSystem(artifactFile.toPath())) {
                 Path descriptorPath = artifactFs.getPath(BootstrapConstants.DESCRIPTOR_PATH);
                 if (Files.exists(descriptorPath)) {
-                    return loadExtensionInfo(project, descriptorPath, artifactId, null, false);
+                    return createExtensionDependency(project, artifactId, descriptorPath);
                 }
-            } catch (IOException e) {
-                throw new GradleException("Failed to read " + artifactFile, e);
+            } catch (IOException x) {
+                throw new GradleException("Failed to read " + artifactFile, x);
             }
         }
+
         return null;
     }
 
-    private static ExtensionDependency loadExtensionInfo(Project project, Path descriptorPath,
-            ModuleVersionIdentifier exentionId, Project extensionProject, boolean isIncludedBuild) {
-        final Properties extensionProperties = new Properties();
-        try (BufferedReader reader = Files.newBufferedReader(descriptorPath)) {
-            extensionProperties.load(reader);
-        } catch (IOException e) {
-            throw new GradleException("Failed to load " + descriptorPath, e);
+    public static ExtensionDependency<?> getExtensionInfoOrNull(Project project, Project extensionProject) {
+        boolean isIncludedBuild = !project.getRootProject().getGradle().equals(extensionProject.getRootProject().getGradle());
+
+        ModuleVersionIdentifier extensionArtifactId = DefaultModuleVersionIdentifier.newId(
+                extensionProject.getGroup().toString(),
+                extensionProject.getName(),
+                extensionProject.getVersion().toString());
+
+        Object extensionConfiguration = extensionProject
+                .getExtensions().findByName(ExtensionConstants.EXTENSION_CONFIGURATION_NAME);
+
+        // If there's an extension configuration file in the project resources it can override
+        // certain settings, so we also look for it here.
+        Path descriptorPath = findLocalExtensionDescriptorPath(extensionProject);
+
+        if (extensionConfiguration != null || descriptorPath != null) {
+            return createExtensionDependency(
+                    project,
+                    extensionArtifactId,
+                    extensionProject,
+                    extensionConfiguration,
+                    descriptorPath != null ? loadLocalExtensionDescriptor(descriptorPath) : null,
+                    isIncludedBuild);
+        } else {
+            return null;
         }
-        ArtifactCoords deploymentModule = GACTV
+    }
+
+    private static Path findLocalExtensionDescriptorPath(Project extensionProject) {
+        SourceSetContainer sourceSets = extensionProject.getExtensions().getByType(SourceSetContainer.class);
+        SourceSet mainSourceSet = sourceSets.findByName(SourceSet.MAIN_SOURCE_SET_NAME);
+        if (mainSourceSet == null) {
+            return null;
+        }
+
+        Set<File> resourcesSourceDirs = mainSourceSet.getResources().getSrcDirs();
+        for (File resourceSourceDir : resourcesSourceDirs) {
+            Path descriptorPath = resourceSourceDir.toPath().resolve(BootstrapConstants.DESCRIPTOR_PATH);
+            if (Files.isRegularFile(descriptorPath)) {
+                return descriptorPath;
+            }
+        }
+
+        return null;
+    }
+
+    private static Properties loadLocalExtensionDescriptor(Path descriptorPath) {
+        Properties descriptor = new Properties();
+        try (InputStream inputStream = Files.newInputStream(descriptorPath)) {
+            descriptor.load(inputStream);
+        } catch (IOException x) {
+            throw new GradleException("Failed to load extension descriptor at " + descriptorPath, x);
+        }
+
+        return descriptor;
+    }
+
+    @Nullable
+    public static ExtensionDependency<?> getProjectExtensionDependencyOrNull(
+            Project project,
+            String projectPath,
+            @Nullable String buildName) {
+        Project extensionProject = project.getRootProject().findProject(projectPath);
+        if (extensionProject == null) {
+            IncludedBuild extProjIncludedBuild = ToolingUtils.includedBuild(project, buildName);
+            if (extProjIncludedBuild instanceof IncludedBuildInternal) {
+                extensionProject = ToolingUtils
+                        .includedBuildProject((IncludedBuildInternal) extProjIncludedBuild, projectPath);
+            }
+        }
+
+        if (extensionProject != null) {
+            return getExtensionInfoOrNull(project, extensionProject);
+        }
+
+        return null;
+    }
+
+    private static ProjectExtensionDependency createExtensionDependency(
+            Project project,
+            ModuleVersionIdentifier extensionArtifactId,
+            Project extensionProject,
+            @Nullable Object extensionConfiguration,
+            @Nullable Properties extensionDescriptor,
+            boolean isIncludedBuild) {
+        if (extensionConfiguration == null && extensionDescriptor == null) {
+            throw new IllegalArgumentException("both extensionConfiguration and extensionDescriptor are null");
+        }
+
+        Project deploymentProject = null;
+
+        if (extensionConfiguration != null) {
+            final String deploymentProjectPath = ConfigurationUtils.getDeploymentModule(extensionConfiguration).get();
+            deploymentProject = ToolingUtils.findLocalProject(extensionProject, deploymentProjectPath);
+
+            if (deploymentProject == null) {
+                throw new GradleException("Cannot find deployment project for extension " + extensionArtifactId + " at path "
+                        + deploymentProjectPath);
+            }
+        } else if (extensionDescriptor.containsKey(BootstrapConstants.PROP_DEPLOYMENT_ARTIFACT)) {
+            final ArtifactCoords deploymentArtifact = GACTV
+                    .fromString(extensionDescriptor.getProperty(BootstrapConstants.PROP_DEPLOYMENT_ARTIFACT));
+
+            deploymentProject = ToolingUtils.findLocalProject(project, deploymentArtifact);
+
+            if (deploymentProject == null) {
+                throw new GradleException("Cannot find deployment project for extension " + extensionArtifactId
+                        + " with artifact coordinates " + deploymentArtifact);
+            }
+        }
+
+        final List<Dependency> conditionalDependencies = new ArrayList<>();
+        final List<ArtifactKey> dependencyConditions = new ArrayList<>();
+
+        if (extensionConfiguration != null) {
+            final ListProperty<String> conditionalDependenciesProp = ConfigurationUtils
+                    .getConditionalDependencies(extensionConfiguration);
+
+            if (conditionalDependenciesProp.isPresent()) {
+                for (String rawDep : conditionalDependenciesProp.get()) {
+                    conditionalDependencies.add(create(project.getDependencies(), rawDep));
+                }
+            }
+
+            final ListProperty<String> dependencyConditionsProp = ConfigurationUtils
+                    .getDependencyConditions(extensionConfiguration);
+
+            if (dependencyConditionsProp.isPresent()) {
+                for (String rawCond : dependencyConditionsProp.get()) {
+                    dependencyConditions.add(GACT.fromString(rawCond));
+                }
+            }
+        }
+
+        if (extensionDescriptor != null && extensionDescriptor.containsKey(BootstrapConstants.CONDITIONAL_DEPENDENCIES)) {
+            final String[] deps = BootstrapUtils
+                    .splitByWhitespace(extensionDescriptor.getProperty(BootstrapConstants.CONDITIONAL_DEPENDENCIES));
+
+            for (String condDep : deps) {
+                conditionalDependencies.add(create(project.getDependencies(), condDep));
+            }
+        }
+
+        if (extensionDescriptor != null && extensionDescriptor.containsKey(BootstrapConstants.DEPENDENCY_CONDITION)) {
+            final ArtifactKey[] conditions = BootstrapUtils
+                    .parseDependencyCondition(extensionDescriptor.getProperty(BootstrapConstants.DEPENDENCY_CONDITION));
+
+            dependencyConditions.addAll(Arrays.asList(conditions));
+        }
+
+        return new ProjectExtensionDependency(
+                extensionProject,
+                deploymentProject,
+                isIncludedBuild,
+                conditionalDependencies,
+                dependencyConditions);
+    }
+
+    private static ArtifactExtensionDependency createExtensionDependency(
+            Project project,
+            ModuleVersionIdentifier extensionArtifactId,
+            Path descriptorPath) {
+        final Properties extensionProperties = loadLocalExtensionDescriptor(descriptorPath);
+
+        final ArtifactCoords deploymentArtifact = GACTV
                 .fromString(extensionProperties.getProperty(BootstrapConstants.PROP_DEPLOYMENT_ARTIFACT));
+
         final List<Dependency> conditionalDependencies;
         if (extensionProperties.containsKey(BootstrapConstants.CONDITIONAL_DEPENDENCIES)) {
             final String[] deps = BootstrapUtils
                     .splitByWhitespace(extensionProperties.getProperty(BootstrapConstants.CONDITIONAL_DEPENDENCIES));
-            conditionalDependencies = new ArrayList<>(deps.length);
-            for (String conditionalDep : deps) {
-                conditionalDependencies.add(create(project.getDependencies(), conditionalDep));
+
+            if (deps.length > 0) {
+                conditionalDependencies = new ArrayList<>(deps.length);
+                for (String condDep : deps) {
+                    conditionalDependencies.add(create(project.getDependencies(), condDep));
+                }
+            } else {
+                conditionalDependencies = Collections.emptyList();
             }
         } else {
             conditionalDependencies = Collections.emptyList();
         }
 
-        final ArtifactKey[] constraints = BootstrapUtils
-                .parseDependencyCondition(extensionProperties.getProperty(BootstrapConstants.DEPENDENCY_CONDITION));
-        if (isIncludedBuild) {
-            return new IncludedBuildExtensionDependency(extensionProject, exentionId, deploymentModule, conditionalDependencies,
-                    constraints == null ? Collections.emptyList() : Arrays.asList(constraints));
-        }
-        if (extensionProject != null) {
-            return new LocalExtensionDependency(extensionProject, exentionId, deploymentModule, conditionalDependencies,
-                    constraints == null ? Collections.emptyList() : Arrays.asList(constraints));
+        final List<ArtifactKey> dependencyConditions;
+        if (extensionProperties.containsKey(BootstrapConstants.DEPENDENCY_CONDITION)) {
+            final ArtifactKey[] conditions = BootstrapUtils
+                    .parseDependencyCondition(extensionProperties.getProperty(BootstrapConstants.DEPENDENCY_CONDITION));
+
+            if (conditions.length > 0) {
+                dependencyConditions = Arrays.asList(conditions);
+            } else {
+                dependencyConditions = Collections.emptyList();
+            }
+        } else {
+            dependencyConditions = Collections.emptyList();
         }
-        return new ExtensionDependency(exentionId, deploymentModule, conditionalDependencies,
-                constraints == null ? Collections.emptyList() : Arrays.asList(constraints));
+
+        return new ArtifactExtensionDependency(
+                extensionArtifactId,
+                deploymentArtifact,
+                conditionalDependencies,
+                dependencyConditions);
     }
 
     public static Dependency create(DependencyHandler dependencies, String conditionalDependency) {
@@ -173,16 +349,37 @@ public static Dependency create(DependencyHandler dependencies, String condition
                 dependencyCoords.getVersion()));
     }
 
-    public static void addLocalDeploymentDependency(String deploymentConfigurationName, LocalExtensionDependency extension,
-            DependencyHandler dependencies) {
-        dependencies.add(deploymentConfigurationName,
-                dependencies.project(Collections.singletonMap("path", extension.findDeploymentModulePath())));
+    public static Dependency createDeploymentDependency(
+            DependencyHandler dependencyHandler,
+            ExtensionDependency dependency) {
+        if (dependency instanceof ProjectExtensionDependency) {
+            ProjectExtensionDependency ped = (ProjectExtensionDependency) dependency;
+            return createDeploymentProjectDependency(dependencyHandler, ped);
+        } else if (dependency instanceof ArtifactExtensionDependency) {
+            ArtifactExtensionDependency aed = (ArtifactExtensionDependency) dependency;
+            return createArtifactDeploymentDependency(dependencyHandler, aed);
+        }
+
+        throw new IllegalArgumentException("Unknown ExtensionDependency type: " + dependency.getClass().getName());
+    }
+
+    private static Dependency createDeploymentProjectDependency(DependencyHandler handler, ProjectExtensionDependency ped) {
+        if (ped.isIncludedBuild()) {
+            return new DefaultExternalModuleDependency(
+                    ped.getDeploymentModule().getGroup().toString(),
+                    ped.getDeploymentModule().getName(),
+                    ped.getDeploymentModule().getVersion().toString());
+        } else if (ped.getDeploymentModule() instanceof ProjectInternal) {
+            return handler.create(new DefaultProjectDependency((ProjectInternal) ped.getDeploymentModule(), true));
+        } else {
+            return handler.create(handler.project(Collections.singletonMap("path", ped.getDeploymentModule().getPath())));
+        }
     }
 
-    public static void requireDeploymentDependency(String deploymentConfigurationName, ExtensionDependency extension,
-            DependencyHandler dependencies) {
-        dependencies.add(deploymentConfigurationName,
-                extension.getDeploymentModule().getGroupId() + ":" + extension.getDeploymentModule().getArtifactId() + ":"
-                        + extension.getDeploymentModule().getVersion());
+    private static Dependency createArtifactDeploymentDependency(DependencyHandler handler,
+            ArtifactExtensionDependency dependency) {
+        return handler.create(dependency.getDeploymentModule().getGroupId() + ":"
+                + dependency.getDeploymentModule().getArtifactId() + ":"
+                + dependency.getDeploymentModule().getVersion());
     }
 }
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ExtensionDependency.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ExtensionDependency.java
index 1a44212f9c0b6..c10143b695ce7 100644
--- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ExtensionDependency.java
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ExtensionDependency.java
@@ -8,18 +8,17 @@
 import org.gradle.api.artifacts.ModuleVersionIdentifier;
 import org.gradle.api.artifacts.dsl.DependencyHandler;
 
-import io.quarkus.maven.dependency.ArtifactCoords;
 import io.quarkus.maven.dependency.ArtifactKey;
 
-public class ExtensionDependency {
+public abstract class ExtensionDependency<T> {
 
     private final ModuleVersionIdentifier extensionId;
-    protected final ArtifactCoords deploymentModule;
+    private final T deploymentModule;
     private final List<Dependency> conditionalDependencies;
     private final List<ArtifactKey> dependencyConditions;
     private boolean isConditional;
 
-    public ExtensionDependency(ModuleVersionIdentifier extensionId, ArtifactCoords deploymentModule,
+    public ExtensionDependency(ModuleVersionIdentifier extensionId, T deploymentModule,
             List<Dependency> conditionalDependencies,
             List<ArtifactKey> dependencyConditions) {
         this.extensionId = extensionId;
@@ -41,10 +40,6 @@ public void importConditionalDependency(DependencyHandler dependencies, ModuleVe
                         .withDependencies(d -> d.add(DependencyUtils.asDependencyNotation(dependency))))));
     }
 
-    public String asDependencyNotation() {
-        return String.join(":", this.extensionId.getGroup(), this.extensionId.getName(), this.extensionId.getVersion());
-    }
-
     private Dependency findConditionalDependency(ModuleVersionIdentifier capability) {
         for (Dependency conditionalDependency : conditionalDependencies) {
             if (conditionalDependency.getGroup().equals(capability.getGroup())
@@ -83,7 +78,7 @@ public List getConditionalDependencies() {
         return conditionalDependencies;
     }
 
-    public ArtifactCoords getDeploymentModule() {
+    public T getDeploymentModule() {
         return deploymentModule;
     }
 
@@ -101,7 +96,7 @@ public boolean equals(Object o) {
             return true;
         if (o == null || getClass() != o.getClass())
             return false;
-        ExtensionDependency that = (ExtensionDependency) o;
+        ExtensionDependency<?> that = (ExtensionDependency<?>) o;
         return Objects.equals(extensionId, that.extensionId)
                 && Objects.equals(conditionalDependencies, that.conditionalDependencies)
                 && Objects.equals(dependencyConditions, that.dependencyConditions);
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/IncludedBuildExtensionDependency.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/IncludedBuildExtensionDependency.java
deleted file mode 100644
index fd8d676827a1e..0000000000000
--- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/IncludedBuildExtensionDependency.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package io.quarkus.gradle.tooling.dependency;
-
-import java.util.List;
-
-import org.gradle.api.Project;
-import org.gradle.api.artifacts.Dependency;
-import org.gradle.api.artifacts.ModuleVersionIdentifier;
-import org.gradle.api.internal.artifacts.dependencies.DefaultExternalModuleDependency;
-
-import io.quarkus.maven.dependency.ArtifactCoords;
-import io.quarkus.maven.dependency.ArtifactKey;
-
-public class IncludedBuildExtensionDependency extends LocalExtensionDependency {
-    public IncludedBuildExtensionDependency(Project localProject, ModuleVersionIdentifier extensionId,
-            ArtifactCoords deploymentModule,
-            List<Dependency> conditionalDependencies, List<ArtifactKey> dependencyConditions) {
-        super(localProject, extensionId, deploymentModule, conditionalDependencies, dependencyConditions);
-    }
-
-    public Dependency getDeployment() {
-        return new DefaultExternalModuleDependency(deploymentModule.getGroupId(), deploymentModule.getArtifactId(),
-                deploymentModule.getVersion());
-    }
-}
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/LocalExtensionDependency.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/LocalExtensionDependency.java
deleted file mode 100644
index 4172c17c14676..0000000000000
--- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/LocalExtensionDependency.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package io.quarkus.gradle.tooling.dependency;
-
-import java.util.List;
-
-import org.gradle.api.Project;
-import org.gradle.api.artifacts.Dependency;
-import org.gradle.api.artifacts.ModuleVersionIdentifier;
-
-import io.quarkus.maven.dependency.ArtifactCoords;
-import io.quarkus.maven.dependency.ArtifactKey;
-
-public class LocalExtensionDependency extends ExtensionDependency {
-
-    private static final String DEFAULT_DEPLOYMENT_PATH_SUFFIX = "deployment";
-
-    private Project localProject;
-
-    public LocalExtensionDependency(Project localProject, ModuleVersionIdentifier extensionId,
-            ArtifactCoords deploymentModule,
-            List<Dependency> conditionalDependencies, List<ArtifactKey> dependencyConditions) {
-        super(extensionId, deploymentModule, conditionalDependencies, dependencyConditions);
-        this.localProject = localProject;
-    }
-
-    public String findDeploymentModulePath() {
-
-        String deploymentModuleName = DEFAULT_DEPLOYMENT_PATH_SUFFIX;
-        if (localProject.getParent().findProject(deploymentModule.getArtifactId()) != null) {
-            deploymentModuleName = deploymentModule.getArtifactId();
-        }
-
-        String parentPath = localProject.getParent().getPath();
-        if (parentPath.endsWith(":")) {
-            return parentPath + deploymentModuleName;
-        }
-
-        return parentPath + ":" + deploymentModuleName;
-    }
-}
diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ProjectExtensionDependency.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ProjectExtensionDependency.java
new file mode 100644
index 0000000000000..77ef259b116a0
--- /dev/null
+++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/dependency/ProjectExtensionDependency.java
@@ -0,0 +1,34 @@
+package io.quarkus.gradle.tooling.dependency;
+
+import java.util.List;
+
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.Dependency;
+import org.gradle.api.internal.artifacts.DefaultModuleVersionIdentifier;
+
+import io.quarkus.maven.dependency.ArtifactKey;
+
+public class ProjectExtensionDependency extends ExtensionDependency<Project> {
+    private final Boolean isIncludedBuild;
+
+    public ProjectExtensionDependency(
+            Project extensionProject,
+            Project deploymentModule,
+            Boolean isIncludedBuild,
+            List<Dependency> conditionalDependencies,
+            List<ArtifactKey> dependencyConditions) {
+        super(DefaultModuleVersionIdentifier.newId(
+                extensionProject.getGroup().toString(),
+                extensionProject.getName(),
+                extensionProject.getVersion().toString()),
+                deploymentModule,
+                conditionalDependencies,
+                dependencyConditions);
+
+        this.isIncludedBuild = isIncludedBuild;
+    }
+
+    public Boolean isIncludedBuild() {
+        return isIncludedBuild;
+    }
+}
diff --git a/devtools/gradle/gradle/libs.versions.toml b/devtools/gradle/gradle/libs.versions.toml
index 34743ae4dcaa7..18a40560f0872 100644
--- a/devtools/gradle/gradle/libs.versions.toml
+++ b/devtools/gradle/gradle/libs.versions.toml
@@ -3,7 +3,7 @@ plugin-publish = "1.2.1"
 
 # updating Kotlin here makes QuarkusPluginTest > shouldNotFailOnProjectDependenciesWithoutMain(Path) fail
 kotlin = "1.9.22"
-smallrye-config = "3.4.4"
+smallrye-config = "3.5.2"
 
 junit5 = "5.10.1"
 assertj = "3.25.1"
diff --git a/docs/src/main/asciidoc/getting-started.adoc b/docs/src/main/asciidoc/getting-started.adoc
index 357fdafa2aeb5..83b5d1f3955d3 100644
--- a/docs/src/main/asciidoc/getting-started.adoc
+++ b/docs/src/main/asciidoc/getting-started.adoc
@@ -481,6 +481,8 @@ but users can also choose to expose one that might present a security risk under
 
 If the application contains the `quarkus-info` extension, then Quarkus will by default expose the `/q/info` endpoint which provides information about the build, java version, version control, and operating system. The level of detail of the exposed information is configurable.
 
+All CDI beans implementing the `InfoContributor` interface will be picked up and their data will be appended to the endpoint output.
+
 ==== Configuration Reference
 
 include::{generated-dir}/config/quarkus-info.adoc[opts=optional, leveloffset=+2]
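
As an aside on the `InfoContributor` note added above, a custom contributor is just a CDI bean implementing that interface. The sketch below is hypothetical: the SPI package (`io.quarkus.info.runtime.spi`) and the `name()`/`data()` methods are quoted from memory and should be verified against the `quarkus-info` extension.

[source,java]
----
import java.util.Map;

import jakarta.enterprise.context.ApplicationScoped;

import io.quarkus.info.runtime.spi.InfoContributor;

@ApplicationScoped
public class TeamInfoContributor implements InfoContributor {

    @Override
    public String name() {
        // key under which the contributed data shows up in the /q/info payload
        return "team";
    }

    @Override
    public Map<String, Object> data() {
        return Map.of("owner", "platform-team");
    }
}
----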
diff --git a/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc b/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc
index 0113fdfbf6488..d99e2420de01c 100644
--- a/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc
+++ b/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc
@@ -1018,6 +1018,112 @@ You can enable AWS request signing in Hibernate Search by adding a dedicated ext
 See link:{hibernate-search-orm-elasticsearch-aws-guide}#aws-configuration-reference[the documentation for the Hibernate Search ORM + Elasticsearch AWS extension]
 for more information.
 
+[[management]]
+== Management endpoint
+
+[CAUTION]
+====
+Hibernate Search's management endpoint is considered preview.
+
+In _preview_, backward compatibility and presence in the ecosystem is not guaranteed.
+Specific improvements might require changing configuration or APIs, or even storage formats,
+and plans to become _stable_ are under way.
+Feedback is welcome on our https://groups.google.com/d/forum/quarkus-dev[mailing list]
+or as issues in our https://github.com/quarkusio/quarkus/issues[GitHub issue tracker].
+====
+
+The Hibernate Search extension provides an HTTP endpoint to reindex your data through the xref:./management-interface-reference.adoc[management interface].
+By default, this endpoint is not available. It can be enabled through configuration properties as shown below.
+
+[source,properties]
+----
+quarkus.management.enabled=true <1>
+quarkus.hibernate-search-orm.management.enabled=true <2>
+----
+<1> Enable the xref:./management-interface-reference.adoc[management interface].
+<2> Enable Hibernate Search specific management endpoints.
+
+Once the management interface is enabled, data can be re-indexed via `/q/hibernate-search/reindex`, where `/q` is the default management root path
+and `/hibernate-search` is the default Hibernate Search root management path.
+The latter can be changed via a configuration property, as shown below.
+
+[source,properties]
+----
+quarkus.hibernate-search-orm.management.root-path=custom-root-path <1>
+----
+<1> Use a custom `custom-root-path` path for Hibernate Search's management endpoint.
+If the default management root path is used, the reindex path becomes `/q/custom-root-path/reindex`.
+
+This endpoint accepts `POST` requests with `application/json` content type only.
+All indexed entities are re-indexed if an empty request body is submitted.
+If only a subset of entities must be re-indexed,
+or if the underlying mass indexer needs a custom configuration,
+this information can be passed through the request body as shown below.
+
+[source,json]
+----
+{
+  "filter": {
+    "types": ["EntityName1", "EntityName2", "EntityName3", ...], <1>
+  },
+  "massIndexer":{
+    "typesToIndexInParallel": 1, <2>
+  }
+}
+----
+<1> An array of entity names that should be re-indexed. If unspecified or empty, all entity types will be re-indexed.
+<2> Sets the number of entity types to be indexed in parallel.
+
+The full list of possible filters and available mass indexer configurations is presented in the example below.
+
+[source,json]
+----
+{
+  "filter": { <1>
+    "types": ["EntityName1", "EntityName2", "EntityName3", ...], <2>
+    "tenants": ["tenant1", "tenant2", ...] <3>
+  },
+  "massIndexer":{ <4>
+    "typesToIndexInParallel": 1, <5>
+    "threadsToLoadObjects": 6,  <6>
+    "batchSizeToLoadObjects": 10, <7>
+    "cacheMode": "IGNORE", <8>
+    "mergeSegmentsOnFinish": false, <9>
+    "mergeSegmentsAfterPurge": true, <10>
+    "dropAndCreateSchemaOnStart": false, <11>
+    "purgeAllOnStart": true, <12>
+    "idFetchSize": 100, <13>
+    "transactionTimeout": 100000, <14>
+  }
+}
+----
+<1> Filter object that limits the scope of reindexing.
+<2> An array of entity names that should be re-indexed. If unspecified or empty, all entity types will be re-indexed.
+<3> An array of tenant IDs, in case of multi-tenancy. If unspecified or empty, all tenants will be re-indexed.
+<4> Mass indexer configuration object.
+<5> Sets the number of entity types to be indexed in parallel.
+<6> Sets the number of threads to be used to load the root entities.
+<7> Sets the batch size used to load the root entities.
+<8> Sets the cache interaction mode for the data loading tasks.
+<9> Whether each index is merged into a single segment after indexing.
+<10> Whether each index is merged into a single segment after the initial index purge, just before indexing.
+<11> Whether the indexes and their schema (if they exist) should be dropped and re-created before indexing.
+<12> Whether all entities are removed from the indexes before indexing.
+<13> Specifies the fetch size to be used when loading the primary keys of the objects to be indexed.
+<14> Specifies the timeout of transactions for loading ids and entities to be re-indexed.
+
+Note that all the properties in the JSON are optional; use only those that are needed.
+
+For more detailed information on mass indexer configuration, see the
+link:{hibernate-search-docs-url}#indexing-massindexer-parameters[corresponding section of the Hibernate Search reference documentation].
+
+Submitting the reindexing request triggers indexing in the background. Mass indexing progress appears in the application logs.
+For testing purposes, it might be useful to know when the indexing has finished. Adding the `wait_for=finished` query parameter to the URL
+makes the management endpoint return a chunked response that reports when the indexing starts and then when it finishes.
+
+When working with multiple persistence units, the name of the persistence unit to reindex can be supplied through the
+`persistence_unit` query parameter: `/q/hibernate-search/reindex?persistence_unit=non-default-persistence-unit`.
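+
+As an illustration, the following `curl` command triggers a full reindexing of all entities and waits for it to finish.
+This is a minimal sketch that assumes the management interface is exposed on its default port `9000` and uses the default paths shown above; adjust the host, port, and paths to your setup.
+
+[source,bash]
+----
+# A POST with an empty request body re-indexes all entity types;
+# wait_for=finished keeps the connection open until indexing completes
+curl -X POST -H "Content-Type: application/json" \
+  "http://localhost:9000/q/hibernate-search/reindex?wait_for=finished"
+----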
+
 == Further reading
 
 If you are interested in learning more about Hibernate Search 6,
diff --git a/docs/src/main/asciidoc/mongodb-panache.adoc b/docs/src/main/asciidoc/mongodb-panache.adoc
index 1c2140715c483..e7fc011781d0e 100644
--- a/docs/src/main/asciidoc/mongodb-panache.adoc
+++ b/docs/src/main/asciidoc/mongodb-panache.adoc
@@ -770,6 +770,8 @@ MongoDB offers ACID transactions since version 4.0.
 
 To use them with MongoDB with Panache you need to annotate the method that starts the transaction with the `@Transactional` annotation.
 
+Inside methods annotated with `@Transactional`, you can access the `ClientSession` with `Panache.getClientSession()` if needed.
+
 In MongoDB, a transaction is only possible on a replicaset,
 luckily our xref:mongodb.adoc#dev-services[Dev Services for MongoDB] setups a single node replicaset so it is compatible with transactions.
 
diff --git a/docs/src/main/asciidoc/security-keycloak-authorization.adoc b/docs/src/main/asciidoc/security-keycloak-authorization.adoc
index aa2c96b858f8b..f4a3eabe6fdd5 100644
--- a/docs/src/main/asciidoc/security-keycloak-authorization.adoc
+++ b/docs/src/main/asciidoc/security-keycloak-authorization.adoc
@@ -3,29 +3,37 @@ This guide is maintained in the main Quarkus repository
 and pull requests should be submitted there:
 https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
 ////
-= Using OpenID Connect (OIDC) and Keycloak to Centralize Authorization
+= Using OpenID Connect (OIDC) and Keycloak to centralize authorization
 include::_attributes.adoc[]
+:diataxis-type: howto
 :categories: security
 :keywords: sso oidc security keycloak
-:summary: This guide demonstrates how your Quarkus application can authorize access to protected resources using Keycloak Authorization Services.
 :topics: security,authentication,authorization,keycloak,sso,oidc
 :extensions: io.quarkus:quarkus-oidc,io.quarkus:quarkus-keycloak-authorization
 
-This guide demonstrates how your Quarkus application can authorize a bearer token access to protected resources using https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization Services].
+Learn how to enable bearer token authorization in your Quarkus application using link:https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization Services] for secure access to protected resources.
 
-The `quarkus-keycloak-authorization` extension is based on `quarkus-oidc` and provides a policy enforcer that enforces access to protected resources based on permissions managed by Keycloak and currently can only be used with the Quarkus xref:security-oidc-bearer-token-authentication.adoc[OIDC service applications].
+The `quarkus-keycloak-authorization` extension relies on `quarkus-oidc`.
+It includes a policy enforcer that regulates access to secured resources.
+Access is governed by permissions set in Keycloak.
+Currently, this extension is compatible solely with Quarkus xref:security-oidc-bearer-token-authentication.adoc[OIDC service applications].
 
 It provides a flexible and dynamic authorization capability based on Resource-Based Access Control.
 
-Instead of explicitly enforcing access based on some specific access control mechanism such as Role-Based Access Control(RBAC), `quarkus-keycloak-authorization` checks whether a request is allowed to access a resource based on its name, identifier or URI by sending a bearer access token verified by `quarkus-oidc` to Keycloak Authorization Services where an authorization decision is made.
+Rather than explicitly enforcing access through specific mechanisms such as role-based access control (RBAC), `quarkus-keycloak-authorization` determines request permissions based on resource attributes such as name, identifier, or Uniform Resource Identifier (URI).
+This process involves sending a `quarkus-oidc`-verified bearer access token to Keycloak Authorization Services for an authorization decision.
 
-Use `quarkus-keycloak-authorization` only if you work with Keycloak and have Keycloak Authorization Services enabled to make authorization decisions. Use `quarkus-oidc` if you do not work with Keycloak or work with Keycloak but do not have its Keycloak Authorization Services enabled to make authorization decisions.
+Use `quarkus-keycloak-authorization` only if you work with Keycloak and have Keycloak Authorization Services enabled to make authorization decisions.
+Use `quarkus-oidc` if you do not work with Keycloak or work with Keycloak but do not have its Keycloak Authorization Services enabled to make authorization decisions.
 
-By externalizing authorization from your application, you are allowed to protect your applications using different access control mechanisms as well as avoid re-deploying your application every time your security requirements change, where Keycloak will be acting as a centralized authorization service from where your protected resources and their associated permissions are managed.
+By shifting authorization responsibilities outside your application, you enhance security through various access control methods while eliminating the need for frequent re-deployments whenever security needs evolve.
+In this case, Keycloak acts as a centralized authorization hub, managing your protected resources and their corresponding permissions effectively.
 
-See the xref:security-oidc-bearer-token-authentication.adoc[OIDC Bearer token authentication] guide for more information about `Bearer Token` authentication mechanism. It is important to realize that it is the `Bearer Token` authentication mechanism which does the authentication and creates a security identity - while the `quarkus-keycloak-authorization` extension is responsible for applying a Keycloak Authorization Policy to this identity based on the current request path and other policy settings.
+For more information, see the xref:security-oidc-bearer-token-authentication.adoc[OIDC Bearer token authentication] guide.
+It is important to realize that the Bearer token authentication mechanism does the authentication and creates a security identity.
+Meanwhile, the `quarkus-keycloak-authorization` extension applies a Keycloak Authorization Policy to this identity based on the current request path and other policy settings.
 
-Please see https://www.keycloak.org/docs/latest/authorization_services/index.html#_enforcer_overview[Keycloak Authorization Services documentation] for more information.
+For more information, see https://www.keycloak.org/docs/latest/authorization_services/index.html#_enforcer_overview[Keycloak Authorization Services documentation].
 
 == Prerequisites
 
@@ -36,25 +44,28 @@ include::{includes}/prerequisites.adoc[]
 
 == Architecture
 
-In this example, we build a very simple microservice which offers two endpoints:
+In this example, we build a very simple microservice that offers two endpoints:
 
 * `/api/users/me`
 * `/api/admin`
 
-These endpoints are protected and can only be accessed if a client is sending a bearer token along with the request, which must be valid (e.g.: signature, expiration and audience) and trusted by the microservice.
+These endpoints are protected.
+Access is granted only when a client sends a bearer token with the request.
+This token must be valid, having a correct signature, expiration date, and audience.
+Additionally, the microservice must trust the token.
 
-The bearer token is issued by a Keycloak Server and represents the subject to which the token was issued for.
+The bearer token is issued by a Keycloak server and represents the subject for which the token was issued.
 For being an OAuth 2.0 Authorization Server, the token also references the client acting on behalf of the user.
 
 The `/api/users/me` endpoint can be accessed by any user with a valid token.
-As a response, it returns a JSON document with details about the user where these details are obtained from the information carried on the token.
-This endpoint is protected with RBAC (Role-Based Access Control) and only users granted with the `user` role can access this endpoint.
+As a response, it returns a JSON document with details about the user obtained from the information carried on the token.
+This endpoint is protected with RBAC, and only users granted the `user` role can access it.
 
-The `/api/admin` endpoint is protected with RBAC (Role-Based Access Control) and only users granted with the `admin` role can access it.
+The `/api/admin` endpoint is protected with RBAC, and only users granted the `admin` role can access it.
 
-This is a very simple example using RBAC policies to govern access to your resources.
-However, Keycloak supports other types of policies that you can use to perform even more fine-grained access control.
-By using this example, you'll see that your application is completely decoupled from your authorization policies with enforcement being purely based on the accessed resource.
+This is a very simple example of using RBAC policies to govern access to your resources.
+However, Keycloak supports other policies that you can use to perform even more fine-grained access control.
+By using this example, you'll see that your application is completely decoupled from your authorization policies, with enforcement purely based on the accessed resource.
 
 == Solution
 
@@ -63,9 +74,9 @@ However, you can go right to the completed example.
 
 Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive].
 
-The solution is located in the `security-keycloak-authorization-quickstart` link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart[directory].
+The solution is in the `security-keycloak-authorization-quickstart` link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart[directory].
 
-== Creating the Project
+== Creating the project
 
 First, we need a new project.
 Create a new project with the following command:
@@ -74,7 +85,8 @@ Create a new project with the following command:
 :create-app-extensions: oidc,keycloak-authorization,resteasy-reactive-jackson
 include::{includes}/devtools/create-app.adoc[]
 
-This command generates a project, importing the `keycloak-authorization` extension which is an implementation of a Keycloak Adapter for Quarkus applications and provides all the necessary capabilities to integrate with a Keycloak Server and perform bearer token authorization.
+This command generates a project, importing the `keycloak-authorization` extension.
+This extension implements a Keycloak Adapter for Quarkus applications and provides all the necessary capabilities to integrate with a Keycloak server and perform bearer token authorization.
 
 If you already have your Quarkus project configured, you can add the `oidc` and `keycloak-authorization` extensions
 to your project by running the following command in your project base directory:
@@ -82,7 +94,7 @@ to your project by running the following command in your project base directory:
 :add-extension-extensions: oidc,keycloak-authorization
 include::{includes}/devtools/extension-add.adoc[]
 
-This will add the following to your build file:
+This adds the following dependencies to your build file:
 
 [source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"]
 .pom.xml
@@ -105,7 +117,7 @@ implementation("io.quarkus:quarkus-keycloak-authorization")
 ----
 
 Let's start by implementing the `/api/users/me` endpoint.
-As you can see from the source code below it is just a regular Jakarta REST resource:
+As you can see in the following source code, it is a regular Jakarta REST resource:
 
 [source,java]
 ----
@@ -172,12 +184,12 @@ public class AdminResource {
 }
 ----
 
-Note that we did not define any annotation such as `@RolesAllowed` to explicitly enforce access to a resource.
-The extension will be responsible to map the URIs of the protected resources you have in Keycloak and evaluate the permissions accordingly, granting or denying access depending on the permissions that will be granted by Keycloak.
+Be aware that we have not defined annotations such as `@RolesAllowed` to explicitly enforce access to a resource.
+Instead, the extension is responsible for mapping the URIs of the protected resources in Keycloak and evaluating the permissions accordingly, granting or denying access depending on the permissions granted by Keycloak.
 
 === Configuring the application
 
-The OpenID Connect extension allows you to define the adapter configuration using the `application.properties` file which should be located at the `src/main/resources` directory.
+The OpenID Connect extension allows you to define the adapter configuration by using the `application.properties` file, which is usually located in the `src/main/resources` directory.
 
 [source,properties]
 ----
@@ -191,31 +203,35 @@ quarkus.oidc.tls.verification=none
 quarkus.keycloak.policy-enforcer.enable=true
 
 # Tell Dev Services for Keycloak to import the realm file
-# This property is not effective when running the application in JVM or Native modes
+# This property is not effective when running the application in JVM or native modes
 quarkus.keycloak.devservices.realm-path=quarkus-realm.json
 ----
 
-NOTE: Adding a `%prod.` profile prefix to `quarkus.oidc.auth-server-url` ensures that `Dev Services for Keycloak` will launch a container for you when the application is run in a dev mode. See <> section below for more information.
+NOTE: Adding a `%prod.` profile prefix to `quarkus.oidc.auth-server-url` ensures that Dev Services for Keycloak launches a container for you when the application is run in dev mode.
+For more information, see the <<keycloak-dev-mode>> section.
 
-NOTE: By default, applications using the `quarkus-oidc` extension are marked as a `service` type application (see `quarkus.oidc.application-type`). This extension also supports only  `web-app` type applications but only if the access token returned as part of the authorization code grant response is marked as a source of roles: `quarkus.oidc.roles.source=accesstoken` (`web-app` type applications check ID token roles by default).
+NOTE: By default, applications that use the `quarkus-oidc` extension are marked as a `service` type application (see `quarkus.oidc.application-type`).
+This extension also supports `web-app` type applications, but only if the access token returned as part of the authorization code grant response is marked as a source of roles: `quarkus.oidc.roles.source=accesstoken` (`web-app` type applications check ID token roles by default).
 
-== Starting and Configuring the Keycloak Server
+== Starting and configuring the Keycloak server
 
-NOTE: Do not start the Keycloak server when you run the application in a dev mode - `Dev Services for Keycloak` will launch a container. See <> section below for more information.
+NOTE: Do not start the Keycloak server when you run the application in dev mode.
+Dev Services for Keycloak launches a container.
+For more information, see the <<keycloak-dev-mode>> section.
 
-To start a Keycloak Server you can use Docker and just run the following command:
+To start a Keycloak server, use the following Docker command:
 
 [source,bash,subs=attributes+]
 ----
 docker run --name keycloak -e KEYCLOAK_ADMIN=admin -e KEYCLOAK_ADMIN_PASSWORD=admin -p 8543:8443 -v "$(pwd)"/config/keycloak-keystore.jks:/etc/keycloak-keystore.jks quay.io/keycloak/keycloak:{keycloak.version} start  --hostname-strict=false --https-key-store-file=/etc/keycloak-keystore.jks
 ----
 
-where `keycloak.version` should be set to `23.0.0` or higher and the `keycloak-keystore.jks` can be found in https://github.com/quarkusio/quarkus-quickstarts/blob/main/security-keycloak-authorization-quickstart/config/keycloak-keystore.jks[quarkus-quickstarts/security-keycloak-authorization-quickstart/config]
+where `keycloak.version` must be `23.0.0` or later, and the `keycloak-keystore.jks` file can be found in https://github.com/quarkusio/quarkus-quickstarts/blob/main/security-keycloak-authorization-quickstart/config/keycloak-keystore.jks[quarkus-quickstarts/security-keycloak-authorization-quickstart/config].
 
-You should be able to access your Keycloak Server at https://localhost:8543[localhost:8543].
+Try to access your Keycloak server at https://localhost:8543[localhost:8543].
 
-Log in as the `admin` user to access the Keycloak Administration Console.
-Username should be `admin` and password `admin`.
+To access the Keycloak Administration Console, log in as the `admin` user.
+The username and password are both `admin`.
 
 Import the link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart/config/quarkus-realm.json[realm configuration file] to create a new realm.
 For more details, see the Keycloak documentation about how to https://www.keycloak.org/docs/latest/server_admin/index.html#_create-realm[create a new realm].
@@ -227,46 +243,49 @@ image::keycloak-authorization-permissions.png[alt=Keycloak Authorization Permiss
 It explains why the endpoint has no `@RolesAllowed` annotations - the resource access permissions are set directly in Keycloak.
 
 [[keycloak-dev-mode]]
-== Running the Application in Dev mode
+== Running the application in dev mode
 
 To run the application in dev mode, use:
 
 include::{includes}/devtools/dev.adoc[]
 
-xref:security-openid-connect-dev-services.adoc[Dev Services for Keycloak] will launch a Keycloak container and import the link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart/config/quarkus-realm.json[realm configuration file].
+xref:security-openid-connect-dev-services.adoc[Dev Services for Keycloak] launches a Keycloak container and imports the `quarkus-realm.json` realm file.
 
-Open a xref:dev-ui.adoc[Dev UI] available at http://localhost:8080/q/dev-ui[/q/dev-ui] and click on a `Provider: Keycloak` link in an `OpenID Connect` `Dev UI` card.
+Open a xref:dev-ui.adoc[Dev UI] available at http://localhost:8080/q/dev-ui[/q/dev-ui] and click a `Provider: Keycloak` link in an `OpenID Connect` `Dev UI` card.
 
-You will be asked to log in into a `Single Page Application` provided by `OpenID Connect Dev UI`:
+When asked to log in to a `Single Page Application` provided by `OpenID Connect Dev UI`:
 
- * Login as `alice` (password: `alice`) who only has a `User Permission` to access the `/api/users/me` resource
- ** accessing `/api/admin` will return `403`
- ** accessing `/api/users/me` will return `200`
- * Logout and login as `admin` (password: `admin`) who has both `Admin Permission` to access the `/api/admin` resource and `User Permission` to access the `/api/users/me` resource
- ** accessing `/api/admin` will return `200`
- ** accessing `/api/users/me` will return `200`
+ * Log in as `alice` (password: `alice`), who only has a `User Permission` to access the `/api/users/me` resource:
+ ** Access `/api/admin`, which returns `403`.
+ ** Access `/api/users/me`, which returns `200`.
+ * Log out and log in as `admin` (password: `admin`), who has both `Admin Permission` to access the `/api/admin` resource and `User Permission` to access the `/api/users/me` resource:
+ ** Access `/api/admin`, which returns `200`.
+ ** Access `/api/users/me`, which returns `200`.
 
-If you have started xref:security-openid-connect-dev-services.adoc[Dev Services for Keycloak] without importing a realm file such as link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart/config/quarkus-realm.json[quarkus-realm.json] which is already configured to support Keycloak Authorization then a default `quarkus` realm without Keycloak authorization policies will be created. In this case you must select the `Keycloak Admin` link in the `OpenId Connect` Dev UI card and configure link:https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization] in the default `quarkus` realm.
+If you have started xref:security-openid-connect-dev-services.adoc[Dev Services for Keycloak] without importing a realm file such as link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart/config/quarkus-realm.json[quarkus-realm.json] that is already configured to support Keycloak Authorization, a default `quarkus` realm without Keycloak authorization policies is created.
+In this case, you must select the `Keycloak Admin` link in the `OpenId Connect` Dev UI card and configure link:https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization Services] in the default `quarkus` realm.
 
 The `Keycloak Admin` link is easy to find in Dev UI:
 
 image::dev-ui-oidc-keycloak-card.png[alt=Dev UI OpenID Connect Card,role="center"]
 
-When logging in the Keycloak admin console, the username is `admin` and the password is `admin`.
+When logging into the Keycloak admin console, the username and password are both `admin`.
 
-If your application configures Keycloak authorization with link:https://www.keycloak.org/docs/latest/authorization_services/index.html#_policy_js[JavaScript policies] that are deployed to Keycloak in a jar file then you can configure `Dev Services for Keycloak` to copy this jar to the Keycloak container, for example:
+If your application uses Keycloak authorization configured with link:https://www.keycloak.org/docs/latest/authorization_services/index.html#_policy_js[JavaScript policies] that are deployed in a JAR file, you can set up Dev Services for Keycloak to transfer this archive to the Keycloak container.
+For instance:
 
 [source,properties]
 ----
 quarkus.keycloak.devservices.resource-aliases.policies=/policies.jar <1>
 quarkus.keycloak.devservices.resource-mappings.policies=/opt/keycloak/providers/policies.jar <2>
 ----
-<1> `policies` alias is created for the `/policies.jar` classpath resource. Policy jars can also be located in the file system.
-<2> The policies jar is mapped to the `/opt/keycloak/providers/policies.jar` container location.
+<1> `policies` alias is created for the `/policies.jar` classpath resource.
+The policy archive can also be located in the file system.
+<2> The policies archive is mapped to the `/opt/keycloak/providers/policies.jar` container location.
 
-== Running the Application in JVM mode
+== Running the application in JVM mode
 
-When you're done playing with the `dev` mode" you can run it as a standard Java application.
+After exploring the application in dev mode, you can run it as a standard Java application.
 
 First compile it:
 
@@ -279,17 +298,17 @@ Then run it:
 java -jar target/quarkus-app/quarkus-run.jar
 ----
 
-== Running the Application in Native Mode
+== Running the application in native mode
 
-This same demo can be compiled into native code: no modifications required.
+This same demo can be compiled into native code; no modifications are required.
 
-This implies that you no longer need to install a JVM on your production environment, as the runtime technology is included in the produced binary, and optimized to run with minimal resource overhead.
+This implies that you no longer need to install a JVM on your production environment because the runtime technology is included in the produced binary and optimized to run with minimal resources.
 
-Compilation will take a bit longer, so this step is disabled by default; let's build again by enabling the `native` profile:
+Compilation takes a bit longer, so this step is turned off by default; let's build again by enabling the `native` profile:
 
 include::{includes}/devtools/build-native.adoc[]
 
-After getting a cup of coffee, you'll be able to run this binary directly:
+After a while, you can run this binary directly:
 
 [source,bash]
 ----
@@ -297,13 +316,13 @@ After getting a cup of coffee, you'll be able to run this binary directly:
 ----
 
 [[testing]]
-== Testing the Application
+== Testing the application
 
-See <> section above about testing your application in a dev mode.
+See the preceding <<keycloak-dev-mode>> section about testing your application in dev mode.
 
-You can test the application launched in JVM or Native modes with `curl`.
+You can test the application launched in JVM or native modes with `curl`.
 
-The application is using bearer token authorization and the first thing to do is obtain an access token from the Keycloak Server in order to access the application resources:
+The application uses bearer token authorization, and the first thing to do is obtain an access token from the Keycloak server to access the application resources:
 
 [source,bash]
 ----
@@ -315,11 +334,11 @@ export access_token=$(\
  )
 ----
 
-The example above obtains an access token for user `alice`.
+The preceding example obtains an access token for user `alice`.
 
 Any user is allowed to access the
-`http://localhost:8080/api/users/me` endpoint
-which basically returns a JSON payload with details about the user.
+`http://localhost:8080/api/users/me` endpoint,
+which returns a JSON payload with details about the user.
 
 [source,bash]
 ----
@@ -329,7 +348,7 @@ curl -v -X GET \
 ----
 
 The `http://localhost:8080/api/admin` endpoint can only be accessed by users with the `admin` role.
-If you try to access this endpoint with the previously issued access token, you should get a `403` response from the server.
+If you try to access this endpoint with the previously issued access token, you get a `403` response from the server.
 
 [source,bash]
 ----
@@ -338,7 +357,7 @@ If you try to access this endpoint with the previously issued access token, you
    -H "Authorization: Bearer "$access_token
 ----
 
-In order to access the admin endpoint you should obtain a token for the `admin` user:
+To access the admin endpoint, get a token for the `admin` user:
 
 [source,bash]
 ----
@@ -350,11 +369,10 @@ export access_token=$(\
  )
 ----
 
-== Injecting the Authorization Client
+== Injecting the authorization client
 
-In some cases, you may want to use the https://www.keycloak.org/docs/latest/authorization_services/#_service_client_api[Keycloak Authorization Client Java API] to perform
-specific operations like managing resources and obtaining permissions directly from Keycloak. For that, you can inject a
-`AuthzClient` instance into your beans as follows:
+In some cases, using the link:https://www.keycloak.org/docs/latest/authorization_services/#_service_client_api[Keycloak Authorization Client Java API] is beneficial for tasks such as managing resources and obtaining permissions directly from Keycloak.
+For this purpose, you can inject an `AuthzClient` instance into your beans as follows:
 
 [source,java]
 ----
@@ -364,32 +382,34 @@ public class ProtectedResource {
 }
 ----
 
-Note: If you want to use the `AuthzClient` directly make sure to set `quarkus.keycloak.policy-enforcer.enable=true` otherwise there is no Bean available for injection.
+NOTE: If you want to use the `AuthzClient` directly, set `quarkus.keycloak.policy-enforcer.enable=true`; otherwise, no bean is available for injection.
 
-== Mapping Protected Resources
+== Mapping protected resources
 
-By default, the extension is going to fetch resources on-demand from Keycloak where their `URI` are used to map the resources in your application that should be protected.
+By default, the extension fetches resources on-demand from Keycloak, using their URI to identify and map the resources in your application that need to be protected.
 
-If you want to disable this behavior and fetch resources during startup, you can use the following configuration:
+To disable this on-demand fetching and instead pre-load resources at startup, apply the following configuration setting:
 
 [source,properties]
 ----
 quarkus.keycloak.policy-enforcer.lazy-load-paths=false
 ----
 
-Note that, depending on how many resources you have in Keycloak the time taken to fetch them may impact your application startup time.
+The time required to pre-load resources from Keycloak at startup varies based on their quantity, potentially affecting your application's startup time.
 
-== More About Configuring Protected Resources
+== More about configuring protected resources
 
-In the default configuration, Keycloak is responsible for managing the roles and deciding who can access which routes.
+In the default configuration, Keycloak manages the roles and decides who can access which routes.
 
-To configure the protected routes using the `@RolesAllowed` annotation or the `application.properties` file, check the xref:security-oidc-bearer-token-authentication.adoc[Using OpenID Connect Adapter to Protect Jakarta REST Applications] and xref:security-authorize-web-endpoints-reference.adoc[Security Authorization] guides. For more details, check the xref:security-overview.adoc[Security guide].
+To configure the protected routes by using the `@RolesAllowed` annotation or the `application.properties` file, check the xref:security-oidc-bearer-token-authentication.adoc[OpenID Connect (OIDC) Bearer token authentication] and xref:security-authorize-web-endpoints-reference.adoc[Authorization of web endpoints] guides.
+For more details, check the xref:security-overview.adoc[Quarkus Security overview].
 
-== Access to Public Resources
+== Access to public resources
 
-If you'd like to access a public resource without `quarkus-keycloak-authorization` trying to apply its policies to it then you need to create a `permit` HTTP Policy configuration in `application.properties` as documented in the xref:security-authorize-web-endpoints-reference.adoc[Security Authorization] guide.
+To enable access to a public resource without the `quarkus-keycloak-authorization` applying its policies, create a `permit` HTTP Policy configuration in `application.properties`.
+For more information, see the xref:security-authorize-web-endpoints-reference.adoc[Authorization of web endpoints] guide.
 
-Disabling a policy check using a Keycloak Authorization Policy such as:
+There's no need to deactivate policy checks for a Keycloak Authorization Policy with settings such as these:
 
 [source,properties]
 ----
@@ -397,9 +417,7 @@ quarkus.keycloak.policy-enforcer.paths.1.path=/api/public
 quarkus.keycloak.policy-enforcer.paths.1.enforcement-mode=DISABLED
 ----
 
-is no longer required.
-
-If you'd like to block access to the public resource to anonymous users then you can create an enforcing Keycloak Authorization Policy:
+To block access to the public resource to anonymous users, you can create an enforcing Keycloak Authorization Policy:
 
 [source,properties]
 ----
@@ -407,12 +425,14 @@ quarkus.keycloak.policy-enforcer.paths.1.path=/api/public-enforcing
 quarkus.keycloak.policy-enforcer.paths.1.enforcement-mode=ENFORCING
 ----
 
-Note only the default tenant configuration applies when controlling anonymous access to the public resource is required.
+Only the default tenant configuration applies when you need to control anonymous access to the public resource.
 
-== Checking Permission Scopes Programmatically
+== Checking permission scopes programmatically
 
-In addition to resource permissions, you may want to specify method scopes. The scope usually represents an action that
-can be performed on a resource. You can create an enforcing Keycloak Authorization Policy with method scope like this:
+In addition to resource permissions, you can specify method scopes.
+The scope usually represents an action that can be performed on a resource.
+You can create an enforcing Keycloak Authorization Policy with a method scope.
+For example:
 
 [source,properties]
 ----
@@ -430,11 +450,11 @@ quarkus.keycloak.policy-enforcer.paths.3.path=/api/protected/annotation-way
 ----
 <1> User must have resource permission 'Scope Permission Resource' and scope 'read'
 
-Request path `/api/protected/standard-way` is now secured by the Keycloak Policy Enforcer and does not require
-any additions (such as `@RolesAllowed` annotation). In some cases, you may want to perform the same check programmatically.
-You are allowed to do that by injecting a `SecurityIdentity` instance in your beans, as demonstrated in the example below.
-Alternatively, if you annotate resource method with the `@PermissionsAllowed` annotation, you can achieve the same effect.
-The following example shows three resource method that all requires same 'read' scope:
+The Keycloak Policy Enforcer now secures the `/api/protected/standard-way` request path, eliminating the need for additional annotations such as `@RolesAllowed`.
+However, in certain scenarios, a programmatic check is necessary.
+You can achieve this by injecting a `SecurityIdentity` instance into your beans, as shown in the following example.
+Or, you can get the same result by annotating the resource method with `@PermissionsAllowed`.
+The following example demonstrates three resource methods, each requiring the same `read` scope:
 
 [source,java]
 ----
@@ -490,14 +510,14 @@ public class ProtectedResource {
     }
 }
 ----
-<1> Request sub-path `/standard-way` requires both resource permission and scope `read` according to the configuration properties we set in the `application.properties` before.
+<1> Request sub-path `/standard-way` requires both resource permission and scope `read` according to the configuration properties we previously set in the `application.properties`.
 <2> Request sub-path `/programmatic-way` only requires permission `Scope Permission Resource`, but we can enforce scope with `SecurityIdentity#checkPermission`.
 <3> The `@PermissionsAllowed` annotation only grants access to the requests with permission `Scope Permission Resource` and scope `read`.
 For more information, see the section xref:security-authorize-web-endpoints-reference.adoc#standard-security-annotations[Authorization using annotations] of the Security Authorization guide.
 
-== Multi-Tenancy
+== Multi-tenancy
 
-It is possible to configure multiple policy enforcer configurations, one per each tenant, similarly to how it can be done for xref:security-openid-connect-multitenancy.adoc[Multi-Tenant OpenID Connect Service Applications].
+You can set up policy enforcer configurations for each tenant, similar to how it is done with xref:security-openid-connect-multitenancy.adoc[OpenID Connect (OIDC) multi-tenancy].
 
 For example:
 
@@ -541,16 +561,17 @@ quarkus.keycloak.webapp-tenant.policy-enforcer.paths.1.path=/api/permission
 quarkus.keycloak.webapp-tenant.policy-enforcer.paths.1.claim-information-point.claims.static-claim=static-claim
 ----
 
-== Configuration Reference
+== Configuration reference
 
-The configuration is based on the official https://www.keycloak.org/docs/latest/authorization_services/index.html#_enforcer_filter[Keycloak Policy Enforcer Configuration]. If you are looking for more details about the different configuration options, please take a look at this documentation,
+This configuration adheres to the official https://www.keycloak.org/docs/latest/authorization_services/index.html#_enforcer_filter[Keycloak Policy Enforcer Configuration] guidelines.
+For detailed insights into various configuration options, see the following documentation:
 
 include::{generated-dir}/config/quarkus-keycloak-keycloak-policy-enforcer-config.adoc[opts=optional]
 
 == References
 
-* https://www.keycloak.org/documentation.html[Keycloak Documentation]
-* https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization Services Documentation]
+* https://www.keycloak.org/documentation.html[Keycloak documentation]
+* https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization Services]
 * https://openid.net/connect/[OpenID Connect]
 * https://tools.ietf.org/html/rfc7519[JSON Web Token]
 * xref:security-overview.adoc[Quarkus Security overview]
diff --git a/docs/src/main/asciidoc/security-oidc-code-flow-authentication-tutorial.adoc b/docs/src/main/asciidoc/security-oidc-code-flow-authentication-tutorial.adoc
index 157370f05a139..c266583b568bd 100644
--- a/docs/src/main/asciidoc/security-oidc-code-flow-authentication-tutorial.adoc
+++ b/docs/src/main/asciidoc/security-oidc-code-flow-authentication-tutorial.adoc
@@ -11,11 +11,11 @@ include::_attributes.adoc[]
 :topics: security,oidc,keycloak,authorization
 :extensions: io.quarkus:quarkus-oidc
 
-With the Quarkus OpenID Connect (OIDC) extension, you can protect application HTTP endpoints by using the OIDC Authorization Code Flow mechanism.
+Discover how to secure application HTTP endpoints by using the OpenID Connect (OIDC) authorization code flow mechanism with the Quarkus OIDC extension, providing robust authentication and authorization.
 
-To learn more about the OIDC authorization code flow mechanism, see xref:security-oidc-code-flow-authentication.adoc[OIDC code flow mechanism for protecting web applications].
+For more information, see xref:security-oidc-code-flow-authentication.adoc[OIDC code flow mechanism for protecting web applications].
 
-To learn about how well-known social providers such as Apple, Facebook, GitHub, Google, Mastodon, Microsoft, Twitch, Twitter (X), and Spotify can be used with Quarkus OIDC, see xref:security-openid-connect-providers.adoc[Configuring Well-Known OpenID Connect Providers].
+To learn about how well-known social providers such as Apple, Facebook, GitHub, Google, Mastodon, Microsoft, Twitch, Twitter (X), and Spotify can be used with Quarkus OIDC, see xref:security-openid-connect-providers.adoc[Configuring well-known OpenID Connect providers].
 See also, xref:security-authentication-mechanisms.adoc#other-supported-authentication-mechanisms[Authentication mechanisms in Quarkus].
 
 If you want to protect your service applications by using OIDC Bearer token authentication, see xref:security-oidc-bearer-token-authentication.adoc[OIDC Bearer token authentication].
@@ -27,18 +27,19 @@ include::{includes}/prerequisites.adoc[]
 
 == Architecture
 
-In this example, we build a very simple web application with a single page:
+In this example, we build a simple web application with a single page:
 
 * `/index.html`
 
-This page is protected and can only be accessed by authenticated users.
+This page is protected, and only authenticated users can access it.
 
 == Solution
 
-We recommend that you follow the instructions in the next sections and create the application step by step.
-However, you can go right to the completed example.
+Follow the instructions in the next sections and create the application step by step.
+Alternatively, you can go right to the completed example.
 
-Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive].
+Clone the Git repository by running the `git clone {quickstarts-clone-url}` command.
+Alternatively, download an {quickstarts-archive-url}[archive].
 
 The solution is located in the `security-openid-connect-web-authentication-quickstart` link:{quickstarts-tree-url}/security-openid-connect-web-authentication-quickstart[directory].
 
@@ -48,7 +49,7 @@ The solution is located in the `security-openid-connect-web-authentication-quick
 == Create the Maven project
 
 First, we need a new project.
-Create a new project with the following command:
+Create a new project by running the following command:
 
 :create-app-artifact-id: security-openid-connect-web-authentication-quickstart
 :create-app-extensions: resteasy-reactive,oidc
@@ -59,7 +60,7 @@ If you already have your Quarkus project configured, you can add the `oidc` exte
 :add-extension-extensions: oidc
 include::{includes}/devtools/extension-add.adoc[]
 
-This will add the following to your build file:
+This adds the following dependency to your build file:
 
 [source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"]
 .pom.xml
@@ -78,7 +79,7 @@ implementation("io.quarkus:quarkus-oidc")
 
 == Write the application
 
-Let's write a simple Jakarta REST resource which has all the tokens returned in the authorization code grant response injected:
+Let's write a simple Jakarta REST resource that has all the tokens returned in the authorization code grant response injected:
 
 [source,java]
 ----
@@ -99,29 +100,30 @@ import io.quarkus.oidc.RefreshToken;
 public class TokenResource {
 
    /**
-    * Injection point for the ID Token issued by the OpenID Connect Provider
+    * Injection point for the ID token issued by the OpenID Connect provider
     */
    @Inject
    @IdToken
    JsonWebToken idToken;
 
    /**
-    * Injection point for the Access Token issued by the OpenID Connect Provider
+    * Injection point for the access token issued by the OpenID Connect provider
     */
    @Inject
    JsonWebToken accessToken;
 
    /**
-    * Injection point for the Refresh Token issued by the OpenID Connect Provider
+    * Injection point for the refresh token issued by the OpenID Connect provider
     */
    @Inject
    RefreshToken refreshToken;
 
    /**
-    * Returns the tokens available to the application. This endpoint exists only for demonstration purposes, you should not
-    * expose these tokens in a real application.
+    * Returns the tokens available to the application.
+    * This endpoint exists only for demonstration purposes.
+    * Do not expose these tokens in a real application.
     *
-    * @return a HTML page containing the tokens available to the application
+    * @return an HTML page containing the tokens available to the application.
     */
    @GET
    @Produces("text/html")
@@ -151,15 +153,15 @@ public class TokenResource {
 ----
 
 This endpoint has ID, access, and refresh tokens injected.
-It returns a `preferred_username` claim from the ID token, a `scope` claim from the access token, and also a refresh token availability status.
+It returns a `preferred_username` claim from the ID token, a `scope` claim from the access token, and a refresh token availability status.
 
-Note that you do not have to inject the tokens - it is only required if the endpoint needs to use the ID token to interact with the currently authenticated user or use the access token to access a downstream service on behalf of this user.
+You only need to inject the tokens if the endpoint needs to use the ID token to interact with the currently authenticated user or use the access token to access a downstream service on behalf of this user.
 
 // SJ: TO DO - update link to point to new reference guide. For more information, see <> section.
 
 == Configure the application
 
-The OIDC extension allows you to define the configuration using the `application.properties` file which should be located at the `src/main/resources` directory.
+The OIDC extension allows you to define the configuration by using the `application.properties` file in the `src/main/resources` directory.
 
 [source,properties]
 ----
@@ -173,12 +175,12 @@ quarkus.http.auth.permission.authenticated.policy=authenticated
 
 This is the simplest configuration you can have when enabling authentication to your application.
 
-The `quarkus.oidc.client-id` property references the `client_id` issued by the OIDC provider and the `quarkus.oidc.credentials.secret` property sets the client secret.
+The `quarkus.oidc.client-id` property references the `client_id` issued by the OIDC provider, and the `quarkus.oidc.credentials.secret` property sets the client secret.
 
-The `quarkus.oidc.application-type` property is set to `web-app` in order to tell Quarkus that you want to enable the OIDC authorization code flow, so that your users are redirected to the OIDC provider to authenticate.
+The `quarkus.oidc.application-type` property is set to `web-app` to tell Quarkus that you want to enable the OIDC authorization code flow so that your users are redirected to the OIDC provider to authenticate.
 
 Finally, the `quarkus.http.auth.permission.authenticated` permission is set to tell Quarkus about the paths you want to protect.
-In this case, all paths are being protected by a policy that ensures that only `authenticated` users are allowed to access.
+In this case, all paths are protected by a policy that ensures only `authenticated` users can access them.
 For more information, see xref:security-authorize-web-endpoints-reference.adoc[Security Authorization Guide].
 
 == Start and configure the Keycloak server
@@ -190,23 +192,23 @@ To start a Keycloak server, use Docker and run the following command:
 docker run --name keycloak -e KEYCLOAK_ADMIN=admin -e KEYCLOAK_ADMIN_PASSWORD=admin -p 8180:8080 quay.io/keycloak/keycloak:{keycloak.version} start-dev
 ----
 
-where `keycloak.version` should be set to `23.0.0` or higher.
+where `keycloak.version` is set to `23.0.0` or later.
 
-You should be able to access your Keycloak Server at http://localhost:8180[localhost:8180].
+You can access your Keycloak server at http://localhost:8180[localhost:8180].
 
 To access the Keycloak Administration Console, log in as the `admin` user.
-Username should be `admin` and password `admin`.
+The username and password are both `admin`.
 
-Import the link:{quickstarts-tree-url}/security-openid-connect-web-authentication-quickstart/config/quarkus-realm.json[realm configuration file] to create a new realm.
+To create a new realm, import the link:{quickstarts-tree-url}/security-openid-connect-web-authentication-quickstart/config/quarkus-realm.json[realm configuration file].
 For more information, see the Keycloak documentation about how to https://www.keycloak.org/docs/latest/server_admin/index.html#configuring-realms[create and configure a new realm].
 
 == Run the application in dev and JVM modes
 
-To run the application in a dev mode, use:
+To run the application in dev mode, use:
 
 include::{includes}/devtools/dev.adoc[]
 
-When you're done playing with dev mode, you can run it as a standard Java application.
+After exploring the application in dev mode, you can run it as a standard Java application.
 
 First, compile it:
 
@@ -224,15 +226,14 @@ java -jar target/quarkus-app/quarkus-run.jar
 This same demo can be compiled into native code.
 No modifications are required.
 
-This implies that you no longer need to install a JVM on your production environment, as the runtime technology is included in
-the produced binary, and optimized to run with minimal resource overhead.
+This implies that you no longer need to install a JVM on your production environment, as the runtime technology is included in the produced binary and optimized to run with minimal resources.
 
-Compilation will take a bit longer, so this step is disabled by default.
+Compilation takes longer, so this step is turned off by default.
 You can build again by enabling the native build:
 
 include::{includes}/devtools/build-native.adoc[]
 
-After getting a cup of coffee, you can run this binary directly:
+After a while, you can run this binary directly:
 
 [source,bash]
 ----
@@ -243,35 +244,37 @@ After getting a cup of coffee, you can run this binary directly:
 
 To test the application, open your browser and access the following URL:
 
-
 * http://localhost:8080/tokens[http://localhost:8080/tokens]
 
-If everything is working as expected, you are redirected to the Keycloak server to authenticate.
+If everything works as expected, you are redirected to the Keycloak server to authenticate.
 
-To authenticate to the application, type the following credentials when at the Keycloak login page:
+To authenticate to the application, enter the following credentials at the Keycloak login page:
 
 * Username: *alice*
 * Password: *alice*
 
-After clicking the `Login` button, you are redirected back to the application and a session cookie will be created.
+After clicking the `Login` button, you are redirected back to the application, and a session cookie is created.
 
-The session for this demo is short-lived and you will be asked to re-authenticate on every page refresh. Please follow the Keycloak https://www.keycloak.org/docs/latest/server_admin/#_timeouts[session timeout] documentation to learn how to increase the session timeouts. For example, you can access Keycloak Admin console directly from Dev UI by selecting a `Keycloak Admin` link if you use xref:security-oidc-code-flow-authentication.adoc#integration-testing-keycloak-devservices[Dev Services for Keycloak] in dev mode:
+The session for this demo is valid for a short period of time and, on every page refresh, you will be asked to re-authenticate.
+For information about how to increase the session timeouts, see the Keycloak https://www.keycloak.org/docs/latest/server_admin/#_timeouts[session timeout] documentation.
+For example, you can access the Keycloak Admin console directly from the dev UI by clicking the `Keycloak Admin` link if you use xref:security-oidc-code-flow-authentication.adoc#integration-testing-keycloak-devservices[Dev Services for Keycloak] in dev mode:
 
 image::dev-ui-oidc-keycloak-card.png[alt=Dev UI OpenID Connect Card,role="center"]
 
 For more information about writing the integration tests that depend on `Dev Services for Keycloak`, see the xref:security-oidc-code-flow-authentication.adoc#integration-testing-keycloak-devservices[Dev Services for Keycloak] section.
 
+:sectnums!:
+
 == Summary
 
-Congratulations!
 You have learned how to set up and use the OIDC authorization code flow mechanism to protect and test application HTTP endpoints.
 After you have completed this tutorial, explore xref:security-oidc-bearer-token-authentication.adoc[OIDC Bearer token authentication] and xref:security-authentication-mechanisms.adoc[other authentication mechanisms].
 
 == References
 * xref:security-overview.adoc[Quarkus Security overview]
 * xref:security-oidc-code-flow-authentication.adoc[OIDC code flow mechanism for protecting web applications]
-* xref:security-openid-connect-providers.adoc[Configuring well-known OpenID Connect Providers]
-* xref:security-openid-connect-client-reference.adoc[OpenID Connect and OAuth2 Client and Filters Reference Guide]
+* xref:security-openid-connect-providers.adoc[Configuring well-known OpenID Connect providers]
+* xref:security-openid-connect-client-reference.adoc[OpenID Connect and OAuth2 Client and Filters reference guide]
 * xref:security-openid-connect-dev-services.adoc[Dev Services for Keycloak]
 * xref:security-jwt-build.adoc[Sign and encrypt JWT tokens with SmallRye JWT Build]
 * xref:security-authentication-mechanisms.adoc#oidc-jwt-oauth2-comparison[Choosing between OpenID Connect, SmallRye JWT, and OAuth2 authentication mechanisms]
diff --git a/docs/src/main/asciidoc/security-openid-connect-client.adoc b/docs/src/main/asciidoc/security-openid-connect-client.adoc
index bcce8989a88da..2e0f6906e3584 100644
--- a/docs/src/main/asciidoc/security-openid-connect-client.adoc
+++ b/docs/src/main/asciidoc/security-openid-connect-client.adoc
@@ -3,18 +3,20 @@ This guide is maintained in the main Quarkus repository
 and pull requests should be submitted there:
 https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
 ////
-= OpenID Connect Client and Token Propagation Quickstart
+= OpenID Connect client and token propagation quickstart
 include::_attributes.adoc[]
+:diataxis-type: tutorial
 :categories: security
-:summary: This guide explains how to use OpenID Connect and OAuth2 Client and Filters to acquire, refresh and propagate access tokens.
 :topics: security,oidc,client
 :extensions: io.quarkus:quarkus-oidc-client
 
-This quickstart demonstrates how to use `OpenID Connect Client Reactive Filter` to acquire and propagate access tokens as `HTTP Authorization Bearer` access tokens, alongside `OpenID Token Propagation Reactive Filter` which propagates the incoming `HTTP Authorization Bearer` access tokens.
+Learn how to use OpenID Connect (OIDC) and OAuth2 clients with filters to get, refresh, and propagate access tokens in your applications.
 
-Please check xref:security-openid-connect-client-reference.adoc[OpenID Connect Client and Token Propagation Reference Guide] for all the information related to `Oidc Client` and `Token Propagation` support in Quarkus.
+This approach uses an OIDC client Reactive filter to acquire and propagate access tokens as HTTP Authorization Bearer tokens, and an OIDC token propagation Reactive filter to propagate the incoming bearer access tokens.
 
-Please also read xref:security-oidc-bearer-token-authentication.adoc[OIDC Bearer token authentication] guide if you need to protect your applications using Bearer Token Authorization.
+For more information about `Oidc Client` and `Token Propagation` support in Quarkus, see the xref:security-openid-connect-client-reference.adoc[OpenID Connect (OIDC) and OAuth2 client and filters reference guide].
+
+To protect your applications by using Bearer Token Authorization, see the xref:security-oidc-bearer-token-authentication.adoc[OpenID Connect (OIDC) Bearer token authentication] guide.
 
 == Prerequisites
 
@@ -24,23 +26,29 @@ include::{includes}/prerequisites.adoc[]
 
 == Architecture
 
-In this example, we will build an application which consists of two Jakarta REST resources, `FrontendResource` and `ProtectedResource`. `FrontendResource` propagates access tokens to `ProtectedResource` and uses either `OpenID Connect Client Reactive Filter` to acquire a token first before propagating it or `OpenID Token Propagation Reactive Filter` to propagate the incoming, already existing access token.
+In this example, an application is built with two Jakarta REST resources, `FrontendResource` and `ProtectedResource`.
+Here, `FrontendResource` uses one of two methods to propagate access tokens to `ProtectedResource`:
+
+* It can get a token by using an OIDC client Reactive filter before propagating it.
+* It can use an OIDC token propagation Reactive filter to propagate the incoming access token.
 
-`FrontendResource` has 4 endpoints:
+`FrontendResource` has four endpoints:
 
 * `/frontend/user-name-with-oidc-client-token`
 * `/frontend/admin-name-with-oidc-client-token`
 * `/frontend/user-name-with-propagated-token`
 * `/frontend/admin-name-with-propagated-token`
 
-`FrontendResource` will use REST Client with `OpenID Connect Client Reactive Filter` to acquire and propagate an access token to `ProtectedResource` when either `/frontend/user-name-with-oidc-client-token` or `/frontend/admin-name-with-oidc-client-token` is called. And it will use REST Client with `OpenID Connect Token Propagation Reactive Filter` to propagate the current incoming access token to `ProtectedResource` when either `/frontend/user-name-with-propagated-token` or `/frontend/admin-name-with-propagated-token` is called.
+`FrontendResource` uses a REST Client with an OIDC client Reactive filter to get and propagate an access token to `ProtectedResource` when either `/frontend/user-name-with-oidc-client-token` or `/frontend/admin-name-with-oidc-client-token` is called.
+Also, `FrontendResource` uses a REST Client with `OpenID Connect Token Propagation Reactive Filter` to propagate the current incoming access token to `ProtectedResource` when either `/frontend/user-name-with-propagated-token` or `/frontend/admin-name-with-propagated-token` is called.
 
-`ProtecedResource` has 2 endpoints:
+`ProtectedResource` has two endpoints:
 
 * `/protected/user-name`
 * `/protected/admin-name`
 
-Both of these endpoints return the username extracted from the incoming access token which was propagated to `ProtectedResource` from `FrontendResource`. The only difference between these endpoints is that calling `/protected/user-name` is only allowed if the current access token has a `user` role and calling `/protected/admin-name` is only allowed if the current access token has an `admin` role.
+Both endpoints return the username extracted from the incoming access token, which was propagated to `ProtectedResource` from `FrontendResource`.
+The only difference between these endpoints is that calling `/protected/user-name` is only allowed if the current access token has a `user` role, and calling `/protected/admin-name` is only allowed if the current access token has an `admin` role.
 
 == Solution
 
@@ -49,24 +57,25 @@ However, you can go right to the completed example.
 
 Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive].
 
-The solution is located in the `security-openid-connect-client-quickstart` link:{quickstarts-tree-url}/security-openid-connect-client-quickstart[directory].
+The solution is in the `security-openid-connect-client-quickstart` link:{quickstarts-tree-url}/security-openid-connect-client-quickstart[directory].
 
-== Creating the Maven Project
+== Creating the Maven project
 
-First, we need a new project. Create a new project with the following command:
+First, you need a new project.
+Create a new project with the following command:
 
 :create-app-artifact-id: security-openid-connect-client-quickstart
 :create-app-extensions: oidc,oidc-client-reactive-filter,oidc-token-propagation-reactive,resteasy-reactive
 include::{includes}/devtools/create-app.adoc[]
 
-This command generates a Maven project, importing the `oidc`, `oidc-client-reactive-filter`, `oidc-token-propagation-reactive-filter` and `resteasy-reactive` extensions.
+This command generates a Maven project, importing the `oidc`, `oidc-client-reactive-filter`, `oidc-token-propagation-reactive-filter`, and `resteasy-reactive` extensions.
 
 If you already have your Quarkus project configured, you can add these extensions to your project by running the following command in your project base directory:
 
 :add-extension-extensions: oidc,oidc-client-reactive-filter,oidc-token-propagation-reactive,resteasy-reactive
 include::{includes}/devtools/extension-add.adoc[]
 
-This will add the following to your build file:
+This command adds the following extensions to your build file:
 
 [source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"]
 .pom.xml
@@ -97,7 +106,7 @@ implementation("io.quarkus:quarkus-oidc,oidc-client-reactive-filter,oidc-token-p
 
 == Writing the application
 
-Let's start by implementing `ProtectedResource`:
+Start by implementing `ProtectedResource`:
 
 [source,java]
 ----
@@ -139,9 +148,12 @@ public class ProtectedResource {
 }
 ----
 
-As you can see `ProtectedResource` returns a name from both `userName()` and `adminName()` methods. The name is extracted from the current `JsonWebToken`.
+`ProtectedResource` returns a name from both `userName()` and `adminName()` methods.
+The name is extracted from the current `JsonWebToken`.
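+
+For illustration, a minimal sketch of what such a resource can look like is shown below; the package name and method details are assumptions, not necessarily the exact quickstart code:
+
+[source,java]
+----
+package org.acme.security.openid.connect.client; // assumed package name
+
+import jakarta.annotation.security.RolesAllowed;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+
+import org.eclipse.microprofile.jwt.JsonWebToken;
+
+import io.smallrye.mutiny.Uni;
+
+@Path("/protected")
+public class ProtectedResource {
+
+    @Inject
+    JsonWebToken principal; // the verified incoming access token
+
+    @GET
+    @RolesAllowed("user")
+    @Produces("text/plain")
+    @Path("user-name")
+    public Uni<String> userName() {
+        // Return the name carried by the propagated access token
+        return Uni.createFrom().item(principal.getName());
+    }
+
+    @GET
+    @RolesAllowed("admin")
+    @Produces("text/plain")
+    @Path("admin-name")
+    public Uni<String> adminName() {
+        return Uni.createFrom().item(principal.getName());
+    }
+}
+----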
+
+Next, add two REST clients: one that uses `OidcClientRequestReactiveFilter` and one that uses `AccessTokenRequestReactiveFilter`.
+`FrontendResource` uses these two clients to call `ProtectedResource`.
 
-Next let's add a REST Client with `OidcClientRequestReactiveFilter` and another REST Client with `AccessTokenRequestReactiveFilter`. `FrontendResource` will use these two clients to call `ProtectedResource`:
+Add the REST Client that uses `OidcClientRequestReactiveFilter`:
 
 [source,java]
 ----
@@ -174,7 +186,9 @@ public interface RestClientWithOidcClientFilter {
 }
 ----
 
-where `RestClientWithOidcClientFilter` will depend on `OidcClientRequestReactiveFilter` to acquire and propagate the tokens and
+The `RestClientWithOidcClientFilter` interface depends on `OidcClientRequestReactiveFilter` to get and propagate the tokens.
+
+Add the REST Client that uses `AccessTokenRequestReactiveFilter`:
 
 [source,java]
 ----
@@ -207,11 +221,13 @@ public interface RestClientWithTokenPropagationFilter {
 }
 ----
 
-where `RestClientWithTokenPropagationFilter` will depend on `AccessTokenRequestReactiveFilter` to propagate the incoming, already existing tokens.
+The `RestClientWithTokenPropagationFilter` interface depends on `AccessTokenRequestReactiveFilter` to propagate the incoming, already-existing tokens.
 
-Note that both `RestClientWithOidcClientFilter` and `RestClientWithTokenPropagationFilter` interfaces are identical - the reason behind it is that combining `OidcClientRequestReactiveFilter` and `AccessTokenRequestReactiveFilter` on the same REST Client will cause side effects as both filters can interfere with other, for example, `OidcClientRequestReactiveFilter` may override the token propagated by `AccessTokenRequestReactiveFilter` or `AccessTokenRequestReactiveFilter` can fail if it is called when no token is available to propagate and `OidcClientRequestReactiveFilter` is expected to acquire a new token instead.
+Note that the `RestClientWithOidcClientFilter` and `RestClientWithTokenPropagationFilter` interfaces are identical.
+They are kept separate because combining `OidcClientRequestReactiveFilter` and `AccessTokenRequestReactiveFilter` on the same REST Client causes side effects: the two filters can interfere with each other.
+For example, `OidcClientRequestReactiveFilter` can override the token propagated by `AccessTokenRequestReactiveFilter`, or `AccessTokenRequestReactiveFilter` can fail if it is called when no token is available to propagate and `OidcClientRequestReactiveFilter` is expected to get a new token instead.
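+
+As an illustration, a minimal sketch of the client that registers `OidcClientRequestReactiveFilter` follows; the `configKey` value, package name, and method names are assumptions rather than the exact quickstart code:
+
+[source,java]
+----
+package org.acme.security.openid.connect.client; // assumed package name
+
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+
+import org.eclipse.microprofile.rest.client.annotation.RegisterProvider;
+import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
+
+import io.quarkus.oidc.client.reactive.filter.OidcClientRequestReactiveFilter;
+import io.smallrye.mutiny.Uni;
+
+// Acquires a token with OidcClientRequestReactiveFilter and propagates it to ProtectedResource.
+@RegisterRestClient(configKey = "protected-api") // hypothetical config key
+@RegisterProvider(OidcClientRequestReactiveFilter.class)
+@Path("/protected")
+public interface RestClientWithOidcClientFilter {
+
+    @GET
+    @Path("user-name")
+    Uni<String> getUserName();
+
+    @GET
+    @Path("admin-name")
+    Uni<String> getAdminName();
+}
+----
+
+`RestClientWithTokenPropagationFilter` looks the same, except that it registers `AccessTokenRequestReactiveFilter` so that the incoming token is propagated instead of a new one being acquired.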
 
-Now let's complete creating the application with adding `FrontendResource`:
+Now, finish creating the application by adding `FrontendResource`:
 
 [source,java]
 ----
@@ -266,9 +282,10 @@ public class FrontendResource {
 }
 ----
 
-`FrontendResource` will use REST Client with `OpenID Connect Client Reactive Filter` to acquire and propagate an access token to `ProtectedResource` when either `/frontend/user-name-with-oidc-client-token` or `/frontend/admin-name-with-oidc-client-token` is called. And it will use REST Client with `OpenID Connect Token Propagation Reactive Filter` to propagate the current incoming access token to `ProtectedResource` when either `/frontend/user-name-with-propagated-token` or `/frontend/admin-name-with-propagated-token` is called.
+`FrontendResource` uses a REST Client with an OIDC client Reactive filter to get and propagate an access token to `ProtectedResource` when either `/frontend/user-name-with-oidc-client-token` or `/frontend/admin-name-with-oidc-client-token` is called.
+Also, `FrontendResource` uses REST Client with `OpenID Connect Token Propagation Reactive Filter` to propagate the current incoming access token to `ProtectedResource` when either `/frontend/user-name-with-propagated-token` or `/frontend/admin-name-with-propagated-token` is called.
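+
+For illustration, a minimal sketch of a pair of such endpoints follows; the injected client interfaces match the sketches above, and the method names are assumptions rather than the exact quickstart code:
+
+[source,java]
+----
+package org.acme.security.openid.connect.client; // assumed package name
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+
+import org.eclipse.microprofile.rest.client.inject.RestClient;
+
+import io.smallrye.mutiny.Uni;
+
+@Path("/frontend")
+public class FrontendResource {
+
+    @Inject
+    @RestClient
+    RestClientWithOidcClientFilter restClientWithOidcClientFilter;
+
+    @Inject
+    @RestClient
+    RestClientWithTokenPropagationFilter restClientWithTokenPropagationFilter;
+
+    @GET
+    @Path("user-name-with-oidc-client-token")
+    @Produces("text/plain")
+    public Uni<String> getUserNameWithOidcClientToken() {
+        // OidcClientRequestReactiveFilter acquires a token before this call goes out
+        return restClientWithOidcClientFilter.getUserName();
+    }
+
+    @GET
+    @Path("user-name-with-propagated-token")
+    @Produces("text/plain")
+    public Uni<String> getUserNameWithPropagatedToken() {
+        // AccessTokenRequestReactiveFilter propagates the incoming bearer token
+        return restClientWithTokenPropagationFilter.getUserName();
+    }
+
+    // The admin-name-with-oidc-client-token and admin-name-with-propagated-token
+    // endpoints follow the same pattern, calling getAdminName() instead.
+}
+----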
 
-Finally, let's add a Jakarta REST `ExceptionMapper`:
+Finally, add a Jakarta REST `ExceptionMapper`:
 
 [source,java]
 ----
@@ -291,11 +308,13 @@ public class FrontendExceptionMapper implements ExceptionMapper> section below for more information.
+NOTE: Adding a `%prod.` profile prefix to `quarkus.oidc.auth-server-url` ensures that `Dev Services for Keycloak` launches a container for you when the application is run in dev or test modes.
+For more information, see the <<keycloak-dev-mode>> section.
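+
+For example, a configuration along the following lines keeps Dev Services active in dev and test modes; the URL and client id are placeholders that assume the Keycloak container from the next section and the `quarkus` realm:
+
+[source,properties]
+----
+# Applied only in the prod profile; dev and test modes rely on Dev Services for Keycloak
+%prod.quarkus.oidc.auth-server-url=http://localhost:8180/realms/quarkus
+quarkus.oidc.client-id=frontend
+----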
 
-== Starting and Configuring the Keycloak Server
+== Starting and configuring the Keycloak server
 
-NOTE: Do not start the Keycloak server when you run the application in dev mode or test modes - `Dev Services for Keycloak` will launch a container. See <> section below for more information. Make sure to put the link:{quickstarts-tree-url}/security-openid-connect-client-quickstart/config/quarkus-realm.json[realm configuration file] on the classpath (`target/classes` directory) so that it gets imported automatically when running in dev mode - unless you have already built a link:{quickstarts-tree-url}/security-openid-connect-quickstart[complete solution] in which case this realm file will be added to the classpath during the build.
+NOTE: Do not start the Keycloak server when you run the application in dev or test modes; `Dev Services for Keycloak` launches a container.
+For more information, see the <<keycloak-dev-mode>> section.
+Ensure you put the link:{quickstarts-tree-url}/security-openid-connect-client-quickstart/config/quarkus-realm.json[realm configuration file] on the classpath, in the `target/classes` directory.
+This placement ensures that the file is automatically imported in dev mode.
+However, if you have already built a link:{quickstarts-tree-url}/security-openid-connect-quickstart[complete solution], you do not need to add the realm file to the classpath because the build process has already done so.
 
-To start a Keycloak Server you can use Docker and just run the following command:
+To start a Keycloak Server, you can use Docker and run the following command:
 
 [source,bash,subs=attributes+]
 ----
 docker run --name keycloak -e KEYCLOAK_ADMIN=admin -e KEYCLOAK_ADMIN_PASSWORD=admin -p 8180:8080 quay.io/keycloak/keycloak:{keycloak.version} start-dev
 ----
 
-where `keycloak.version` should be set to `17.0.0` or higher.
+Set `{keycloak.version}` to `23.0.0` or later.
 
-You should be able to access your Keycloak Server at http://localhost:8180[localhost:8180].
+You can access your Keycloak Server at http://localhost:8180[localhost:8180].
 
-Log in as the `admin` user to access the Keycloak Administration Console. Username should be `admin` and password `admin`.
+Log in as the `admin` user to access the Keycloak Administration Console.
+The password is `admin`.
 
-Import the link:{quickstarts-tree-url}/security-openid-connect-client-quickstart/config/quarkus-realm.json[realm configuration file] to create a new realm. For more details, see the Keycloak documentation about how to https://www.keycloak.org/docs/latest/server_admin/index.html#_create-realm[create a new realm].
+Import the link:{quickstarts-tree-url}/security-openid-connect-client-quickstart/config/quarkus-realm.json[realm configuration file] to create a new realm.
+For more details, see the Keycloak documentation about how to https://www.keycloak.org/docs/latest/server_admin/index.html#_create-realm[create a new realm].
 
-This `quarkus` realm file will add a `frontend` client, and `alice` and `admin` users. `alice` has a `user` role, `admin` - both `user` and `admin` roles.
+This `quarkus` realm file adds a `frontend` client, and `alice` and `admin` users.
+`alice` has a `user` role.
+`admin` has both `user` and `admin` roles.
 
 [[keycloak-dev-mode]]
-== Running the Application in Dev mode
+== Running the application in dev mode
 
 To run the application in a dev mode, use:
 
 include::{includes}/devtools/dev.adoc[]
 
-xref:security-openid-connect-dev-services.adoc[Dev Services for Keycloak] will launch a Keycloak container and import a `quarkus-realm.json`.
+xref:security-openid-connect-dev-services.adoc[Dev Services for Keycloak] launches a Keycloak container and imports `quarkus-realm.json`.
 
-Open a xref:dev-ui.adoc[Dev UI] available at http://localhost:8080/q/dev-ui[/q/dev-ui] and click on a `Provider: Keycloak` link in an `OpenID Connect` `Dev UI` card.
+Open the xref:dev-ui.adoc[Dev UI], available at http://localhost:8080/q/dev-ui[/q/dev-ui], and click the `Provider: Keycloak` link in the *OpenID Connect Dev UI* card.
 
-You will be asked to log in into a `Single Page Application` provided by `OpenID Connect Dev UI`:
+When asked, log in to a `Single Page Application` provided by the OpenID Connect Dev UI:
 
- * Login as `alice` (password: `alice`) who has a `user` role
- ** accessing `/frontend/user-name-with-propagated-token` will return `200`
- ** accessing `/frontend/admin-name-with-propagated-token` will return `403`
- * Logout and login as `admin` (password: `admin`) who has both `admin` and `user` roles
- ** accessing `/frontend/user-name-with-propagated-token` will return `200`
- ** accessing `/frontend/admin-name-with-propagated-token` will return `200`
+ * Log in as `alice` with the password `alice`.
+This user has a `user` role.
+ ** Access `/frontend/user-name-with-propagated-token`, which returns `200`.
+ ** Access `/frontend/admin-name-with-propagated-token`, which returns `403`.
+ * Log out and log back in as `admin` with the password `admin`.
+This user has both `admin` and `user` roles.
+ ** Access `/frontend/user-name-with-propagated-token`, which returns `200`.
+ ** Access `/frontend/admin-name-with-propagated-token`, which returns `200`.
 
-In this case you are testing that `FrontendResource` can propagate the access tokens acquired by `OpenID Connect Dev UI`.
+In this case, you are testing that `FrontendResource` can propagate the access tokens from the OpenID Connect Dev UI.
 
-== Running the Application in JVM mode
+== Running the application in JVM mode
 
-When you're done playing with the `dev` mode" you can run it as a standard Java application.
+After exploring the application in dev mode, you can run it as a standard Java application.
 
-First compile it:
+First, compile it:
 
 include::{includes}/devtools/build.adoc[]
 
-Then run it:
+Then, run it:
 
 [source,bash]
 ----
 java -jar target/quarkus-app/quarkus-run.jar
 ----
 
-== Running the Application in Native Mode
+== Running the application in native mode
 
-This same demo can be compiled into native code: no modifications required.
+You can compile this demo into native code; no modifications are required.
 
 This implies that you no longer need to install a JVM on your
 production environment, as the runtime technology is included in
-the produced binary, and optimized to run with minimal resource overhead.
+the produced binary and optimized to run with minimal resources.
 
-Compilation will take a bit longer, so this step is disabled by default;
-let's build again by enabling the `native` profile:
+Compilation takes longer, so this step is turned off by default.
+To build again, enable the `native` profile:
 
 include::{includes}/devtools/build-native.adoc[]
 
-After getting a cup of coffee, you'll be able to run this binary directly:
+After a little while, when the build finishes, you can run the native binary directly:
 
 [source,bash]
 ----
 ./target/security-openid-connect-quickstart-1.0.0-SNAPSHOT-runner
 ----
 
-== Testing the Application
+== Testing the application
 
-See <> section above about testing your application in dev mode.
+For more information about testing your application in dev mode, see the preceding <<keycloak-dev-mode>> section.
 
 You can test the application launched in JVM or Native modes with `curl`.
 
@@ -429,7 +460,7 @@ export access_token=$(\
  )
 ----
 
-Now use this token to call `/frontend/user-name-with-propagated-token` and `/frontend/admin-name-with-propagated-token`:
+Now, use this token to call `/frontend/user-name-with-propagated-token` and `/frontend/admin-name-with-propagated-token`:
 
 [source,bash]
 ----
@@ -438,7 +469,7 @@ curl -i -X GET \
   -H "Authorization: Bearer "$access_token
 ----
 
-will return `200` status code and the name `alice` while
+This command returns the `200` status code and the name `alice`.
 
 [source,bash]
 ----
@@ -447,9 +478,10 @@ curl -i -X GET \
   -H "Authorization: Bearer "$access_token
 ----
 
-will return `403` - recall that `alice` only has a `user` role.
+In contrast, this command returns `403`.
+Recall that `alice` only has a `user` role.
 
-Next obtain an access token for `admin`:
+Next, obtain an access token for `admin`:
 
 [source,bash]
 ----
@@ -461,7 +493,7 @@ export access_token=$(\
  )
 ----
 
-and use this token to call `/frontend/user-name-with-propagated-token` and `/frontend/admin-name-with-propagated-token`:
+Use this token to call `/frontend/user-name-with-propagated-token`:
 
 [source,bash]
 ----
@@ -470,7 +502,9 @@ curl -i -X GET \
   -H "Authorization: Bearer "$access_token
 ----
 
-will return `200` status code and the name `admin`, and
+This command returns a `200` status code and the name `admin`.
+
+Now, use this token to call `/frontend/admin-name-with-propagated-token`:
 
 [source,bash]
 ----
@@ -479,10 +513,11 @@ curl -i -X GET \
   -H "Authorization: Bearer "$access_token
 ----
 
-will also return `200` status code and the name `admin`, as `admin` has both `user` and `admin` roles.
+This command also returns the `200` status code and the name `admin` because `admin` has both `user` and `admin` roles.
 
 
-Now let's check `FrontendResource` methods which do not propagate the existing tokens but use `OidcClient` to acquire and propagate the tokens. You have seen that `OidcClient` is configured to acquire the tokens for the `alice` user, so:
+Now, check the `FrontendResource` methods, which do not propagate the existing tokens but use `OidcClient` to get and propagate the tokens.
+As already shown, `OidcClient` is configured to get the tokens for the `alice` user, so:
 
 [source,bash]
 ----
@@ -490,7 +525,7 @@ curl -i -X GET \
   http://localhost:8080/frontend/user-name-with-oidc-client-token
 ----
 
-will return `200` status code and the name `alice`, but
+This command returns the `200` status code and the name `alice`.
 
 [source,bash]
 ----
@@ -498,7 +533,7 @@ curl -i -X GET \
   http://localhost:8080/frontend/admin-name-with-oidc-client-token
 ----
 
-will return `403` status code.
+In contrast with the preceding command, this command returns a `403` status code because `OidcClient` acquires a token for `alice`, who has only the `user` role.
 
 == References
 
diff --git a/docs/src/main/asciidoc/security-overview.adoc b/docs/src/main/asciidoc/security-overview.adoc
index 6bad18e92531c..da8ded90a71f3 100644
--- a/docs/src/main/asciidoc/security-overview.adoc
+++ b/docs/src/main/asciidoc/security-overview.adoc
@@ -10,18 +10,18 @@ include::_attributes.adoc[]
 :categories: security
 :topics: security
 
-Quarkus Security is a framework that provides the architecture, multiple authentication and authorization mechanisms, and other tools for you to build secure and production-quality Java applications.
+Quarkus Security is a framework that provides the architecture, multiple authentication and authorization mechanisms, and other tools to build secure and production-quality Java applications.
 
-Before building security into your Quarkus applications, learn about the xref:security-architecture.adoc[Quarkus Security architecture] and the different authentication mechanisms and features that you can use.
+Before building security into your Quarkus applications, learn about the xref:security-architecture.adoc[Quarkus Security architecture] and the different authentication mechanisms and features you can use.
 
 == Key features of Quarkus Security
 
 The Quarkus Security framework provides built-in security authentication mechanisms for Basic, Form-based, and mutual TLS (mTLS) authentication.
 You can also use other well-known xref:security-authentication-mechanisms.adoc#other-supported-authentication-mechanisms[authentication mechanisms], such as OpenID Connect (OIDC) and WebAuthn.
 
-Authentication mechanisms depend on xref:security-identity-providers.adoc[Identity providers] to verify the authentication credentials and map them to a `SecurityIdentity` instance, which has the username, roles, original authentication credentials, and other attributes.
+Authentication mechanisms depend on xref:security-identity-providers.adoc[Identity providers] to verify the authentication credentials and map them to a `SecurityIdentity` instance with the username, roles, original authentication credentials, and other attributes.
 
-{project-name} also includes built-in security to allow for role-based access control (RBAC) based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints, and CDI beans.
+{project-name} also includes built-in security to allow for role-based access control (RBAC) based on the common security annotations `@RolesAllowed`, `@DenyAll`, and `@PermitAll` on REST endpoints and Contexts and Dependency Injection (CDI) beans.
 For more information, see the Quarkus xref:security-authorize-web-endpoints-reference.adoc[Authorization of web endpoints] guide.
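+
+For instance, a minimal sketch of such an annotated endpoint (names are illustrative only):
+
+[source,java]
+----
+package org.acme; // illustrative package
+
+import jakarta.annotation.security.RolesAllowed;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+
+@Path("/admin")
+public class AdminResource {
+
+    @GET
+    @RolesAllowed("admin") // only callers with the admin role can invoke this endpoint
+    public String report() {
+        return "admin-only data";
+    }
+}
+----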
 
 Quarkus Security also supports the following features:
@@ -49,7 +49,7 @@ After successfully securing your Quarkus application with Basic authentication,
 
 == Quarkus Security testing
 
-Guidance for testing Quarkus Security features and ensuring that your Quarkus applications are securely protected is provided in the Quarkus xref:security-testing.adoc[Security testing] guide.
+For guidance on testing Quarkus Security features and ensuring that your Quarkus applications are securely protected, see the xref:security-testing.adoc[Security testing] guide.
 
 == More about security features in Quarkus
 
@@ -57,7 +57,7 @@ Guidance for testing Quarkus Security features and ensuring that your Quarkus ap
 === Cross-origin resource sharing
 
 To make your Quarkus application accessible to another application running on a different domain, you need to configure cross-origin resource sharing (CORS).
-For more information about the CORS filter that Quarkus provides, see the xref:security-cors.adoc#cors-filter[CORS filter] section of the Quarkus "Cross-origin resource sharing" guide.
+For more information about the CORS filter Quarkus provides, see the xref:security-cors.adoc#cors-filter[CORS filter] section of the Quarkus "Cross-origin resource sharing" guide.
 
 [[csrf-prevention]]
 === Cross-Site Request Forgery (CSRF) prevention
@@ -85,8 +85,8 @@ For more information, see the Quarkus xref:config.adoc#secrets-in-environment-pr
 [[secure-serialization]]
 === Secure serialization
 
-If your Quarkus Security architecture includes RESTEasy Reactive and Jackson, Quarkus can limit the fields that are included in JSON serialization based on the configured security.
-For more information, see the xref:resteasy-reactive.adoc#secure-serialization[JSON serialisation] section of the Quarkus “Writing REST services with RESTEasy Reactive” guide.
+If your Quarkus Security architecture includes RESTEasy Reactive and Jackson, Quarkus can limit the fields included in JSON serialization based on the configured security.
+For more information, see the xref:resteasy-reactive.adoc#secure-serialization[JSON serialization] section of the Quarkus “Writing REST services with RESTEasy Reactive” guide.
 
 
 [[rest-data-panache]]
diff --git a/docs/src/main/asciidoc/writing-extensions.adoc b/docs/src/main/asciidoc/writing-extensions.adoc
index 85465b26cd04d..cdf5dbe5a04ce 100644
--- a/docs/src/main/asciidoc/writing-extensions.adoc
+++ b/docs/src/main/asciidoc/writing-extensions.adoc
@@ -3051,5 +3051,7 @@ group-id: 
 artifact-id: 
 ----
 
+NOTE: When your repository contains multiple extensions, you need to create a separate file for each individual extension, not just one file for the entire repository.
+
 That's all. Once the pull request is merged, a scheduled job will check Maven Central for new versions and update the xref:extension-registry-user.adoc[Quarkus Extension Registry].
 
diff --git a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java
index f7a7040fe886d..b52cb438b06dc 100644
--- a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java
+++ b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java
@@ -14,8 +14,8 @@
 
 import javax.sql.XADataSource;
 
+import jakarta.enterprise.context.ApplicationScoped;
 import jakarta.enterprise.inject.Default;
-import jakarta.inject.Singleton;
 
 import org.jboss.jandex.ClassType;
 import org.jboss.jandex.DotName;
@@ -72,6 +72,7 @@ class AgroalProcessor {
 
     private static final String OPEN_TELEMETRY_DRIVER = "io.opentelemetry.instrumentation.jdbc.OpenTelemetryDriver";
     private static final DotName DATA_SOURCE = DotName.createSimple(javax.sql.DataSource.class.getName());
+    private static final DotName AGROAL_DATA_SOURCE = DotName.createSimple(AgroalDataSource.class.getName());
 
     @BuildStep
     void agroal(BuildProducer feature) {
@@ -277,7 +278,8 @@ void generateDataSourceBeans(AgroalRecorder recorder,
             SyntheticBeanBuildItem.ExtendedBeanConfigurator configurator = SyntheticBeanBuildItem
                     .configure(AgroalDataSource.class)
                     .addType(DATA_SOURCE)
-                    .scope(Singleton.class)
+                    .addType(AGROAL_DATA_SOURCE)
+                    .scope(ApplicationScoped.class)
                     .setRuntimeInit()
                     .unremovable()
                     .addInjectionPoint(ClassType.create(DotName.createSimple(DataSources.class)))
diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/EagerStartupTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/EagerStartupTest.java
new file mode 100644
index 0000000000000..838c8ad9a7131
--- /dev/null
+++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/EagerStartupTest.java
@@ -0,0 +1,41 @@
+package io.quarkus.agroal.test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import jakarta.inject.Singleton;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.RegisterExtension;
+
+import io.quarkus.agroal.runtime.DataSources;
+import io.quarkus.arc.Arc;
+import io.quarkus.datasource.common.runtime.DataSourceUtil;
+import io.quarkus.test.QuarkusUnitTest;
+
+/**
+ * Check that datasources are created eagerly on application startup.
+ * 

+ * This has always been the case historically, so we want to keep it that way. + */ +public class EagerStartupTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withConfigurationResource("base.properties"); + + @Test + public void shouldStartEagerly() { + var container = Arc.container(); + var instanceHandle = container.instance(DataSources.class); + // Check that the following call won't trigger a lazy initialization: + // the DataSources bean must be eagerly initialized. + assertThat(container.getActiveContext(Singleton.class).getState() + .getContextualInstances().get(instanceHandle.getBean())) + .as("Eagerly instantiated DataSources bean") + .isNotNull(); + // Check that the datasource has already been eagerly created. + assertThat(instanceHandle.get().isDataSourceCreated(DataSourceUtil.DEFAULT_DATASOURCE_NAME)) + .isTrue(); + } + +} diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NoConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NoConfigTest.java index 9e6c4eaade823..9d8e268078bce 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NoConfigTest.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NoConfigTest.java @@ -1,19 +1,94 @@ package io.quarkus.agroal.test; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + import java.sql.SQLException; +import javax.sql.DataSource; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; +import io.agroal.api.AgroalDataSource; +import io.quarkus.arc.Arc; +import io.quarkus.runtime.configuration.ConfigurationException; import io.quarkus.test.QuarkusUnitTest; +/** + * We should be able to start the application, even with no configuration at all. + */ public class NoConfigTest { @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest(); + static final QuarkusUnitTest config = new QuarkusUnitTest() + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + @Inject + MyBean myBean; @Test - public void testNoConfig() throws SQLException { - // we should be able to start the application, even with no configuration at all + public void dataSource_default() { + DataSource ds = Arc.container().instance(DataSource.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(ds).isNotNull(); + // However, if unconfigured, any attempt to use it at runtime will fail. + assertThatThrownBy(() -> ds.getConnection()) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining("quarkus.datasource.jdbc.url has not been defined"); + } + + @Test + public void agroalDataSource_default() { + AgroalDataSource ds = Arc.container().instance(AgroalDataSource.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(ds).isNotNull(); + // However, if unconfigured, any attempt to use it at runtime will fail. 
+ assertThatThrownBy(() -> ds.getConnection()) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining("quarkus.datasource.jdbc.url has not been defined"); + } + + @Test + public void dataSource_named() { + DataSource ds = Arc.container().instance(DataSource.class, + new io.quarkus.agroal.DataSource.DataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(ds).isNull(); + } + + @Test + public void agroalDataSource_named() { + AgroalDataSource ds = Arc.container().instance(AgroalDataSource.class, + new io.quarkus.agroal.DataSource.DataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(ds).isNull(); + } + + @Test + public void injectedBean_default() { + assertThatThrownBy(() -> myBean.useDataSource()) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining("quarkus.datasource.jdbc.url has not been defined"); + } + + @ApplicationScoped + public static class MyBean { + @Inject + DataSource ds; + + public void useDataSource() throws SQLException { + ds.getConnection(); + } } } diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java index 341e2cca19966..0e6e827b06a42 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java @@ -13,6 +13,7 @@ import java.util.function.Function; import java.util.stream.Collectors; +import jakarta.annotation.PostConstruct; import jakarta.annotation.PreDestroy; import jakarta.enterprise.inject.Any; import jakarta.enterprise.inject.Default; @@ -122,6 +123,10 @@ public static AgroalDataSource fromName(String dataSourceName) { .getDataSource(dataSourceName); } + public boolean isDataSourceCreated(String dataSourceName) { + return dataSources.containsKey(dataSourceName); + } + public AgroalDataSource getDataSource(String dataSourceName) { return dataSources.computeIfAbsent(dataSourceName, new Function() { @Override @@ -131,6 +136,13 @@ public AgroalDataSource apply(String s) { }); } + @PostConstruct + public void start() { + for (String dataSourceName : dataSourceSupport.entries.keySet()) { + getDataSource(dataSourceName); + } + } + @SuppressWarnings("resource") public AgroalDataSource doCreateDataSource(String dataSourceName) { if (!dataSourceSupport.entries.containsKey(dataSourceName)) { @@ -140,6 +152,7 @@ public AgroalDataSource doCreateDataSource(String dataSourceName) { DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig .dataSources().get(dataSourceName).jdbc(); DataSourceRuntimeConfig dataSourceRuntimeConfig = dataSourcesRuntimeConfig.dataSources().get(dataSourceName); + DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig .getDataSourceJdbcRuntimeConfig(dataSourceName); diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java index 1c9b1f4c01629..5b8c1893fd1f7 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java @@ -224,13 +224,21 @@ public class ArcConfig { public ArcContextPropagationConfig contextPropagation; /** - * If set to {@code true}, the container 
should try to optimize the contexts for some of the scopes. + * If set to {@code true}, the container should try to optimize the contexts for some of the scopes. If set to {@code auto} + * then optimize the contexts if there's less than 1000 beans in the application. If set to {@code false} do not optimize + * the contexts. *

* Typically, some implementation parts of the context for {@link jakarta.enterprise.context.ApplicationScoped} could be * pregenerated during build. */ - @ConfigItem(defaultValue = "true", generateDocumentation = false) - public boolean optimizeContexts; + @ConfigItem(defaultValue = "auto", generateDocumentation = false) + public OptimizeContexts optimizeContexts; + + public enum OptimizeContexts { + TRUE, + FALSE, + AUTO + } public final boolean isRemoveUnusedBeansFieldValid() { return ALLOWED_REMOVE_UNUSED_BEANS_VALUES.contains(removeUnusedBeans.toLowerCase()); diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java index dc44f9333bcc9..0ad8d634aac41 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java @@ -396,7 +396,23 @@ public Integer compute(AnnotationTarget target, Collection stere } builder.setBuildCompatibleExtensions(buildCompatibleExtensions.entrypoint); - builder.setOptimizeContexts(arcConfig.optimizeContexts); + builder.setOptimizeContexts(new Predicate() { + @Override + public boolean test(BeanDeployment deployment) { + switch (arcConfig.optimizeContexts) { + case TRUE: + return true; + case FALSE: + return false; + case AUTO: + // Optimize the context if there is less than 1000 beans in the app + // Note that removed beans are excluded + return deployment.getBeans().size() < 1000; + default: + throw new IllegalArgumentException("Unexpected value: " + arcConfig.optimizeContexts); + } + } + }); BeanProcessor beanProcessor = builder.build(); ContextRegistrar.RegistrationContext context = beanProcessor.registerCustomContexts(); @@ -598,7 +614,7 @@ public ArcContainerBuildItem initializeContainer(ArcConfig config, ArcRecorder r throws Exception { ArcContainer container = recorder.initContainer(shutdown, currentContextFactory.isPresent() ? 
currentContextFactory.get().getFactory() : null, - config.strictCompatibility, config.optimizeContexts); + config.strictCompatibility); return new ArcContainerBuildItem(container); } diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/OptimizeContextsAutoTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/OptimizeContextsAutoTest.java new file mode 100644 index 0000000000000..666c38dbc79f3 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/OptimizeContextsAutoTest.java @@ -0,0 +1,35 @@ +package io.quarkus.arc.test.context.optimized; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ServiceLoader; + +import jakarta.inject.Inject; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.ComponentsProvider; +import io.quarkus.test.QuarkusUnitTest; + +public class OptimizeContextsAutoTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot(root -> root + .addClasses(SimpleBean.class)) + .overrideConfigKey("quarkus.arc.optimize-contexts", "auto"); + + @Inject + SimpleBean bean; + + @Test + public void testContexts() { + assertTrue(bean.ping()); + for (ComponentsProvider componentsProvider : ServiceLoader.load(ComponentsProvider.class)) { + // We have less than 1000 beans + assertFalse(componentsProvider.getComponents().getContextInstances().isEmpty()); + } + } +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/OptimizeContextsDisabledTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/OptimizeContextsDisabledTest.java new file mode 100644 index 0000000000000..b1b611c81312c --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/OptimizeContextsDisabledTest.java @@ -0,0 +1,34 @@ +package io.quarkus.arc.test.context.optimized; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ServiceLoader; + +import jakarta.inject.Inject; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.ComponentsProvider; +import io.quarkus.test.QuarkusUnitTest; + +public class OptimizeContextsDisabledTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot(root -> root + .addClasses(SimpleBean.class)) + .overrideConfigKey("quarkus.arc.optimize-contexts", "false"); + + @Inject + SimpleBean bean; + + @Test + public void testContexts() { + assertTrue(bean.ping()); + for (ComponentsProvider componentsProvider : ServiceLoader.load(ComponentsProvider.class)) { + assertTrue(componentsProvider.getComponents().getContextInstances().isEmpty()); + } + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/SimpleBean.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/SimpleBean.java new file mode 100644 index 0000000000000..0c545a000a5b0 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/SimpleBean.java @@ -0,0 +1,12 @@ +package io.quarkus.arc.test.context.optimized; + +import jakarta.enterprise.context.ApplicationScoped; + +@ApplicationScoped +class SimpleBean { + + public boolean ping() { + return 
true; + } + +} \ No newline at end of file diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcRecorder.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcRecorder.java index 23ffb5196720d..4c1ecac85a712 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcRecorder.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcRecorder.java @@ -42,12 +42,10 @@ public class ArcRecorder { public static volatile Map, ?>> syntheticBeanProviders; public ArcContainer initContainer(ShutdownContext shutdown, RuntimeValue currentContextFactory, - boolean strictCompatibility, boolean optimizeContexts) - throws Exception { + boolean strictCompatibility) throws Exception { ArcInitConfig.Builder builder = ArcInitConfig.builder(); builder.setCurrentContextFactory(currentContextFactory != null ? currentContextFactory.getValue() : null); builder.setStrictCompatibility(strictCompatibility); - builder.setOptimizeContexts(optimizeContexts); ArcContainer container = Arc.initialize(builder.build()); shutdown.addShutdownTask(new Runnable() { @Override diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/appcds/AppCDSRecorder.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/appcds/AppCDSRecorder.java index 70e03db0d3545..1f5e5a37b5b83 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/appcds/AppCDSRecorder.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/appcds/AppCDSRecorder.java @@ -7,8 +7,10 @@ @Recorder public class AppCDSRecorder { + public static final String QUARKUS_APPCDS_GENERATE_PROP = "quarkus.appcds.generate"; + public void controlGenerationAndExit() { - if (Boolean.parseBoolean(System.getProperty("quarkus.appcds.generate", "false"))) { + if (Boolean.parseBoolean(System.getProperty(QUARKUS_APPCDS_GENERATE_PROP, "false"))) { InitializationTaskRecorder.preventFurtherRecorderSteps(5, "Unable to properly shutdown Quarkus application when creating AppCDS", PreventFurtherStepsException::new); diff --git a/extensions/cache/deployment/pom.xml b/extensions/cache/deployment/pom.xml index 055055a4c594c..7d8309729061c 100644 --- a/extensions/cache/deployment/pom.xml +++ b/extensions/cache/deployment/pom.xml @@ -38,6 +38,10 @@ io.quarkus quarkus-mutiny-deployment + + io.quarkus + quarkus-vertx-deployment + io.quarkus quarkus-vertx-http-dev-ui-spi diff --git a/extensions/cache/runtime/pom.xml b/extensions/cache/runtime/pom.xml index 70993637b709a..e849a5b223d04 100644 --- a/extensions/cache/runtime/pom.xml +++ b/extensions/cache/runtime/pom.xml @@ -27,6 +27,10 @@ io.quarkus quarkus-mutiny + + io.quarkus + quarkus-vertx + io.quarkus quarkus-cache-runtime-spi diff --git a/extensions/datasource/common/pom.xml b/extensions/datasource/common/pom.xml index 6527c853e4c84..acdedbcd44a0c 100644 --- a/extensions/datasource/common/pom.xml +++ b/extensions/datasource/common/pom.xml @@ -12,6 +12,10 @@ quarkus-datasource-common Quarkus - Datasource - Common + + io.quarkus + quarkus-core + org.junit.jupiter junit-jupiter diff --git a/extensions/datasource/common/src/main/java/io/quarkus/datasource/common/runtime/DataSourceUtil.java b/extensions/datasource/common/src/main/java/io/quarkus/datasource/common/runtime/DataSourceUtil.java index f0a4b3378f1ba..7b11b9e4aab7a 100644 --- a/extensions/datasource/common/src/main/java/io/quarkus/datasource/common/runtime/DataSourceUtil.java +++ 
b/extensions/datasource/common/src/main/java/io/quarkus/datasource/common/runtime/DataSourceUtil.java @@ -2,6 +2,10 @@ import java.util.Collection; import java.util.List; +import java.util.Locale; +import java.util.Set; + +import io.quarkus.runtime.configuration.ConfigurationException; public final class DataSourceUtil { @@ -34,6 +38,18 @@ public static List dataSourcePropertyKeys(String datasourceName, String } } + public static ConfigurationException dataSourceNotConfigured(String dataSourceName) { + return new ConfigurationException(String.format(Locale.ROOT, + "Datasource '%s' is not configured." + + " To solve this, configure datasource '%s'." + + " Refer to https://quarkus.io/guides/datasource for guidance.", + dataSourceName, dataSourceName), + Set.of(dataSourcePropertyKey(dataSourceName, "db-kind"), + dataSourcePropertyKey(dataSourceName, "username"), + dataSourcePropertyKey(dataSourceName, "password"), + dataSourcePropertyKey(dataSourceName, "jdbc.url"))); + } + private DataSourceUtil() { } diff --git a/extensions/datasource/deployment/src/main/java/io/quarkus/datasource/deployment/devservices/DevServicesDatasourceProcessor.java b/extensions/datasource/deployment/src/main/java/io/quarkus/datasource/deployment/devservices/DevServicesDatasourceProcessor.java index cccdf480f078b..22b6650110044 100644 --- a/extensions/datasource/deployment/src/main/java/io/quarkus/datasource/deployment/devservices/DevServicesDatasourceProcessor.java +++ b/extensions/datasource/deployment/src/main/java/io/quarkus/datasource/deployment/devservices/DevServicesDatasourceProcessor.java @@ -199,7 +199,7 @@ private RunningDevService startDevDb( LaunchMode launchMode, Optional consoleInstalledBuildItem, LoggingSetupBuildItem loggingSetupBuildItem, GlobalDevServicesConfig globalDevServicesConfig) { boolean explicitlyDisabled = !(dataSourceBuildTimeConfig.devservices().enabled().orElse(true)); - String dataSourcePrettyName = DataSourceUtil.isDefault(dbName) ? "default datasource" : "datasource" + dbName; + String dataSourcePrettyName = DataSourceUtil.isDefault(dbName) ? 
"default datasource" : "datasource " + dbName; if (explicitlyDisabled) { //explicitly disabled diff --git a/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyDefaultDatasourceTest.java b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyDefaultDatasourceTest.java new file mode 100644 index 0000000000000..c172cdecb9a73 --- /dev/null +++ b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyDefaultDatasourceTest.java @@ -0,0 +1,62 @@ +package io.quarkus.flyway.test; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.enterprise.inject.CreationException; +import jakarta.enterprise.inject.Instance; +import jakarta.inject.Inject; + +import org.flywaydb.core.Flyway; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class FlywayExtensionConfigEmptyDefaultDatasourceTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + @Inject + Instance flywayForDefaultDatasource; + + @Inject + MyBean myBean; + + @Test + @DisplayName("If there is no config for the default datasource, the application should boot, but Flyway should be deactivated for that datasource") + public void testBootSucceedsButFlywayDeactivated() { + assertThatThrownBy(flywayForDefaultDatasource::get) + .isInstanceOf(CreationException.class) + .cause() + .hasMessageContainingAll("Unable to find datasource '' for Flyway", + "Datasource '' is not configured.", + "To solve this, configure datasource ''.", + "Refer to https://quarkus.io/guides/datasource for guidance."); + } + + @Test + @DisplayName("If there is no config for the default datasource, the application should boot even if we inject a bean that depends on Liquibase, but actually using Liquibase should fail") + public void testBootSucceedsWithInjectedBeanDependingOnFlywayButFlywayDeactivated() { + assertThatThrownBy(() -> myBean.useFlyway()) + .cause() + .hasMessageContainingAll("Unable to find datasource '' for Flyway", + "Datasource '' is not configured.", + "To solve this, configure datasource ''.", + "Refer to https://quarkus.io/guides/datasource for guidance."); + } + + @ApplicationScoped + public static class MyBean { + @Inject + Flyway flywayForDefaultDatasource; + + public void useFlyway() { + flywayForDefaultDatasource.getConfiguration(); + } + } +} diff --git a/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyNamedDataSourceTest.java b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyNamedDataSourceTest.java new file mode 100644 index 0000000000000..a195ce306b47c --- /dev/null +++ b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyNamedDataSourceTest.java @@ -0,0 +1,41 @@ +package io.quarkus.flyway.test; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import jakarta.enterprise.inject.Instance; +import jakarta.enterprise.inject.UnsatisfiedResolutionException; +import jakarta.inject.Inject; + +import org.flywaydb.core.Flyway; +import org.junit.jupiter.api.DisplayName; +import 
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.flyway.FlywayDataSource; +import io.quarkus.test.QuarkusUnitTest; + +public class FlywayExtensionConfigEmptyNamedDataSourceTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + // We need this otherwise the *default* datasource may impact this test + .overrideConfigKey("quarkus.datasource.db-kind", "h2") + .overrideConfigKey("quarkus.datasource.username", "sa") + .overrideConfigKey("quarkus.datasource.password", "sa") + .overrideConfigKey("quarkus.datasource.jdbc.url", + "jdbc:h2:tcp://localhost/mem:test-quarkus-migrate-at-start;DB_CLOSE_DELAY=-1") + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + @Inject + @FlywayDataSource("users") + Instance flywayForNamedDatasource; + + @Test + @DisplayName("If there is no config for a named datasource, the application should boot, but Flyway should be deactivated for that datasource") + public void testBootSucceedsButFlywayDeactivated() { + assertThatThrownBy(flywayForNamedDatasource::get) + .isInstanceOf(UnsatisfiedResolutionException.class) + .hasMessageContaining("No bean found"); + } +} diff --git a/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyTest.java b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyTest.java deleted file mode 100644 index d5f680c6d9ef6..0000000000000 --- a/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigEmptyTest.java +++ /dev/null @@ -1,36 +0,0 @@ -package io.quarkus.flyway.test; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -import jakarta.enterprise.inject.Instance; -import jakarta.enterprise.inject.UnsatisfiedResolutionException; -import jakarta.inject.Inject; - -import org.flywaydb.core.Flyway; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.test.QuarkusUnitTest; - -/** - * Flyway needs a datasource to work. - * This tests assures, that an error occurs, - * as soon as the default flyway configuration points to a missing default datasource. 
- */ -public class FlywayExtensionConfigEmptyTest { - - @Inject - Instance flyway; - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withApplicationRoot((jar) -> jar - .addAsResource("config-empty.properties", "application.properties")); - - @Test - @DisplayName("Injecting (default) flyway should fail if there is no datasource configured") - public void testFlywayNotAvailableWithoutDataSource() { - assertThrows(UnsatisfiedResolutionException.class, flyway::get); - } -} diff --git a/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigMissingNamedDataSourceTest.java b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigMissingNamedDataSourceTest.java deleted file mode 100644 index af5c7cca818f5..0000000000000 --- a/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionConfigMissingNamedDataSourceTest.java +++ /dev/null @@ -1,37 +0,0 @@ -package io.quarkus.flyway.test; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -import jakarta.enterprise.inject.Instance; -import jakarta.enterprise.inject.UnsatisfiedResolutionException; -import jakarta.inject.Inject; - -import org.flywaydb.core.Flyway; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.flyway.FlywayDataSource; -import io.quarkus.test.QuarkusUnitTest; - -/** - * Flyway needs a datasource to work. - * This tests assures that an error occurs as soon as a named flyway configuration points to a missing datasource. - */ -public class FlywayExtensionConfigMissingNamedDataSourceTest { - - @Inject - @FlywayDataSource("users") - Instance flyway; - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withApplicationRoot((jar) -> jar - .addAsResource("config-for-missing-named-datasource.properties", "application.properties")); - - @Test - @DisplayName("Injecting flyway should fail if the named datasource is missing") - public void testFlywayNotAvailableWithoutDataSource() { - assertThrows(UnsatisfiedResolutionException.class, flyway::get); - } -} diff --git a/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionMigrateAtStartDefaultDatasourceConfigEmptyTest.java b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionMigrateAtStartDefaultDatasourceConfigEmptyTest.java new file mode 100644 index 0000000000000..0f9f506d16d0a --- /dev/null +++ b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionMigrateAtStartDefaultDatasourceConfigEmptyTest.java @@ -0,0 +1,41 @@ +package io.quarkus.flyway.test; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import jakarta.enterprise.inject.CreationException; +import jakarta.enterprise.inject.Instance; +import jakarta.inject.Inject; + +import org.flywaydb.core.Flyway; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class FlywayExtensionMigrateAtStartDefaultDatasourceConfigEmptyTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addAsResource("db/migration/V1.0.0__Quarkus.sql")) + .overrideConfigKey("quarkus.flyway.migrate-at-start", "true") + // The datasource won't be truly "unconfigured" if dev services 
are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + @Inject + Instance flywayForDefaultDatasource; + + @Test + @DisplayName("If there is no config for the default datasource, even if migrate-at-start is enabled, the application should boot, but Flyway should be deactivated for that datasource") + public void testBootSucceedsButFlywayDeactivated() { + assertThatThrownBy(flywayForDefaultDatasource::get) + .isInstanceOf(CreationException.class) + .cause() + .hasMessageContainingAll("Unable to find datasource '' for Flyway", + "Datasource '' is not configured.", + "To solve this, configure datasource ''.", + "Refer to https://quarkus.io/guides/datasource for guidance."); + } + +} diff --git a/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionMigrateAtStartNamedDatasourceConfigEmptyTest.java b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionMigrateAtStartNamedDatasourceConfigEmptyTest.java new file mode 100644 index 0000000000000..48e507e40783d --- /dev/null +++ b/extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionMigrateAtStartNamedDatasourceConfigEmptyTest.java @@ -0,0 +1,44 @@ +package io.quarkus.flyway.test; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import jakarta.enterprise.inject.Instance; +import jakarta.enterprise.inject.UnsatisfiedResolutionException; +import jakarta.inject.Inject; + +import org.flywaydb.core.Flyway; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.flyway.FlywayDataSource; +import io.quarkus.test.QuarkusUnitTest; + +public class FlywayExtensionMigrateAtStartNamedDatasourceConfigEmptyTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addAsResource("db/migration/V1.0.0__Quarkus.sql")) + .overrideConfigKey("quarkus.flyway.users.migrate-at-start", "true") + // We need this otherwise the *default* datasource may impact this test + .overrideConfigKey("quarkus.datasource.db-kind", "h2") + .overrideConfigKey("quarkus.datasource.username", "sa") + .overrideConfigKey("quarkus.datasource.password", "sa") + .overrideConfigKey("quarkus.datasource.jdbc.url", + "jdbc:h2:tcp://localhost/mem:test-quarkus-migrate-at-start;DB_CLOSE_DELAY=-1") + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + @Inject + @FlywayDataSource("users") + Instance flywayForNamedDatasource; + + @Test + @DisplayName("If there is no config for a named datasource, even if migrate-at-start is enabled, the application should boot, but Flyway should be deactivated for that datasource") + public void testBootSucceedsButFlywayDeactivated() { + assertThatThrownBy(flywayForNamedDatasource::get) + .isInstanceOf(UnsatisfiedResolutionException.class) + .hasMessageContaining("No bean found"); + } +} diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java index f8df08e4e7825..4e858dcf71a3b 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java @@ -2,6 +2,7 @@ import java.lang.annotation.Annotation; import java.util.Collection; +import 
java.util.Locale; import java.util.Map; import java.util.function.Function; @@ -31,6 +32,7 @@ import io.quarkus.flyway.FlywayDataSource.FlywayDataSourceLiteral; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; +import io.quarkus.runtime.configuration.ConfigurationException; @Recorder public class FlywayRecorder { @@ -64,15 +66,24 @@ public Function, FlywayContainer> fl return new Function<>() { @Override public FlywayContainer apply(SyntheticCreationalContext context) { - DataSource dataSource = context.getInjectedReference(DataSources.class).getDataSource(dataSourceName); - if (dataSource instanceof UnconfiguredDataSource) { - return new UnconfiguredDataSourceFlywayContainer(dataSourceName); + DataSource dataSource; + try { + dataSource = context.getInjectedReference(DataSources.class).getDataSource(dataSourceName); + if (dataSource instanceof UnconfiguredDataSource) { + throw DataSourceUtil.dataSourceNotConfigured(dataSourceName); + } + } catch (ConfigurationException e) { + // TODO do we really want to enable retrieval of a FlywayContainer for an unconfigured datasource? + // Assigning ApplicationScoped to the FlywayContainer + // and throwing UnsatisfiedResolutionException on bean creation (first access) + // would probably make more sense. + return new UnconfiguredDataSourceFlywayContainer(dataSourceName, String.format(Locale.ROOT, + "Unable to find datasource '%s' for Flyway: %s", + dataSourceName, e.getMessage()), e); } FlywayContainerProducer flywayProducer = context.getInjectedReference(FlywayContainerProducer.class); - FlywayContainer flywayContainer = flywayProducer.createFlyway(dataSource, dataSourceName, hasMigrations, - createPossible); - return flywayContainer; + return flywayProducer.createFlyway(dataSource, dataSourceName, hasMigrations, createPossible); } }; } diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/UnconfiguredDataSourceFlywayContainer.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/UnconfiguredDataSourceFlywayContainer.java index a3206cd8141ae..5011c9898ce0d 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/UnconfiguredDataSourceFlywayContainer.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/UnconfiguredDataSourceFlywayContainer.java @@ -4,13 +4,17 @@ public class UnconfiguredDataSourceFlywayContainer extends FlywayContainer { - public UnconfiguredDataSourceFlywayContainer(String dataSourceName) { + private final String message; + private final Throwable cause; + + public UnconfiguredDataSourceFlywayContainer(String dataSourceName, String message, Throwable cause) { super(null, false, false, false, false, false, dataSourceName, false, false); + this.message = message; + this.cause = cause; } @Override public Flyway getFlyway() { - throw new UnsupportedOperationException( - "Cannot get a Flyway instance for unconfigured datasource " + getDataSourceName()); + throw new UnsupportedOperationException(message, cause); } } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java index 4e66fd21023bb..65831169c0b03 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java @@ -254,14 +254,16 @@ public static Channel createChannel(String name, Set perClientIntercepto 
options.setHttp2ClearTextUpgrade(false); // this fixes i30379 if (!plainText) { + // always set ssl + alpn for plain-text=false + options.setSsl(true); + options.setUseAlpn(true); + if (config.ssl.trustStore.isPresent()) { Optional trustStorePath = config.ssl.trustStore; if (trustStorePath.isPresent()) { PemTrustOptions to = new PemTrustOptions(); to.addCertValue(bufferFor(trustStorePath.get(), "trust store")); options.setTrustOptions(to); - options.setSsl(true); - options.setUseAlpn(true); } Optional certificatePath = config.ssl.certificate; Optional keyPath = config.ssl.key; @@ -270,8 +272,6 @@ public static Channel createChannel(String name, Set perClientIntercepto cko.setCertValue(bufferFor(certificatePath.get(), "certificate")); cko.setKeyValue(bufferFor(keyPath.get(), "key")); options.setKeyCertOptions(cko); - options.setSsl(true); - options.setUseAlpn(true); } } } diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java index 7c90798d1ade0..2ad2bb9e78ac7 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java @@ -871,13 +871,10 @@ private void handleHibernateORMWithNoPersistenceXml( && (!hibernateOrmConfig.defaultPersistenceUnit.datasource.isPresent() || DataSourceUtil.isDefault(hibernateOrmConfig.defaultPersistenceUnit.datasource.get())) && !defaultJdbcDataSource.isPresent()) { - throw new ConfigurationException( - "Model classes are defined for the default persistence unit, but no default datasource was found." - + " The default EntityManagerFactory will not be created." - + " To solve this, configure the default datasource." - + " Refer to https://quarkus.io/guides/datasource for guidance.", - new HashSet<>(Arrays.asList("quarkus.datasource.db-kind", "quarkus.datasource.username", - "quarkus.datasource.password", "quarkus.datasource.jdbc.url"))); + String persistenceUnitName = PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME; + String dataSourceName = DataSourceUtil.DEFAULT_DATASOURCE_NAME; + throw PersistenceUnitUtil.unableToFindDataSource(persistenceUnitName, dataSourceName, + DataSourceUtil.dataSourceNotConfigured(dataSourceName)); } for (Entry persistenceUnitEntry : hibernateOrmConfig.persistenceUnits @@ -1228,14 +1225,12 @@ private static void collectDialectConfigForPersistenceXml(String persistenceUnit private static Optional findJdbcDataSource(String persistenceUnitName, HibernateOrmConfigPersistenceUnit persistenceUnitConfig, List jdbcDataSources) { if (persistenceUnitConfig.datasource.isPresent()) { + String dataSourceName = persistenceUnitConfig.datasource.get(); return Optional.of(jdbcDataSources.stream() - .filter(i -> persistenceUnitConfig.datasource.get().equals(i.getName())) + .filter(i -> dataSourceName.equals(i.getName())) .findFirst() - .orElseThrow(() -> new ConfigurationException(String.format(Locale.ROOT, - "The datasource '%1$s' is not configured but the persistence unit '%2$s' uses it." - + " To solve this, configure datasource '%1$s'." 
- + " Refer to https://quarkus.io/guides/datasource for guidance.", - persistenceUnitConfig.datasource.get(), persistenceUnitName)))); + .orElseThrow(() -> PersistenceUnitUtil.unableToFindDataSource(persistenceUnitName, dataSourceName, + DataSourceUtil.dataSourceNotConfigured(dataSourceName)))); } else if (PersistenceUnitUtil.isDefaultPersistenceUnit(persistenceUnitName)) { return jdbcDataSources.stream() .filter(i -> i.isDefault()) diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java deleted file mode 100644 index 4285499aad472..0000000000000 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java +++ /dev/null @@ -1,35 +0,0 @@ -package io.quarkus.hibernate.orm.config; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.runtime.configuration.ConfigurationException; -import io.quarkus.test.QuarkusUnitTest; - -public class EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest { - - @RegisterExtension - static QuarkusUnitTest runner = new QuarkusUnitTest() - .assertException(t -> { - assertThat(t) - .isInstanceOf(ConfigurationException.class) - .hasMessageContainingAll( - "The datasource 'ds-1' is not configured but the persistence unit '' uses it.", - "To solve this, configure datasource 'ds-1'.", - "Refer to https://quarkus.io/guides/datasource for guidance."); - }) - .withApplicationRoot((jar) -> jar - .addClass(MyEntity.class) - .addAsResource("application-default-pu-explicit-unconfigured-datasource.properties", - "application.properties")); - - @Test - public void testInvalidConfiguration() { - // deployment exception should happen first - Assertions.fail(); - } - -} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java deleted file mode 100644 index 41da125a13c78..0000000000000 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java +++ /dev/null @@ -1,32 +0,0 @@ -package io.quarkus.hibernate.orm.config; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.runtime.configuration.ConfigurationException; -import io.quarkus.test.QuarkusUnitTest; - -public class EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest { - - @RegisterExtension - static QuarkusUnitTest runner = new QuarkusUnitTest() - .assertException(t -> { - assertThat(t) - .isInstanceOf(ConfigurationException.class) - .hasMessageContainingAll( - "Model classes are defined for the default persistence unit, but no default datasource was found. The default EntityManagerFactory will not be created. To solve this, configure the default datasource. 
Refer to https://quarkus.io/guides/datasource for guidance."); - }) - .withApplicationRoot((jar) -> jar - .addClass(MyEntity.class)) - .overrideConfigKey("quarkus.datasource.devservices.enabled", "false"); - - @Test - public void testInvalidConfiguration() { - // deployment exception should happen first - Assertions.fail(); - } - -} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java new file mode 100644 index 0000000000000..95da175428278 --- /dev/null +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java @@ -0,0 +1,35 @@ +package io.quarkus.hibernate.orm.config.datasource; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.hibernate.orm.config.MyEntity; +import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.test.QuarkusUnitTest; + +public class EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClass(MyEntity.class)) + .overrideConfigKey("quarkus.hibernate-orm.datasource", "ds-1") + .overrideConfigKey("quarkus.hibernate-orm.database.generation", "drop-and-create") + .assertException(t -> assertThat(t) + .isInstanceOf(ConfigurationException.class) + .hasMessageContainingAll( + "Unable to find datasource 'ds-1' for persistence unit ''", + "Datasource 'ds-1' is not configured.", + "To solve this, configure datasource 'ds-1'.", + "Refer to https://quarkus.io/guides/datasource for guidance.")); + + @Test + public void testInvalidConfiguration() { + // deployment exception should happen first + Assertions.fail(); + } + +} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java new file mode 100644 index 0000000000000..5e301b02be941 --- /dev/null +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java @@ -0,0 +1,35 @@ +package io.quarkus.hibernate.orm.config.datasource; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.hibernate.orm.config.MyEntity; +import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.test.QuarkusUnitTest; + +public class EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClass(MyEntity.class)) + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false") + .assertException(t -> assertThat(t) + 
.isInstanceOf(ConfigurationException.class) + .hasMessageContainingAll( + "Unable to find datasource '' for persistence unit ''", + "Datasource '' is not configured.", + "To solve this, configure datasource ''.", + "Refer to https://quarkus.io/guides/datasource for guidance.")); + + @Test + public void testInvalidConfiguration() { + // deployment exception should happen first + Assertions.fail(); + } + +} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInNamedPUWithExplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInNamedPUWithExplicitUnconfiguredDatasourceTest.java new file mode 100644 index 0000000000000..bc7f76483c09a --- /dev/null +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInNamedPUWithExplicitUnconfiguredDatasourceTest.java @@ -0,0 +1,35 @@ +package io.quarkus.hibernate.orm.config.datasource; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.hibernate.orm.config.namedpu.MyEntity; +import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.test.QuarkusUnitTest; + +public class EntitiesInNamedPUWithExplicitUnconfiguredDatasourceTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addPackage(MyEntity.class.getPackage().getName())) + .overrideConfigKey("quarkus.hibernate-orm.pu-1.datasource", "ds-1") + .overrideConfigKey("quarkus.hibernate-orm.pu-1.database.generation", "drop-and-create") + .assertException(t -> assertThat(t) + .isInstanceOf(ConfigurationException.class) + .hasMessageContainingAll( + "Unable to find datasource 'ds-1' for persistence unit 'pu-1'", + "Datasource 'ds-1' is not configured.", + "To solve this, configure datasource 'ds-1'.", + "Refer to https://quarkus.io/guides/datasource for guidance.")); + + @Test + public void testInvalidConfiguration() { + // deployment exception should happen first + Assertions.fail(); + } + +} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInNamedPUWithoutDatasourceTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInNamedPUWithoutDatasourceTest.java new file mode 100644 index 0000000000000..b2ca39823cdbb --- /dev/null +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/datasource/EntitiesInNamedPUWithoutDatasourceTest.java @@ -0,0 +1,34 @@ +package io.quarkus.hibernate.orm.config.datasource; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.hibernate.orm.config.namedpu.MyEntity; +import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.test.QuarkusUnitTest; + +public class EntitiesInNamedPUWithoutDatasourceTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addPackage(MyEntity.class.getPackage().getName())) + // There will still be a default datasource if dev services are enabled + 
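// (Illustration only, not part of this change set: the remedy these new "Unable to find datasource"
//  messages point at is ordinary datasource configuration. For the named case exercised in the tests
//  around here it would look roughly like the following application.properties entries, where the
//  db-kind and URL values are placeholder assumptions:
//      quarkus.datasource."ds-1".db-kind=h2
//      quarkus.datasource."ds-1".jdbc.url=jdbc:h2:mem:pu1
//      quarkus.hibernate-orm.pu-1.datasource=ds-1 )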
.overrideConfigKey("quarkus.devservices.enabled", "false") + // We need at least one build-time property, otherwise the PU gets ignored... + .overrideConfigKey("quarkus.hibernate-orm.pu-1.packages", MyEntity.class.getPackageName()) + .overrideConfigKey("quarkus.hibernate-orm.pu-1.database.generation", "drop-and-create") + .assertException(t -> assertThat(t) + .isInstanceOf(ConfigurationException.class) + .hasMessageContainingAll("Datasource must be defined for persistence unit 'pu-1'."));; + + @Test + public void testInvalidConfiguration() { + // deployment exception should happen first + Assertions.fail(); + } + +} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/namedpu/EntitiesInNamedPUWithExplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/namedpu/EntitiesInNamedPUWithExplicitUnconfiguredDatasourceTest.java deleted file mode 100644 index fdcaf43005835..0000000000000 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/namedpu/EntitiesInNamedPUWithExplicitUnconfiguredDatasourceTest.java +++ /dev/null @@ -1,35 +0,0 @@ -package io.quarkus.hibernate.orm.config.namedpu; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.runtime.configuration.ConfigurationException; -import io.quarkus.test.QuarkusUnitTest; - -public class EntitiesInNamedPUWithExplicitUnconfiguredDatasourceTest { - - @RegisterExtension - static QuarkusUnitTest runner = new QuarkusUnitTest() - .assertException(t -> { - assertThat(t) - .isInstanceOf(ConfigurationException.class) - .hasMessageContainingAll( - "The datasource 'ds-1' is not configured but the persistence unit 'pu-1' uses it.", - "To solve this, configure datasource 'ds-1'.", - "Refer to https://quarkus.io/guides/datasource for guidance."); - }) - .withApplicationRoot((jar) -> jar - .addPackage(MyEntity.class.getPackage().getName()) - .addAsResource("application-named-pu-explicit-unconfigured-datasource.properties", - "application.properties")); - - @Test - public void testInvalidConfiguration() { - // deployment exception should happen first - Assertions.fail(); - } - -} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/namedpu/EntitiesInNamedPUWithoutDatasourceTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/namedpu/EntitiesInNamedPUWithoutDatasourceTest.java deleted file mode 100644 index 552e9079dcd5b..0000000000000 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/namedpu/EntitiesInNamedPUWithoutDatasourceTest.java +++ /dev/null @@ -1,32 +0,0 @@ -package io.quarkus.hibernate.orm.config.namedpu; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.runtime.configuration.ConfigurationException; -import io.quarkus.test.QuarkusUnitTest; - -public class EntitiesInNamedPUWithoutDatasourceTest { - - @RegisterExtension - static QuarkusUnitTest runner = new QuarkusUnitTest() - .assertException(t -> { - assertThat(t) - .isInstanceOf(ConfigurationException.class) - .hasMessageContainingAll("Datasource must be defined for persistence unit 'pu-1'."); - }) - 
.withConfigurationResource("application-named-pu-no-datasource.properties") - .overrideConfigKey("quarkus.datasource.devservices.enabled", "false") - .withApplicationRoot((jar) -> jar - .addPackage(MyEntity.class.getPackage().getName())); - - @Test - public void testInvalidConfiguration() { - // deployment exception should happen first - Assertions.fail(); - } - -} diff --git a/extensions/hibernate-orm/deployment/src/test/resources/application-default-pu-explicit-unconfigured-datasource.properties b/extensions/hibernate-orm/deployment/src/test/resources/application-default-pu-explicit-unconfigured-datasource.properties deleted file mode 100644 index 6bc5049280142..0000000000000 --- a/extensions/hibernate-orm/deployment/src/test/resources/application-default-pu-explicit-unconfigured-datasource.properties +++ /dev/null @@ -1,3 +0,0 @@ -quarkus.hibernate-orm.datasource=ds-1 -#quarkus.hibernate-orm.log.sql=true -quarkus.hibernate-orm.database.generation=drop-and-create diff --git a/extensions/hibernate-orm/deployment/src/test/resources/application-named-pu-explicit-unconfigured-datasource.properties b/extensions/hibernate-orm/deployment/src/test/resources/application-named-pu-explicit-unconfigured-datasource.properties deleted file mode 100644 index 4d7291dba7fcb..0000000000000 --- a/extensions/hibernate-orm/deployment/src/test/resources/application-named-pu-explicit-unconfigured-datasource.properties +++ /dev/null @@ -1,3 +0,0 @@ -quarkus.hibernate-orm.pu-1.datasource=ds-1 -quarkus.hibernate-orm.pu-1.log.sql=true -quarkus.hibernate-orm.pu-1.database.generation=drop-and-create diff --git a/extensions/hibernate-orm/deployment/src/test/resources/application-named-pu-no-datasource.properties b/extensions/hibernate-orm/deployment/src/test/resources/application-named-pu-no-datasource.properties deleted file mode 100644 index 422410bbf6fe1..0000000000000 --- a/extensions/hibernate-orm/deployment/src/test/resources/application-named-pu-no-datasource.properties +++ /dev/null @@ -1,4 +0,0 @@ -# We need at least one build-time property, otherwise the PU gets ignored... 
-quarkus.hibernate-orm.pu-1.packages=io.quarkus.hibernate.orm.config.namedpu -quarkus.hibernate-orm.pu-1.log.sql=true -quarkus.hibernate-orm.pu-1.database.generation=drop-and-create diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/FastBootHibernatePersistenceProvider.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/FastBootHibernatePersistenceProvider.java index 5db72232952c5..f98ba87694899 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/FastBootHibernatePersistenceProvider.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/FastBootHibernatePersistenceProvider.java @@ -24,10 +24,9 @@ import org.hibernate.service.internal.ProvidedService; import org.jboss.logging.Logger; -import io.quarkus.agroal.DataSource.DataSourceLiteral; +import io.quarkus.agroal.runtime.DataSources; import io.quarkus.agroal.runtime.UnconfiguredDataSource; import io.quarkus.arc.Arc; -import io.quarkus.arc.InstanceHandle; import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.hibernate.orm.runtime.RuntimeSettings.Builder; import io.quarkus.hibernate.orm.runtime.boot.FastBootEntityManagerFactoryBuilder; @@ -38,7 +37,6 @@ import io.quarkus.hibernate.orm.runtime.integration.HibernateOrmIntegrationRuntimeInitListener; import io.quarkus.hibernate.orm.runtime.recording.PrevalidatedQuarkusMetadata; import io.quarkus.hibernate.orm.runtime.recording.RecordedState; -import io.quarkus.runtime.configuration.ConfigurationException; /** * This can not inherit from HibernatePersistenceProvider as that would force @@ -375,7 +373,7 @@ private void verifyProperties(Map properties) { } } - private static void injectDataSource(String persistenceUnitName, String dataSource, + private static void injectDataSource(String persistenceUnitName, String dataSourceName, RuntimeSettings.Builder runtimeSettingsBuilder) { // first convert @@ -389,26 +387,16 @@ private static void injectDataSource(String persistenceUnitName, String dataSour return; } - InstanceHandle dataSourceHandle; - if (DataSourceUtil.isDefault(dataSource)) { - dataSourceHandle = Arc.container().instance(DataSource.class); - } else { - dataSourceHandle = Arc.container().instance(DataSource.class, new DataSourceLiteral(dataSource)); - } - - if (!dataSourceHandle.isAvailable()) { - throw new IllegalStateException( - "No datasource " + dataSource + " has been defined for persistence unit " + persistenceUnitName); - } - - DataSource ds = dataSourceHandle.get(); - if (ds instanceof UnconfiguredDataSource) { - throw new ConfigurationException( - "Model classes are defined for the default persistence unit " + persistenceUnitName - + " but configured datasource " + dataSource - + " not found: the default EntityManagerFactory will not be created. To solve this, configure the default datasource. 
Refer to https://quarkus.io/guides/datasource for guidance."); + DataSource dataSource; + try { + dataSource = Arc.container().instance(DataSources.class).get().getDataSource(dataSourceName); + if (dataSource instanceof UnconfiguredDataSource) { + throw DataSourceUtil.dataSourceNotConfigured(dataSourceName); + } + } catch (RuntimeException e) { + throw PersistenceUnitUtil.unableToFindDataSource(persistenceUnitName, dataSourceName, e); } - runtimeSettingsBuilder.put(AvailableSettings.DATASOURCE, ds); + runtimeSettingsBuilder.put(AvailableSettings.DATASOURCE, dataSource); } private static void injectRuntimeConfiguration(HibernateOrmRuntimeConfigPersistenceUnit persistenceUnitConfig, diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java index 481249f84ab5b..f7c05f7530e44 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java @@ -75,7 +75,8 @@ public void run() { } catch (InterruptedException e) { throw new RuntimeException(e); } catch (ExecutionException e) { - throw new RuntimeException(e.getCause()); + throw e.getCause() instanceof RuntimeException ? (RuntimeException) e.getCause() + : new RuntimeException(e.getCause()); } } } diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/PersistenceUnitUtil.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/PersistenceUnitUtil.java index b91894c052e68..a3194b02f77ad 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/PersistenceUnitUtil.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/PersistenceUnitUtil.java @@ -13,6 +13,7 @@ import io.quarkus.arc.InjectableInstance; import io.quarkus.hibernate.orm.PersistenceUnit; import io.quarkus.hibernate.orm.PersistenceUnitExtension; +import io.quarkus.runtime.configuration.ConfigurationException; public class PersistenceUnitUtil { private static final Logger LOG = Logger.getLogger(PersistenceUnitUtil.class); @@ -104,4 +105,13 @@ public static InjectableInstance legacySingleExtensionInstanceForPersiste private static boolean isDefaultBean(InjectableInstance instance) { return instance.isResolvable() && instance.getHandle().getBean().isDefaultBean(); } + + public static ConfigurationException unableToFindDataSource(String persistenceUnitName, + String dataSourceName, + Throwable cause) { + return new ConfigurationException(String.format(Locale.ROOT, + "Unable to find datasource '%s' for persistence unit '%s': %s", + dataSourceName, persistenceUnitName, cause.getMessage()), + cause); + } } diff --git a/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java new file mode 100644 index 0000000000000..c4c11e2307d01 --- /dev/null +++ b/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java @@ -0,0 +1,23 @@ +package io.quarkus.hibernate.reactive.config.datasource; + +import 
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.hibernate.reactive.config.MyEntity; +import io.quarkus.test.QuarkusUnitTest; + +public class EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClass(MyEntity.class)) + .overrideConfigKey("quarkus.hibernate-orm.datasource", "ds-1") + .overrideConfigKey("quarkus.hibernate-orm.database.generation", "drop-and-create"); + + @Test + public void testInvalidConfiguration() { + // bootstrap will succeed and ignore the fact that a datasource is unconfigured... + } + +} diff --git a/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java new file mode 100644 index 0000000000000..74f0f25029c80 --- /dev/null +++ b/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java @@ -0,0 +1,23 @@ +package io.quarkus.hibernate.reactive.config.datasource; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.hibernate.reactive.config.MyEntity; +import io.quarkus.test.QuarkusUnitTest; + +public class EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClass(MyEntity.class)) + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + @Test + public void testInvalidConfiguration() { + // bootstrap will succeed and ignore the fact that a datasource is unconfigured... 
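// (Illustration only, not asserted by this test: with this configuration the application still boots;
//  a failure would only be expected later, on first real use of the reactive session, roughly along
//  the lines of
//      @Inject Mutiny.SessionFactory sessionFactory;
//      sessionFactory.withSession(session -> session.find(MyEntity.class, 1L));
//  the exact exception surfaced at that point is not covered by this change.)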
+ } + +} diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/pom.xml b/extensions/hibernate-search-orm-elasticsearch/deployment/pom.xml index 9f9331be27bbf..d825482a1369e 100644 --- a/extensions/hibernate-search-orm-elasticsearch/deployment/pom.xml +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/pom.xml @@ -33,6 +33,10 @@ io.quarkus quarkus-vertx-http-dev-ui-spi + + io.quarkus + quarkus-vertx-http-deployment-spi + diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/deployment/HibernateSearchElasticsearchProcessor.java b/extensions/hibernate-search-orm-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/deployment/HibernateSearchElasticsearchProcessor.java index d89ae9184541a..5df805957096a 100644 --- a/extensions/hibernate-search-orm-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/deployment/HibernateSearchElasticsearchProcessor.java +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/deployment/HibernateSearchElasticsearchProcessor.java @@ -65,8 +65,10 @@ import io.quarkus.hibernate.search.orm.elasticsearch.runtime.HibernateSearchElasticsearchBuildTimeConfigPersistenceUnit.ElasticsearchIndexBuildTimeConfig; import io.quarkus.hibernate.search.orm.elasticsearch.runtime.HibernateSearchElasticsearchRecorder; import io.quarkus.hibernate.search.orm.elasticsearch.runtime.HibernateSearchElasticsearchRuntimeConfig; +import io.quarkus.hibernate.search.orm.elasticsearch.runtime.management.HibernateSearchManagementConfig; import io.quarkus.runtime.configuration.ConfigUtils; import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.vertx.http.deployment.spi.RouteBuildItem; @BuildSteps(onlyIf = HibernateSearchEnabled.class) class HibernateSearchElasticsearchProcessor { @@ -435,4 +437,17 @@ void devServicesDropAndCreateAndDropByDefault( } } + @Record(ExecutionTime.RUNTIME_INIT) + @BuildStep(onlyIf = HibernateSearchManagementEnabled.class) + void createManagementRoutes(BuildProducer routes, + HibernateSearchElasticsearchRecorder recorder, + HibernateSearchManagementConfig managementConfig) { + + routes.produce(RouteBuildItem.newManagementRoute( + managementConfig.rootPath() + (managementConfig.rootPath().endsWith("/") ? 
"" : "/") + "reindex") + .withRoutePathConfigKey("quarkus.hibernate-search-orm.management.root-path") + .withRequestHandler(recorder.managementHandler()) + .displayOnNotFoundPage() + .build()); + } } diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/deployment/HibernateSearchManagementEnabled.java b/extensions/hibernate-search-orm-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/deployment/HibernateSearchManagementEnabled.java new file mode 100644 index 0000000000000..04093b0b3de91 --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/deployment/HibernateSearchManagementEnabled.java @@ -0,0 +1,25 @@ +package io.quarkus.hibernate.search.orm.elasticsearch.deployment; + +import io.quarkus.hibernate.search.orm.elasticsearch.runtime.HibernateSearchElasticsearchBuildTimeConfig; +import io.quarkus.hibernate.search.orm.elasticsearch.runtime.management.HibernateSearchManagementConfig; + +/** + * Supplier that can be used to only run build steps + * if the Hibernate Search extension and its management is enabled. + */ +public class HibernateSearchManagementEnabled extends HibernateSearchEnabled { + + private final HibernateSearchManagementConfig config; + + HibernateSearchManagementEnabled(HibernateSearchElasticsearchBuildTimeConfig config, + HibernateSearchManagementConfig managementConfig) { + super(config); + this.config = managementConfig; + } + + @Override + public boolean getAsBoolean() { + return super.getAsBoolean() && config.enabled(); + } + +} diff --git a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java index 586a3942338d4..b1801a4bdb991 100644 --- a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java +++ b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java @@ -51,9 +51,12 @@ import io.quarkus.hibernate.search.orm.elasticsearch.runtime.HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.ElasticsearchBackendRuntimeConfig; import io.quarkus.hibernate.search.orm.elasticsearch.runtime.HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.ElasticsearchIndexRuntimeConfig; import io.quarkus.hibernate.search.orm.elasticsearch.runtime.bean.HibernateSearchBeanUtil; +import io.quarkus.hibernate.search.orm.elasticsearch.runtime.management.HibernateSearchManagementHandler; import io.quarkus.hibernate.search.orm.elasticsearch.runtime.mapping.QuarkusHibernateOrmSearchMappingConfigurer; import io.quarkus.runtime.annotations.Recorder; import io.quarkus.runtime.configuration.ConfigurationException; +import io.vertx.core.Handler; +import io.vertx.ext.web.RoutingContext; @Recorder public class HibernateSearchElasticsearchRecorder { @@ -165,6 +168,10 @@ public SearchSession get() { }; } + public Handler managementHandler() { + return new HibernateSearchManagementHandler(); + } + private static final class HibernateSearchIntegrationStaticInitInactiveListener implements HibernateOrmIntegrationStaticInitListener 
{ private HibernateSearchIntegrationStaticInitInactiveListener() { diff --git a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchManagementConfig.java b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchManagementConfig.java new file mode 100644 index 0000000000000..0235472ff9d51 --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchManagementConfig.java @@ -0,0 +1,30 @@ +package io.quarkus.hibernate.search.orm.elasticsearch.runtime.management; + +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; + +@ConfigMapping(prefix = "quarkus.hibernate-search-orm.management") +@ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +public interface HibernateSearchManagementConfig { + + /** + * Root path for reindexing endpoints. + * This value will be resolved as a path relative to `${quarkus.management.root-path}`. + * + * @asciidoclet + */ + @WithDefault("hibernate-search/") + String rootPath(); + + /** + * If management interface is turned on the reindexing endpoints will be published under the management interface. + * This property allows to enable this functionality by setting it to ``true`. + * + * @asciidoclet + */ + @WithDefault("false") + boolean enabled(); + +} diff --git a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchManagementHandler.java b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchManagementHandler.java new file mode 100644 index 0000000000000..e478fa011f2d3 --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchManagementHandler.java @@ -0,0 +1,53 @@ +package io.quarkus.hibernate.search.orm.elasticsearch.runtime.management; + +import java.util.Locale; + +import io.quarkus.arc.Arc; +import io.quarkus.arc.ManagedContext; +import io.vertx.core.Handler; +import io.vertx.core.http.HttpHeaders; +import io.vertx.core.http.HttpMethod; +import io.vertx.core.http.HttpServerRequest; +import io.vertx.ext.web.RoutingContext; + +public class HibernateSearchManagementHandler implements Handler { + + @Override + public void handle(RoutingContext routingContext) { + ManagedContext requestContext = Arc.container().requestContext(); + if (requestContext.isActive()) { + doHandle(routingContext); + } else { + requestContext.activate(); + try { + doHandle(routingContext); + } finally { + requestContext.terminate(); + } + } + } + + private void doHandle(RoutingContext ctx) { + HttpServerRequest request = ctx.request(); + + if (!HttpMethod.POST.equals(request.method())) { + errorResponse(ctx, 406, "Http method [" + request.method().name() + "] is not supported. Use [POST] instead."); + return; + } + + String contentType = request.getHeader(HttpHeaders.CONTENT_TYPE); + if (contentType != null && !contentType.toLowerCase(Locale.ROOT).startsWith("application/json")) { + errorResponse(ctx, 406, "Content type [" + contentType + " is not supported. 
Use [application/json] instead."); + return; + } + + new HibernateSearchPostRequestProcessor().process(ctx); + } + + private void errorResponse(RoutingContext ctx, int code, String message) { + ctx.response() + .setStatusCode(code) + .setStatusMessage(message) + .end(); + } +} diff --git a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchPostRequestProcessor.java b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchPostRequestProcessor.java new file mode 100644 index 0000000000000..35777e89dfebc --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/management/HibernateSearchPostRequestProcessor.java @@ -0,0 +1,220 @@ +package io.quarkus.hibernate.search.orm.elasticsearch.runtime.management; + +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +import org.hibernate.CacheMode; +import org.hibernate.search.mapper.orm.mapping.SearchMapping; +import org.hibernate.search.mapper.orm.massindexing.MassIndexer; +import org.hibernate.search.mapper.orm.scope.SearchScope; + +import io.quarkus.arc.Arc; +import io.quarkus.arc.InstanceHandle; +import io.quarkus.hibernate.orm.PersistenceUnit; +import io.vertx.core.http.HttpServerRequest; +import io.vertx.core.json.JsonArray; +import io.vertx.core.json.JsonObject; +import io.vertx.ext.web.RoutingContext; + +class HibernateSearchPostRequestProcessor { + + private static final String QUERY_PARAM_WAIT_FOR = "wait_for"; + private static final String QUERY_PARAM_PERSISTENCE_UNIT = "persistence_unit"; + + public void process(RoutingContext ctx) { + JsonObject config = ctx.body().asJsonObject(); + if (config == null) { + config = new JsonObject(); + } + try (InstanceHandle searchMappingInstanceHandle = searchMappingInstanceHandle(ctx.request())) { + + SearchMapping searchMapping = searchMappingInstanceHandle.get(); + + JsonObject filter = config.getJsonObject("filter"); + List types = getTypesToFilter(filter); + Set tenants = getTenants(filter); + MassIndexer massIndexer; + if (types == null || types.isEmpty()) { + massIndexer = createMassIndexer(searchMapping.scope(Object.class), tenants); + } else { + massIndexer = createMassIndexer(searchMapping.scope(Object.class, types), tenants); + } + + HibernateSearchMassIndexerConfiguration.configure(massIndexer, config.getJsonObject("massIndexer")); + + CompletionStage massIndexerFuture = massIndexer.start(); + + if (WaitFor.STARTED.equals(getWaitForParameter(ctx.request()))) { + ctx.response().end(message(202, "Reindexing started")); + } else { + ctx.response() + .setChunked(true) + .write(message(202, "Reindexing started"), + ignored -> massIndexerFuture.whenComplete((ignored2, throwable) -> { + if (throwable == null) { + ctx.response().end(message(200, "Reindexing succeeded")); + } else { + ctx.response().end(message( + 500, + "Reindexing failed:\n" + Arrays.stream(throwable.getStackTrace()) + .map(Object::toString) + .collect(Collectors.joining("\n")))); + } + })); + } + } + } + + private MassIndexer createMassIndexer(SearchScope scope, Set tenants) { + if (tenants == null || tenants.isEmpty()) { + return scope.massIndexer(); + } else { + return scope.massIndexer(tenants); + } + } + + private List 
getTypesToFilter(JsonObject filter) { + if (filter == null) { + return null; + } + JsonArray array = filter.getJsonArray("types"); + if (array == null) { + return null; + } + List types = array + .stream() + .map(Object::toString) + .collect(Collectors.toList()); + return types.isEmpty() ? null : types; + } + + private Set getTenants(JsonObject filter) { + if (filter == null) { + return null; + } + JsonArray array = filter.getJsonArray("tenants"); + if (array == null) { + return null; + } + Set types = array + .stream() + .map(Object::toString) + .collect(Collectors.toSet()); + return types.isEmpty() ? null : types; + } + + private WaitFor getWaitForParameter(HttpServerRequest request) { + return WaitFor.valueOf(request.getParam(QUERY_PARAM_WAIT_FOR, WaitFor.STARTED.name()).toUpperCase(Locale.ROOT)); + } + + private InstanceHandle searchMappingInstanceHandle(HttpServerRequest request) { + String pu = request.getParam(QUERY_PARAM_PERSISTENCE_UNIT, PersistenceUnit.DEFAULT); + return Arc.container().instance(SearchMapping.class, new PersistenceUnit.PersistenceUnitLiteral(pu)); + } + + private static String message(int code, String message) { + return JsonObject.of("code", code, "message", message) + "\n"; + } + + private enum WaitFor { + STARTED, + FINISHED; + } + + private static final class HibernateSearchMassIndexerConfiguration { + private HibernateSearchMassIndexerConfiguration() { + } + + /** + * Sets the number of entity types to be indexed in parallel + */ + private static final String TYPES_TO_INDEX_IN_PARALLEL = "typesToIndexInParallel"; + + /** + * Sets the number of threads to be used to load the root entities. + */ + private static final String THREADS_TO_LOAD_OBJECTS = "threadsToLoadObjects"; + + /** + * Sets the batch size used to load the root entities. + */ + private static final String BATCH_SIZE_TO_LOAD_OBJECTS = "batchSizeToLoadObjects"; + + /** + * Sets the cache interaction mode for the data loading tasks. + */ + private static final String CACHE_MODE = "cacheMode"; + + /** + * If each index is merged into a single segment after indexing. + */ + private static final String MERGE_SEGMENTS_ON_FINISH = "mergeSegmentsOnFinish"; + + /** + * If each index is merged into a single segment after the initial index purge, just before indexing. + */ + private static final String MERGE_SEGMENTS_AFTER_PURGE = "mergeSegmentsAfterPurge"; + + /** + * If the indexes and their schema (if they exist) should be dropped and re-created before indexing. + */ + private static final String DROP_AND_CREATE_SCHEMA_ON_START = "dropAndCreateSchemaOnStart"; + + /** + * If all entities are removed from the indexes before indexing. + */ + private static final String PURGE_ALL_ON_START = "purgeAllOnStart"; + + /** + * Specifies the fetch size to be used when loading primary keys if objects to be indexed. + */ + private static final String ID_FETCH_SIZE = "idFetchSize"; + + /** + * Specifies the timeout of transactions for loading ids and entities to be re-indexed. 
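* (Illustration only, values are placeholder assumptions: together with the "filter" handling above,
* these keys imply a POST body shaped roughly like
* {"filter": {"types": ["org.acme.Book"], "tenants": ["tenant1"]},
*  "massIndexer": {"batchSizeToLoadObjects": 50, "transactionTimeout": 3600}} )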
+ */ + private static final String TRANSACTION_TIMEOUT = "transactionTimeout"; + + private static MassIndexer configure(MassIndexer massIndexer, JsonObject config) { + if (config == null) { + return massIndexer; + } + if (config.getInteger(TYPES_TO_INDEX_IN_PARALLEL) != null) { + massIndexer.typesToIndexInParallel(config.getInteger(TYPES_TO_INDEX_IN_PARALLEL)); + } + if (config.getInteger(THREADS_TO_LOAD_OBJECTS) != null) { + massIndexer.threadsToLoadObjects(config.getInteger(THREADS_TO_LOAD_OBJECTS)); + } + if (config.getInteger(BATCH_SIZE_TO_LOAD_OBJECTS) != null) { + massIndexer.batchSizeToLoadObjects(config.getInteger(BATCH_SIZE_TO_LOAD_OBJECTS)); + } + if (config.getString(CACHE_MODE) != null) { + massIndexer.cacheMode(CacheMode.valueOf(config.getString(CACHE_MODE))); + } + if (config.getBoolean(MERGE_SEGMENTS_ON_FINISH) != null) { + massIndexer.mergeSegmentsOnFinish(config.getBoolean(MERGE_SEGMENTS_ON_FINISH)); + } + if (config.getBoolean(MERGE_SEGMENTS_AFTER_PURGE) != null) { + massIndexer.mergeSegmentsAfterPurge(config.getBoolean(MERGE_SEGMENTS_AFTER_PURGE)); + } + if (config.getBoolean(DROP_AND_CREATE_SCHEMA_ON_START) != null) { + massIndexer.dropAndCreateSchemaOnStart(config.getBoolean(DROP_AND_CREATE_SCHEMA_ON_START)); + } + if (config.getBoolean(PURGE_ALL_ON_START) != null) { + massIndexer.purgeAllOnStart(config.getBoolean(PURGE_ALL_ON_START)); + } + if (config.getInteger(ID_FETCH_SIZE) != null) { + massIndexer.idFetchSize(config.getInteger(ID_FETCH_SIZE)); + } + if (config.getInteger(TRANSACTION_TIMEOUT) != null) { + massIndexer.transactionTimeout(config.getInteger(TRANSACTION_TIMEOUT)); + } + + return massIndexer; + } + } +} diff --git a/extensions/info/deployment/src/main/java/io/quarkus/info/deployment/InfoProcessor.java b/extensions/info/deployment/src/main/java/io/quarkus/info/deployment/InfoProcessor.java index 32762ff7f36ca..c8882b07d9f1d 100644 --- a/extensions/info/deployment/src/main/java/io/quarkus/info/deployment/InfoProcessor.java +++ b/extensions/info/deployment/src/main/java/io/quarkus/info/deployment/InfoProcessor.java @@ -26,6 +26,7 @@ import org.jboss.logging.Logger; import io.quarkus.arc.deployment.SyntheticBeanBuildItem; +import io.quarkus.arc.deployment.UnremovableBeanBuildItem; import io.quarkus.bootstrap.model.ApplicationModel; import io.quarkus.bootstrap.workspace.WorkspaceModule; import io.quarkus.builder.Version; @@ -45,7 +46,7 @@ import io.quarkus.info.runtime.spi.InfoContributor; import io.quarkus.maven.dependency.ResolvedDependency; import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem; -import io.quarkus.vertx.http.deployment.RouteBuildItem; +import io.quarkus.vertx.http.deployment.spi.RouteBuildItem; public class InfoProcessor { @@ -274,6 +275,7 @@ RouteBuildItem defineRoute(InfoBuildTimeConfig buildTimeConfig, List buildTimeValues, List contributors, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, + BuildProducer unremovableBeanBuildItemBuildProducer, InfoRecorder recorder) { Map buildTimeInfo = buildTimeValues.stream().collect( Collectors.toMap(InfoBuildTimeValuesBuildItem::getName, InfoBuildTimeValuesBuildItem::getValue, (x, y) -> y, @@ -281,13 +283,12 @@ RouteBuildItem defineRoute(InfoBuildTimeConfig buildTimeConfig, List infoContributors = contributors.stream() .map(InfoBuildTimeContributorBuildItem::getInfoContributor) .collect(Collectors.toList()); - return nonApplicationRootPathBuildItem.routeBuilder() - .management() - .route(buildTimeConfig.path()) - .routeConfigKey("quarkus.info.path") - 
.handler(recorder.handler(buildTimeInfo, infoContributors)) - .displayOnNotFoundPage() - .blockingRoute() + + unremovableBeanBuildItemBuildProducer.produce(UnremovableBeanBuildItem.beanTypes(InfoContributor.class)); + + return RouteBuildItem.newManagementRoute(buildTimeConfig.path()) + .withRoutePathConfigKey("quarkus.info.path") + .withRequestHandler(recorder.handler(buildTimeInfo, infoContributors)) .build(); } } diff --git a/extensions/info/deployment/src/main/resources/dev-ui/qwc-info.js b/extensions/info/deployment/src/main/resources/dev-ui/qwc-info.js index a3f8d21476686..5ba4c71f05d00 100644 --- a/extensions/info/deployment/src/main/resources/dev-ui/qwc-info.js +++ b/extensions/info/deployment/src/main/resources/dev-ui/qwc-info.js @@ -54,7 +54,7 @@ export class QwcInfo extends LitElement { super.connectedCallback(); await this.load(); } - + async load() { const response = await fetch(this._infoUrl); const data = await response.json(); @@ -68,6 +68,7 @@ export class QwcInfo extends LitElement { ${this._renderJavaInfo(this._info)} ${this._renderBuildInfo(this._info)} ${this._renderGitInfo(this._info)} + ${this._renderExternalContributedInfo(this._info)} `; }else{ return html` @@ -78,7 +79,7 @@ export class QwcInfo extends LitElement { `; } } - + _renderOsInfo(info){ if(info.os){ let os = info.os; @@ -94,7 +95,7 @@ export class QwcInfo extends LitElement { `; } } - + _renderJavaInfo(info){ if(info.java){ let java = info.java; @@ -108,9 +109,9 @@ export class QwcInfo extends LitElement { `; } } - + _renderOsIcon(osname){ - + if(osname){ if(osname.toLowerCase().startsWith("linux")){ return html``; @@ -121,7 +122,7 @@ export class QwcInfo extends LitElement { } } } - + _renderGitInfo(info){ if(info.git){ let git = info.git; @@ -138,7 +139,7 @@ export class QwcInfo extends LitElement { `; } } - + _renderCommitId(git){ if(typeof git.commit.id === "string"){ return html`${git.commit.id}`; @@ -146,18 +147,18 @@ export class QwcInfo extends LitElement { return html`${git.commit.id.full}`; } } - + _renderOptionalData(git){ if(typeof git.commit.id !== "string"){ return html`Commit User${git.commit.user.name} <${git.commit.user.email}> Commit Message${unsafeHTML(this._replaceNewLine(git.commit.id.message.full))}` } } - + _replaceNewLine(line){ return line.replace(new RegExp('\r?\n','g'), '
'); } - + _renderBuildInfo(info){ if(info.build){ let build = info.build; @@ -173,5 +174,32 @@ export class QwcInfo extends LitElement { `; } } + + _renderExternalContributedInfo(info){ + const externalConstributors = Object.keys(info) + .filter(key => key !== 'build') + .filter(key => key !== 'os') + .filter(key => key !== 'git') + .filter(key => key !== 'java') + if(externalConstributors.length > 0){ + const cards = []; + externalConstributors.map(key => { + const extInfo = info[key]; + const rows = []; + for (const property of Object.keys(extInfo)){ + rows.push(html`${property}${extInfo[property]}`); + } + cards.push(html` +
+ + + ${rows} +
+
+
`); + }) + return html`${cards}`; + } + } } customElements.define('qwc-info', QwcInfo); \ No newline at end of file diff --git a/extensions/info/deployment/src/test/java/io/quarkus/info/deployment/ExternalInfoContributorTest.java b/extensions/info/deployment/src/test/java/io/quarkus/info/deployment/ExternalInfoContributorTest.java new file mode 100644 index 0000000000000..003c776304e72 --- /dev/null +++ b/extensions/info/deployment/src/test/java/io/quarkus/info/deployment/ExternalInfoContributorTest.java @@ -0,0 +1,61 @@ +package io.quarkus.info.deployment; + +import static io.restassured.RestAssured.when; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +import java.util.Map; + +import jakarta.enterprise.context.ApplicationScoped; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import io.quarkus.info.runtime.spi.InfoContributor; +import io.quarkus.test.QuarkusUnitTest; + +public class ExternalInfoContributorTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .addBuildChainCustomizer( + buildChainBuilder -> buildChainBuilder.addBuildStep( + context -> new AdditionalBeanBuildItem(TestInfoContributor.class)) + .produces(AdditionalBeanBuildItem.class) + .build()) + .withApplicationRoot((jar) -> jar + .addClasses(TestInfoContributor.class)); + + @Test + public void test() { + when().get("/q/info") + .then() + .statusCode(200) + .body("os", is(notNullValue())) + .body("os.name", is(notNullValue())) + .body("java", is(notNullValue())) + .body("java.version", is(notNullValue())) + .body("build", is(notNullValue())) + .body("build.time", is(notNullValue())) + .body("git", is(notNullValue())) + .body("git.branch", is(notNullValue())) + .body("test", is(notNullValue())) + .body("test.foo", is("bar")); + + } + + @ApplicationScoped + public static class TestInfoContributor implements InfoContributor { + + @Override + public String name() { + return "test"; + } + + @Override + public Map data() { + return Map.of("foo", "bar"); + } + } +} diff --git a/extensions/info/runtime/src/main/java/io/quarkus/info/runtime/InfoRecorder.java b/extensions/info/runtime/src/main/java/io/quarkus/info/runtime/InfoRecorder.java index 0cb67207f89a9..e589f7f341fa0 100644 --- a/extensions/info/runtime/src/main/java/io/quarkus/info/runtime/InfoRecorder.java +++ b/extensions/info/runtime/src/main/java/io/quarkus/info/runtime/InfoRecorder.java @@ -8,6 +8,8 @@ import java.util.Map; import java.util.function.Supplier; +import io.quarkus.arc.Arc; +import io.quarkus.arc.InstanceHandle; import io.quarkus.info.BuildInfo; import io.quarkus.info.GitInfo; import io.quarkus.info.JavaInfo; @@ -143,6 +145,10 @@ public InfoHandler(Map buildTimeInfo, List know // also, do we want to merge information or simply replace like we are doing here? 
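// (Illustration only: with a contributor such as the TestInfoContributor in the test above, the
//  /q/info payload is expected to gain a top-level entry keyed by name(), roughly "test": {"foo": "bar"},
//  alongside the built-in os/java/build/git sections; the actual path depends on quarkus.info.path
//  and the non-application root path.)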
finalBuildInfo.put(contributor.name(), contributor.data()); } + for (InstanceHandle handler : Arc.container().listAll(InfoContributor.class)) { + InfoContributor contributor = handler.get(); + finalBuildInfo.put(contributor.name(), contributor.data()); + } } @Override diff --git a/extensions/jaxb/deployment/src/main/java/io/quarkus/jaxb/deployment/JaxbProcessor.java b/extensions/jaxb/deployment/src/main/java/io/quarkus/jaxb/deployment/JaxbProcessor.java index 17768f6d8e62b..3d488e9d012d9 100644 --- a/extensions/jaxb/deployment/src/main/java/io/quarkus/jaxb/deployment/JaxbProcessor.java +++ b/extensions/jaxb/deployment/src/main/java/io/quarkus/jaxb/deployment/JaxbProcessor.java @@ -70,6 +70,7 @@ import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBundleBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageSystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveHierarchyBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveHierarchyIgnoreWarningBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; @@ -186,6 +187,7 @@ void processAnnotationsAndIndexFiles( BuildProducer proxyDefinitions, CombinedIndexBuildItem combinedIndexBuildItem, List fileRoots, + BuildProducer reflectiveHierarchies, BuildProducer reflectiveClass, BuildProducer resource, BuildProducer resourceBundle, @@ -202,10 +204,11 @@ void processAnnotationsAndIndexFiles( for (DotName jaxbRootAnnotation : JAXB_ROOT_ANNOTATIONS) { for (AnnotationInstance jaxbRootAnnotationInstance : index .getAnnotations(jaxbRootAnnotation)) { - if (jaxbRootAnnotationInstance.target().kind() == Kind.CLASS) { - String className = jaxbRootAnnotationInstance.target().asClass().name().toString(); - reflectiveClass.produce(ReflectiveClassBuildItem.builder(className).methods().fields().build()); - classesToBeBound.add(className); + if (jaxbRootAnnotationInstance.target().kind() == Kind.CLASS + && !JAXB_ANNOTATIONS.contains(jaxbRootAnnotationInstance.target().asClass().getClass())) { + DotName targetClass = jaxbRootAnnotationInstance.target().asClass().name(); + addReflectiveHierarchyClass(targetClass, reflectiveHierarchies, index); + classesToBeBound.add(targetClass.toString()); jaxbRootAnnotationsDetected = true; } } @@ -412,6 +415,17 @@ public static Stream safeWalk(Path p) { } } + private void addReflectiveHierarchyClass(DotName className, + BuildProducer reflectiveHierarchy, + IndexView index) { + Type jandexType = Type.create(className, Type.Kind.CLASS); + reflectiveHierarchy.produce(new ReflectiveHierarchyBuildItem.Builder() + .type(jandexType) + .index(index) + .source(getClass().getSimpleName() + " > " + jandexType.name().toString()) + .build()); + } + private void addReflectiveClass(BuildProducer reflectiveClass, boolean methods, boolean fields, String... 
className) { reflectiveClass.produce(new ReflectiveClassBuildItem(methods, fields, className)); diff --git a/extensions/kafka-client/runtime/src/main/java/io/smallrye/reactive/kafka/graal/StrimziSubstitutions.java b/extensions/kafka-client/runtime/src/main/java/io/smallrye/reactive/kafka/graal/StrimziSubstitutions.java index e556ed94f2aa2..2219060f6b7a8 100644 --- a/extensions/kafka-client/runtime/src/main/java/io/smallrye/reactive/kafka/graal/StrimziSubstitutions.java +++ b/extensions/kafka-client/runtime/src/main/java/io/smallrye/reactive/kafka/graal/StrimziSubstitutions.java @@ -9,7 +9,7 @@ import com.jayway.jsonpath.spi.mapper.JacksonMappingProvider; import com.jayway.jsonpath.spi.mapper.MappingProvider; import com.oracle.svm.core.annotate.Alias; -import com.oracle.svm.core.annotate.RecomputeFieldValue; +import com.oracle.svm.core.annotate.Delete; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; @@ -72,10 +72,8 @@ private static boolean isJson(Object o) { @TargetClass(className = "com.jayway.jsonpath.internal.DefaultsImpl", onlyWith = HasStrimzi.class) final class Target_com_jayway_jsonpath_internal_DefaultsImpl { - - @RecomputeFieldValue(kind = RecomputeFieldValue.Kind.FromAlias) - @Alias - public static Target_com_jayway_jsonpath_internal_DefaultsImpl INSTANCE = new Target_com_jayway_jsonpath_internal_DefaultsImpl(); + @Delete // Delete the no longer used mappingProvider + private MappingProvider mappingProvider; @Substitute public JsonProvider jsonProvider() { diff --git a/extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigRecorder.java b/extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigRecorder.java index 2d53a04f7caf9..9fcb5d25f91e2 100644 --- a/extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigRecorder.java +++ b/extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigRecorder.java @@ -9,8 +9,6 @@ public class KubernetesConfigRecorder { private static final Logger log = Logger.getLogger(KubernetesConfigRecorder.class); - private static final String CONFIG_ENABLED_PROPERTY_NAME = "quarkus.kubernetes-config.enabled"; - public void warnAboutSecrets(KubernetesConfigBuildTimeConfig buildTimeConfig, KubernetesConfigSourceConfig config) { if (config.secrets().isPresent() && !config.secrets().get().isEmpty() diff --git a/extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigSourceFactoryBuilder.java b/extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigSourceFactoryBuilder.java index 1cd67db69e6e4..2b7b40fe012a3 100644 --- a/extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigSourceFactoryBuilder.java +++ b/extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigSourceFactoryBuilder.java @@ -2,9 +2,12 @@ import static io.smallrye.config.Converters.getImplicitConverter; +import java.util.Collections; + import org.eclipse.microprofile.config.spi.ConfigSource; import io.fabric8.kubernetes.client.KubernetesClient; +import io.quarkus.arc.runtime.appcds.AppCDSRecorder; import io.quarkus.kubernetes.client.runtime.KubernetesClientBuildConfig; import io.quarkus.kubernetes.client.runtime.KubernetesClientUtils; import io.quarkus.runtime.TlsConfig; 
@@ -23,6 +26,12 @@ static class KubernetesConfigFactory implements ConfigurableConfigSourceFactory< @Override public Iterable getConfigSources(final ConfigSourceContext context, final KubernetesClientBuildConfig config) { + boolean inAppCDsGeneration = Boolean + .parseBoolean(System.getProperty(AppCDSRecorder.QUARKUS_APPCDS_GENERATE_PROP, "false")); + if (inAppCDsGeneration) { + return Collections.emptyList(); + } + // TODO - TlsConfig is used in a lot of place. This is to avoid having it to migrate to ConfigMapping. boolean trustAll = getImplicitConverter(Boolean.class) .convert(context.getValue("quarkus.tls.trust-all").getValue()); diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/Constants.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/Constants.java index dd11245f8abf1..a8590aaa6e50c 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/Constants.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/Constants.java @@ -54,6 +54,7 @@ public final class Constants { static final String QUARKUS_ANNOTATIONS_COMMIT_ID = "app.quarkus.io/commit-id"; static final String QUARKUS_ANNOTATIONS_VCS_URL = "app.quarkus.io/vcs-uri"; static final String QUARKUS_ANNOTATIONS_BUILD_TIMESTAMP = "app.quarkus.io/build-timestamp"; + static final String QUARKUS_ANNOTATIONS_QUARKUS_VERSION = "app.quarkus.io/quarkus-version"; public static final String HTTP_PORT = "http"; public static final int DEFAULT_HTTP_PORT = 8080; diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java index f3ec0a8eb039a..429c201834fab 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java @@ -7,6 +7,7 @@ import static io.quarkus.kubernetes.deployment.Constants.KNATIVE; import static io.quarkus.kubernetes.deployment.Constants.QUARKUS_ANNOTATIONS_BUILD_TIMESTAMP; import static io.quarkus.kubernetes.deployment.Constants.QUARKUS_ANNOTATIONS_COMMIT_ID; +import static io.quarkus.kubernetes.deployment.Constants.QUARKUS_ANNOTATIONS_QUARKUS_VERSION; import static io.quarkus.kubernetes.deployment.Constants.QUARKUS_ANNOTATIONS_VCS_URL; import static io.quarkus.kubernetes.deployment.Constants.SERVICE_ACCOUNT; @@ -81,6 +82,7 @@ import io.fabric8.kubernetes.api.model.PodSpecBuilder; import io.fabric8.kubernetes.api.model.rbac.PolicyRule; import io.fabric8.kubernetes.api.model.rbac.PolicyRuleBuilder; +import io.quarkus.builder.Version; import io.quarkus.container.spi.ContainerImageInfoBuildItem; import io.quarkus.deployment.builditem.ApplicationInfoBuildItem; import io.quarkus.deployment.metrics.MetricsCapabilityBuildItem; @@ -977,6 +979,8 @@ private static List createAnnotationDecorators(Optionalquarkus-junit5-internal test + + org.assertj + assertj-core + test + io.quarkus quarkus-test-h2 diff --git a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyDefaultDatasourceTest.java b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyDefaultDatasourceTest.java new file mode 100644 index 
0000000000000..ac28eb20acf10 --- /dev/null +++ b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyDefaultDatasourceTest.java @@ -0,0 +1,31 @@ +package io.quarkus.liquibase.test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class LiquibaseExtensionConfigEmptyDefaultDatasourceTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false") + .assertException(t -> assertThat(t).cause().cause() + .hasMessageContainingAll("Unable to find datasource '' for Liquibase", + "Datasource '' is not configured.", + "To solve this, configure datasource ''.", + "Refer to https://quarkus.io/guides/datasource for guidance.")); + + @Test + @DisplayName("If there is no config for the default datasource, the application should fail to boot") + public void testBootFails() { + // Should not be reached because boot should fail. + assertTrue(false); + } + +} diff --git a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyNamedDatasourceTest.java b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyNamedDatasourceTest.java new file mode 100644 index 0000000000000..8e532e29c17a0 --- /dev/null +++ b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyNamedDatasourceTest.java @@ -0,0 +1,40 @@ +package io.quarkus.liquibase.test; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import jakarta.enterprise.inject.Instance; +import jakarta.enterprise.inject.UnsatisfiedResolutionException; +import jakarta.inject.Inject; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.liquibase.LiquibaseDataSource; +import io.quarkus.liquibase.LiquibaseFactory; +import io.quarkus.test.QuarkusUnitTest; + +public class LiquibaseExtensionConfigEmptyNamedDatasourceTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false") + // We need this otherwise it's going to be the *default* datasource making everything fail + .overrideConfigKey("quarkus.datasource.db-kind", "h2") + .overrideConfigKey("quarkus.datasource.username", "sa") + .overrideConfigKey("quarkus.datasource.password", "sa") + .overrideConfigKey("quarkus.datasource.jdbc.url", + "jdbc:h2:tcp://localhost/mem:test-quarkus-migrate-at-start;DB_CLOSE_DELAY=-1"); + @Inject + @LiquibaseDataSource("users") + Instance liquibaseForNamedDatasource; + + @Test + @DisplayName("If there is no config for a named datasource, the application should boot, but Liquibase should be deactivated for that datasource") + public void testBootSucceedsButLiquibaseDeactivated() { + assertThatThrownBy(() -> liquibaseForNamedDatasource.get().getConfiguration()) + .isInstanceOf(UnsatisfiedResolutionException.class) + .hasMessageContaining("No bean found"); + } +} diff --git 
a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyTest.java b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyTest.java deleted file mode 100644 index d051c5fe33ad4..0000000000000 --- a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigEmptyTest.java +++ /dev/null @@ -1,36 +0,0 @@ -package io.quarkus.liquibase.test; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -import jakarta.enterprise.inject.Instance; -import jakarta.enterprise.inject.UnsatisfiedResolutionException; -import jakarta.inject.Inject; - -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.liquibase.LiquibaseFactory; -import io.quarkus.test.QuarkusUnitTest; - -/** - * Liquibase needs a datasource to work. - * This tests assures, that an error occurs, - * as soon as the default liquibase configuration points to an missing default datasource. - */ -public class LiquibaseExtensionConfigEmptyTest { - - @Inject - Instance liquibase; - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withApplicationRoot((jar) -> jar - .addAsResource("config-empty.properties", "application.properties")); - - @Test - @DisplayName("Injecting (default) liquibase should fail if there is no datasource configured") - public void testLiquibaseNotAvailableWithoutDataSource() { - assertThrows(UnsatisfiedResolutionException.class, () -> liquibase.get().getConfiguration()); - } -} diff --git a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigMissingNamedDataSourceTest.java b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigMissingNamedDataSourceTest.java deleted file mode 100644 index 9f215776eb804..0000000000000 --- a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigMissingNamedDataSourceTest.java +++ /dev/null @@ -1,38 +0,0 @@ -package io.quarkus.liquibase.test; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -import jakarta.enterprise.inject.Instance; -import jakarta.enterprise.inject.UnsatisfiedResolutionException; -import jakarta.inject.Inject; - -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.liquibase.LiquibaseDataSource; -import io.quarkus.liquibase.LiquibaseFactory; -import io.quarkus.test.QuarkusUnitTest; - -/** - * Liquibase needs a datasource to work. - * This tests assures, that an error occurs, as soon as a named liquibase configuration points to a missing datasource. 
- */ -public class LiquibaseExtensionConfigMissingNamedDataSourceTest { - - @Inject - @LiquibaseDataSource("users") - Instance liquibase; - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withApplicationRoot((jar) -> jar - .addAsResource("db/changeLog.xml", "db/changeLog.xml") - .addAsResource("config-for-missing-named-datasource.properties", "application.properties")); - - @Test - @DisplayName("Injecting liquibase should fail if the named datasource is missing") - public void testLiquibaseNotAvailableWithoutDataSource() { - assertThrows(UnsatisfiedResolutionException.class, liquibase::get); - } -} diff --git a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionMigrateAtStartDefaultDatasourceConfigEmptyTest.java b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionMigrateAtStartDefaultDatasourceConfigEmptyTest.java new file mode 100644 index 0000000000000..bad255b85ccf3 --- /dev/null +++ b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionMigrateAtStartDefaultDatasourceConfigEmptyTest.java @@ -0,0 +1,34 @@ +package io.quarkus.liquibase.test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class LiquibaseExtensionMigrateAtStartDefaultDatasourceConfigEmptyTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addAsResource("db/changeLog.xml", "db/changeLog.xml")) + .overrideConfigKey("quarkus.liquibase.migrate-at-start", "true") + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false") + .assertException(t -> assertThat(t).cause().cause() + .hasMessageContainingAll("Unable to find datasource '' for Liquibase", + "Datasource '' is not configured.", + "To solve this, configure datasource ''.", + "Refer to https://quarkus.io/guides/datasource for guidance.")); + + @Test + @DisplayName("If there is no config for the default datasource, and if migrate-at-start is enabled, the application should fail to boot") + public void testBootFails() { + // Should not be reached because boot should fail. 
+ assertTrue(false); + } + +} diff --git a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionMigrateAtStartNamedDatasourceConfigEmptyTest.java b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionMigrateAtStartNamedDatasourceConfigEmptyTest.java new file mode 100644 index 0000000000000..3cc31a063195b --- /dev/null +++ b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionMigrateAtStartNamedDatasourceConfigEmptyTest.java @@ -0,0 +1,44 @@ +package io.quarkus.liquibase.test; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import jakarta.enterprise.inject.Instance; +import jakarta.enterprise.inject.UnsatisfiedResolutionException; +import jakarta.inject.Inject; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.liquibase.LiquibaseDataSource; +import io.quarkus.liquibase.LiquibaseFactory; +import io.quarkus.test.QuarkusUnitTest; + +public class LiquibaseExtensionMigrateAtStartNamedDatasourceConfigEmptyTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addAsResource("db/changeLog.xml", "db/changeLog.xml")) + .overrideConfigKey("quarkus.liquibase.users.migrate-at-start", "true") + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false") + // We need this otherwise it's going to be the *default* datasource making everything fail + .overrideConfigKey("quarkus.datasource.db-kind", "h2") + .overrideConfigKey("quarkus.datasource.username", "sa") + .overrideConfigKey("quarkus.datasource.password", "sa") + .overrideConfigKey("quarkus.datasource.jdbc.url", + "jdbc:h2:tcp://localhost/mem:test-quarkus-migrate-at-start;DB_CLOSE_DELAY=-1"); + + @Inject + @LiquibaseDataSource("users") + Instance liquibaseForNamedDatasource; + + @Test + @DisplayName("If there is no config for a named datasource, even if migrate-at-start is enabled, the application should boot, but Liquibase should be deactivated for that datasource") + public void testBootSucceedsButLiquibaseDeactivated() { + assertThatThrownBy(() -> liquibaseForNamedDatasource.get().getConfiguration()) + .isInstanceOf(UnsatisfiedResolutionException.class) + .hasMessageContaining("No bean found"); + } +} diff --git a/extensions/liquibase/deployment/src/test/resources/config-empty.properties b/extensions/liquibase/deployment/src/test/resources/config-empty.properties deleted file mode 100644 index 7484177fc8b23..0000000000000 --- a/extensions/liquibase/deployment/src/test/resources/config-empty.properties +++ /dev/null @@ -1 +0,0 @@ -quarkus.datasource.devservices.enabled=false \ No newline at end of file diff --git a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRecorder.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRecorder.java index d85785d61e35b..4b045a357417b 100644 --- a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRecorder.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRecorder.java @@ -1,5 +1,6 @@ package io.quarkus.liquibase.runtime; +import java.util.Locale; import java.util.function.Function; import javax.sql.DataSource; @@ -13,6 +14,7 @@ import io.quarkus.arc.InjectableInstance; import 
io.quarkus.arc.InstanceHandle; import io.quarkus.arc.SyntheticCreationalContext; +import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.liquibase.LiquibaseFactory; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; @@ -32,9 +34,16 @@ public Function, LiquibaseFactory> return new Function, LiquibaseFactory>() { @Override public LiquibaseFactory apply(SyntheticCreationalContext context) { - DataSource dataSource = context.getInjectedReference(DataSources.class).getDataSource(dataSourceName); - if (dataSource instanceof UnconfiguredDataSource) { - throw new UnsatisfiedResolutionException("No datasource has been configured"); + DataSource dataSource; + try { + dataSource = context.getInjectedReference(DataSources.class).getDataSource(dataSourceName); + if (dataSource instanceof UnconfiguredDataSource) { + throw DataSourceUtil.dataSourceNotConfigured(dataSourceName); + } + } catch (RuntimeException e) { + throw new UnsatisfiedResolutionException(String.format(Locale.ROOT, + "Unable to find datasource '%s' for Liquibase: %s", + dataSourceName, e.getMessage()), e); } LiquibaseFactoryProducer liquibaseProducer = context.getInjectedReference(LiquibaseFactoryProducer.class); diff --git a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfConfig.java b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfConfig.java index e67d288e8d14b..0d604ae5c3b9f 100644 --- a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfConfig.java +++ b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfConfig.java @@ -100,6 +100,27 @@ public class GelfConfig { @ConfigItem public boolean includeFullMdc; + /** + * Send additional fields whose values are obtained from MDC. Name of the Fields are comma-separated. Example: + * mdcFields=Application,Version,SomeOtherFieldName + */ + @ConfigItem() + public Optional mdcFields; + + /** + * Dynamic MDC Fields allows you to extract MDC values based on one or more regular expressions. Multiple regexes are + * comma-separated. The name of the MDC entry is used as GELF field name. + */ + @ConfigItem + public Optional dynamicMdcFields; + + /** + * Pattern-based type specification for additional and MDC fields. Key-value pairs are comma-separated. Example: + * my_field.*=String,business\..*\.field=double + */ + @ConfigItem + public Optional dynamicMdcFieldTypes; + /** * Maximum message size (in bytes). * If the message size is exceeded, the appender will submit the message in multiple chunks. 
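To make the three new GelfConfig options concrete, here is a minimal application.properties sketch. The quarkus.log.handler.gelf.* prefix and the existing enabled/include-full-mdc keys are the ones this extension already exposes; the kebab-case names of the new keys are assumed from the usual @ConfigItem field-name mapping, and the field names and patterns are illustrative (the last entry reuses the example from the Javadoc above).

    quarkus.log.handler.gelf.enabled=true
    quarkus.log.handler.gelf.include-full-mdc=false
    # assumed keys for the new mdcFields / dynamicMdcFields / dynamicMdcFieldTypes items
    quarkus.log.handler.gelf.mdc-fields=Application,Version,SomeOtherFieldName
    quarkus.log.handler.gelf.dynamic-mdc-fields=trace.*
    quarkus.log.handler.gelf.dynamic-mdc-field-types=my_field.*=String

The GelfLogHandlerRecorder hunk just below is what copies these values onto the handler via setMdcFields, setDynamicMdcFields and setDynamicMdcFieldTypes.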
diff --git a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfLogHandlerRecorder.java b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfLogHandlerRecorder.java index 6000d36215459..95d4a9b03e32d 100644 --- a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfLogHandlerRecorder.java +++ b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfLogHandlerRecorder.java @@ -39,6 +39,9 @@ public RuntimeValue> initializeHandler(final GelfConfig config handler.setFilterStackTrace(config.filterStackTrace); handler.setTimestampPattern(config.timestampPattern); handler.setIncludeFullMdc(config.includeFullMdc); + handler.setDynamicMdcFields(config.dynamicMdcFields.orElse(null)); + handler.setMdcFields(config.mdcFields.orElse(null)); + handler.setDynamicMdcFieldTypes(config.dynamicMdcFieldTypes.orElse(null)); handler.setHost(config.host); handler.setPort(config.port); handler.setLevel(config.level); diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/impl/ReactiveMongoCollectionImpl.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/impl/ReactiveMongoCollectionImpl.java index acac2af661e43..a122df0796b70 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/impl/ReactiveMongoCollectionImpl.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/impl/ReactiveMongoCollectionImpl.java @@ -584,6 +584,27 @@ public Uni updateOne(ClientSession clientSession, Bson filter, Bso return Wrappers.toUni(collection.updateOne(clientSession, filter, update, options)); } + @Override + public Uni updateOne(Bson filter, List update) { + return Wrappers.toUni(collection.updateOne(filter, update)); + } + + @Override + public Uni updateOne(Bson filter, List update, UpdateOptions options) { + return Wrappers.toUni(collection.updateOne(filter, update, options)); + } + + @Override + public Uni updateOne(ClientSession clientSession, Bson filter, List update) { + return Wrappers.toUni(collection.updateOne(clientSession, filter, update)); + } + + @Override + public Uni updateOne(ClientSession clientSession, Bson filter, List update, + UpdateOptions options) { + return Wrappers.toUni(collection.updateOne(clientSession, filter, update, options)); + } + @Override public Uni updateMany(Bson filter, Bson update) { return Wrappers.toUni(collection.updateMany(filter, update)); @@ -605,6 +626,27 @@ public Uni updateMany(ClientSession clientSession, Bson filter, Bs return Wrappers.toUni(collection.updateMany(clientSession, filter, update, options)); } + @Override + public Uni updateMany(Bson filter, List update) { + return Wrappers.toUni(collection.updateMany(filter, update)); + } + + @Override + public Uni updateMany(Bson filter, List update, UpdateOptions options) { + return Wrappers.toUni(collection.updateMany(filter, update, options)); + } + + @Override + public Uni updateMany(ClientSession clientSession, Bson filter, List update) { + return Wrappers.toUni(collection.updateMany(clientSession, filter, update)); + } + + @Override + public Uni updateMany(ClientSession clientSession, Bson filter, List update, + UpdateOptions options) { + return Wrappers.toUni(collection.updateMany(clientSession, filter, update, options)); + } + @Override public Uni findOneAndDelete(Bson filter) { return Wrappers.toUni(collection.findOneAndDelete(filter)); @@ -667,6 +709,27 @@ public Uni findOneAndUpdate(ClientSession clientSession, Bson filter, Bson up return 
Wrappers.toUni(collection.findOneAndUpdate(clientSession, filter, update, options)); } + @Override + public Uni findOneAndUpdate(Bson filter, List update) { + return Wrappers.toUni(collection.findOneAndUpdate(filter, update)); + } + + @Override + public Uni findOneAndUpdate(Bson filter, List update, FindOneAndUpdateOptions options) { + return Wrappers.toUni(collection.findOneAndUpdate(filter, update, options)); + } + + @Override + public Uni findOneAndUpdate(ClientSession clientSession, Bson filter, List update) { + return Wrappers.toUni(collection.findOneAndUpdate(clientSession, filter, update)); + } + + @Override + public Uni findOneAndUpdate(ClientSession clientSession, Bson filter, List update, + FindOneAndUpdateOptions options) { + return Wrappers.toUni(collection.findOneAndUpdate(clientSession, filter, update, options)); + } + @Override public Uni drop() { return Wrappers.toUni(collection.drop()); diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoCollection.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoCollection.java index 65ecdb58f72a4..34c7532d1a7c8 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoCollection.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoCollection.java @@ -1014,6 +1014,62 @@ Uni replaceOne(ClientSession clientSession, Bson filter, T replace Uni updateOne(ClientSession clientSession, Bson filter, Bson update, UpdateOptions options); + /** + * Update a single document in the collection according to the specified arguments. + * + *
+ * Note: Supports retryable writes on MongoDB server versions 3.6 or higher when the retryWrites setting is enabled. + *
+ * + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @return a publisher with a single element the UpdateResult + */ + Uni updateOne(Bson filter, List update); + + /** + * Update a single document in the collection according to the specified arguments. + * + *
+ * Note: Supports retryable writes on MongoDB server versions 3.6 or higher when the retryWrites setting is enabled. + *
+ * + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @param options the options to apply to the update operation + * @return a publisher with a single element the UpdateResult + */ + Uni updateOne(Bson filter, List update, UpdateOptions options); + + /** + * Update a single document in the collection according to the specified arguments. + * + *
+ * Note: Supports retryable writes on MongoDB server versions 3.6 or higher when the retryWrites setting is enabled. + *
+ * + * @param clientSession the client session with which to associate this operation + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @return a publisher with a single element the UpdateResult + */ + Uni updateOne(ClientSession clientSession, Bson filter, List update); + + /** + * Update a single document in the collection according to the specified arguments. + * + *
+ * Note: Supports retryable writes on MongoDB server versions 3.6 or higher when the retryWrites setting is enabled. + *
+ * + * @param clientSession the client session with which to associate this operation + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @param options the options to apply to the update operation + * @return a publisher with a single element the UpdateResult + */ + Uni updateOne(ClientSession clientSession, Bson filter, List update, UpdateOptions options); + /** * Update all documents in the collection according to the specified arguments. * @@ -1059,6 +1115,46 @@ Uni updateOne(ClientSession clientSession, Bson filter, Bson updat Uni updateMany(ClientSession clientSession, Bson filter, Bson update, UpdateOptions options); + /** + * Update all documents in the collection according to the specified arguments. + * + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @return a publisher with a single element the UpdateResult + */ + Uni updateMany(Bson filter, List update); + + /** + * Update all documents in the collection according to the specified arguments. + * + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @param options the options to apply to the update operation + * @return a publisher with a single element the UpdateResult + */ + Uni updateMany(Bson filter, List update, UpdateOptions options); + + /** + * Update all documents in the collection according to the specified arguments. + * + * @param clientSession the client session with which to associate this operation + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @return a publisher with a single element the UpdateResult + */ + Uni updateMany(ClientSession clientSession, Bson filter, List update); + + /** + * Update all documents in the collection according to the specified arguments. + * + * @param clientSession the client session with which to associate this operation + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @param options the options to apply to the update operation + * @return a publisher with a single element the UpdateResult + */ + Uni updateMany(ClientSession clientSession, Bson filter, List update, UpdateOptions options); + /** * Atomically find a document and remove it. * @@ -1217,6 +1313,79 @@ Uni findOneAndReplace(ClientSession clientSession, Bson filter, T replacement Uni findOneAndUpdate(ClientSession clientSession, Bson filter, Bson update, FindOneAndUpdateOptions options); + /** + * Atomically find a document and update it. + * + *
+ * Note: Supports retryable writes on MongoDB server versions 3.6 or higher when the retryWrites setting is enabled. + *
+ * + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @return a publisher with a single element the document that was updated. Depending on the value of the + * {@code returnOriginal} + * property, this will either be the document as it was before the update or as it is after the update. If no + * documents matched the + * query filter, then null will be returned + */ + Uni findOneAndUpdate(Bson filter, List update); + + /** + * Atomically find a document and update it. + * + *
+ * Note: Supports retryable writes on MongoDB server versions 3.6 or higher when the retryWrites setting is enabled. + *
+ * + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @param options the options to apply to the operation + * @return a publisher with a single element the document that was updated. Depending on the value of the + * {@code returnOriginal} + * property, this will either be the document as it was before the update or as it is after the update. If no + * documents matched the + * query filter, then null will be returned + */ + Uni findOneAndUpdate(Bson filter, List update, FindOneAndUpdateOptions options); + + /** + * Atomically find a document and update it. + * + *
+ * Note: Supports retryable writes on MongoDB server versions 3.6 or higher when the retryWrites setting is enabled. + *
+ * + * @param clientSession the client session with which to associate this operation + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @return a publisher with a single element the document that was updated. Depending on the value of the + * {@code returnOriginal} + * property, this will either be the document as it was before the update or as it is after the update. If no + * documents matched the + * query filter, then null will be returned + */ + Uni findOneAndUpdate(ClientSession clientSession, Bson filter, List update); + + /** + * Atomically find a document and update it. + * + *
+ * Note: Supports retryable writes on MongoDB server versions 3.6 or higher when the retryWrites setting is enabled. + *
+ * + * @param clientSession the client session with which to associate this operation + * @param filter a document describing the query filter, which may not be null. + * @param update a pipeline describing the update, which may not be null. + * @param options the options to apply to the operation + * @return a publisher with a single element the document that was updated. Depending on the value of the + * {@code returnOriginal} + * property, this will either be the document as it was before the update or as it is after the update. If no + * documents matched the + * query filter, then null will be returned + */ + Uni findOneAndUpdate(ClientSession clientSession, Bson filter, List update, + FindOneAndUpdateOptions options); + /** * Drops this collection from the database. * diff --git a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java index f263832b8d810..4238f8b615937 100644 --- a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java +++ b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java @@ -28,7 +28,6 @@ import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.exporter.otlp.internal.OtlpSpanExporterProvider; import io.opentelemetry.instrumentation.annotations.AddingSpanAttributes; -import io.opentelemetry.instrumentation.annotations.SpanAttribute; import io.opentelemetry.instrumentation.annotations.WithSpan; import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurablePropagatorProvider; @@ -47,6 +46,8 @@ import io.quarkus.arc.processor.InterceptorBindingRegistrar; import io.quarkus.arc.processor.Transformation; import io.quarkus.datasource.common.runtime.DataSourceUtil; +import io.quarkus.deployment.Capabilities; +import io.quarkus.deployment.Capability; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.BuildSteps; @@ -92,7 +93,6 @@ public boolean test(AnnotationInstance annotationInstance) { private static final DotName WITH_SPAN_INTERCEPTOR = DotName.createSimple(WithSpanInterceptor.class.getName()); private static final DotName ADD_SPAN_ATTRIBUTES_INTERCEPTOR = DotName .createSimple(AddingSpanAttributesInterceptor.class.getName()); - private static final DotName SPAN_ATTRIBUTE = DotName.createSimple(SpanAttribute.class.getName()); @BuildStep AdditionalBeanBuildItem ensureProducerIsRetained() { @@ -263,10 +263,14 @@ void createOpenTelemetry( @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - void setupVertx(InstrumentationRecorder recorder, - BeanContainerBuildItem beanContainerBuildItem) { - - recorder.setupVertxTracer(beanContainerBuildItem.getValue()); + void setupVertx(InstrumentationRecorder recorder, BeanContainerBuildItem beanContainerBuildItem, + Capabilities capabilities) { + boolean sqlClientAvailable = capabilities.isPresent(Capability.REACTIVE_DB2_CLIENT) + || capabilities.isPresent(Capability.REACTIVE_MSSQL_CLIENT) + || capabilities.isPresent(Capability.REACTIVE_MYSQL_CLIENT) + || capabilities.isPresent(Capability.REACTIVE_ORACLE_CLIENT) + || capabilities.isPresent(Capability.REACTIVE_PG_CLIENT); + recorder.setupVertxTracer(beanContainerBuildItem.getValue(), sqlClientAvailable); } 
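Circling back to the ReactiveMongoCollection overloads introduced above: unlike the existing single-Bson update methods, the new variants take an aggregation pipeline (a list of stages) as the update, which allows computed updates. The sketch below assumes the generics stripped from the diff are Uni<UpdateResult> and a list of Bson stages, mirroring the underlying driver; the repository, collection and field names are illustrative.

    package org.acme.mongo;

    import java.util.List;

    import org.bson.Document;
    import org.bson.conversions.Bson;

    import com.mongodb.client.model.Filters;
    import com.mongodb.client.result.UpdateResult;

    import io.quarkus.mongodb.reactive.ReactiveMongoCollection;
    import io.smallrye.mutiny.Uni;

    public class OrderRepository {

        private final ReactiveMongoCollection<Document> orders;

        public OrderRepository(ReactiveMongoCollection<Document> orders) {
            this.orders = orders;
        }

        // Each list element is an aggregation stage, so the update can use
        // pipeline-only features such as the $$NOW variable.
        public Uni<UpdateResult> markShipped(String orderId) {
            List<Bson> pipeline = List.of(
                    new Document("$set", new Document("status", "SHIPPED")
                            .append("updatedAt", "$$NOW")));
            return orders.updateOne(Filters.eq("orderId", orderId), pipeline);
        }
    }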
@BuildStep diff --git a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/tracing/instrumentation/InstrumentationProcessor.java b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/tracing/instrumentation/InstrumentationProcessor.java index d8aa5e59bd0cb..ff6e4dda31e2e 100644 --- a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/tracing/instrumentation/InstrumentationProcessor.java +++ b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/tracing/instrumentation/InstrumentationProcessor.java @@ -20,6 +20,7 @@ import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.AdditionalIndexedClassesBuildItem; import io.quarkus.opentelemetry.deployment.tracing.TracerEnabled; +import io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.InstrumentationRecorder; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.grpc.GrpcTracingClientInterceptor; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.grpc.GrpcTracingServerInterceptor; @@ -71,17 +72,21 @@ public boolean getAsBoolean() { } @BuildStep(onlyIf = GrpcExtensionAvailable.class) - void grpcTracers(BuildProducer additionalBeans) { - additionalBeans.produce(new AdditionalBeanBuildItem(GrpcTracingServerInterceptor.class)); - additionalBeans.produce(new AdditionalBeanBuildItem(GrpcTracingClientInterceptor.class)); + void grpcTracers(BuildProducer additionalBeans, OTelBuildConfig config) { + if (config.instrument().grpc()) { + additionalBeans.produce(new AdditionalBeanBuildItem(GrpcTracingServerInterceptor.class)); + additionalBeans.produce(new AdditionalBeanBuildItem(GrpcTracingClientInterceptor.class)); + } } @BuildStep void registerRestClientClassicProvider( Capabilities capabilities, BuildProducer additionalIndexed, - BuildProducer additionalBeans) { - if (capabilities.isPresent(Capability.REST_CLIENT) && capabilities.isMissing(Capability.REST_CLIENT_REACTIVE)) { + BuildProducer additionalBeans, + OTelBuildConfig config) { + if (capabilities.isPresent(Capability.REST_CLIENT) && capabilities.isMissing(Capability.REST_CLIENT_REACTIVE) + && config.instrument().restClientClassic()) { additionalIndexed.produce(new AdditionalIndexedClassesBuildItem(OpenTelemetryClientFilter.class.getName())); additionalBeans.produce(new AdditionalBeanBuildItem(OpenTelemetryClientFilter.class)); } @@ -90,8 +95,9 @@ void registerRestClientClassicProvider( @BuildStep void registerReactiveMessagingMessageDecorator( Capabilities capabilities, - BuildProducer additionalBeans) { - if (capabilities.isPresent(Capability.SMALLRYE_REACTIVE_MESSAGING)) { + BuildProducer additionalBeans, + OTelBuildConfig config) { + if (capabilities.isPresent(Capability.SMALLRYE_REACTIVE_MESSAGING) && config.instrument().reactiveMessaging()) { additionalBeans.produce(new AdditionalBeanBuildItem(ReactiveMessagingTracingOutgoingDecorator.class)); additionalBeans.produce(new AdditionalBeanBuildItem(ReactiveMessagingTracingIncomingDecorator.class)); additionalBeans.produce(new AdditionalBeanBuildItem(ReactiveMessagingTracingEmitterDecorator.class)); @@ -115,35 +121,27 @@ VertxOptionsConsumerBuildItem vertxTracingOptions( // RESTEasy and Vert.x web @BuildStep - void registerResteasyClassicAndOrResteasyReactiveProvider( + void registerResteasyClassicAndOrResteasyReactiveProvider(OTelBuildConfig config, Capabilities capabilities, 
BuildProducer resteasyJaxrsProviderBuildItemBuildProducer) { - - boolean isResteasyClassicAvailable = capabilities.isPresent(Capability.RESTEASY); - - if (!isResteasyClassicAvailable) { - // if RestEasy is not available then no need to continue - return; + if (capabilities.isPresent(Capability.RESTEASY) && config.instrument().resteasyClassic()) { + resteasyJaxrsProviderBuildItemBuildProducer + .produce(new ResteasyJaxrsProviderBuildItem(OpenTelemetryClassicServerFilter.class.getName())); } - - resteasyJaxrsProviderBuildItemBuildProducer - .produce(new ResteasyJaxrsProviderBuildItem(OpenTelemetryClassicServerFilter.class.getName())); } @BuildStep void resteasyReactiveIntegration( Capabilities capabilities, BuildProducer containerRequestFilterBuildItemBuildProducer, - BuildProducer preExceptionMapperHandlerBuildItemBuildProducer) { - - if (!capabilities.isPresent(Capability.RESTEASY_REACTIVE)) { - // if RESTEasy Reactive is not available then no need to continue - return; + BuildProducer preExceptionMapperHandlerBuildItemBuildProducer, + OTelBuildConfig config) { + if (capabilities.isPresent(Capability.RESTEASY_REACTIVE) && config.instrument().resteasyReactive()) { + containerRequestFilterBuildItemBuildProducer + .produce(new CustomContainerRequestFilterBuildItem(OpenTelemetryReactiveServerFilter.class.getName())); + preExceptionMapperHandlerBuildItemBuildProducer + .produce(new PreExceptionMapperHandlerBuildItem(new AttachExceptionHandler())); } - containerRequestFilterBuildItemBuildProducer - .produce(new CustomContainerRequestFilterBuildItem(OpenTelemetryReactiveServerFilter.class.getName())); - preExceptionMapperHandlerBuildItemBuildProducer - .produce(new PreExceptionMapperHandlerBuildItem(new AttachExceptionHandler())); } } diff --git a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/common/TestSpanExporter.java b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/common/TestSpanExporter.java index 5f934bac68810..533e3ca62cd5b 100644 --- a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/common/TestSpanExporter.java +++ b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/common/TestSpanExporter.java @@ -51,7 +51,8 @@ public List getFinishedSpanItems(int spanCount) { } public void assertSpanCount(int spanCount) { - await().atMost(30, SECONDS).untilAsserted(() -> assertEquals(spanCount, finishedSpanItems.size())); + await().atMost(30, SECONDS).untilAsserted( + () -> assertEquals(spanCount, finishedSpanItems.size(), "Spans: " + finishedSpanItems.toString())); } public void reset() { diff --git a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/GrpcOpenInstrumentationDisabledTest.java b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/GrpcOpenInstrumentationDisabledTest.java new file mode 100644 index 0000000000000..5c6bb07a37f23 --- /dev/null +++ b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/GrpcOpenInstrumentationDisabledTest.java @@ -0,0 +1,100 @@ +package io.quarkus.opentelemetry.deployment.instrumentation; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; +import static io.opentelemetry.api.trace.SpanKind.INTERNAL; +import static io.quarkus.opentelemetry.deployment.common.TestSpanExporter.getSpanByKindAndParentId; +import static 
org.junit.jupiter.api.Assertions.assertEquals; + +import java.time.Duration; +import java.util.List; + +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.quarkus.grpc.GrpcClient; +import io.quarkus.grpc.GrpcService; +import io.quarkus.opentelemetry.deployment.Greeter; +import io.quarkus.opentelemetry.deployment.GreeterBean; +import io.quarkus.opentelemetry.deployment.GreeterClient; +import io.quarkus.opentelemetry.deployment.GreeterGrpc; +import io.quarkus.opentelemetry.deployment.HelloProto; +import io.quarkus.opentelemetry.deployment.HelloReply; +import io.quarkus.opentelemetry.deployment.HelloReplyOrBuilder; +import io.quarkus.opentelemetry.deployment.HelloRequest; +import io.quarkus.opentelemetry.deployment.HelloRequestOrBuilder; +import io.quarkus.opentelemetry.deployment.MutinyGreeterGrpc; +import io.quarkus.opentelemetry.deployment.common.TestSpanExporter; +import io.quarkus.opentelemetry.deployment.common.TestSpanExporterProvider; +import io.quarkus.test.QuarkusUnitTest; +import io.smallrye.mutiny.Uni; + +public class GrpcOpenInstrumentationDisabledTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .withApplicationRoot(root -> root + .addClasses(TestSpanExporter.class, TestSpanExporterProvider.class) + .addClasses(HelloService.class) + .addClasses(GreeterGrpc.class, MutinyGreeterGrpc.class, + Greeter.class, GreeterBean.class, GreeterClient.class, + HelloProto.class, HelloRequest.class, HelloRequestOrBuilder.class, + HelloReply.class, HelloReplyOrBuilder.class) + .addAsResource(new StringAsset(TestSpanExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider")) + .withConfigurationResource("application-default.properties") + .overrideConfigKey("quarkus.grpc.clients.hello.host", "localhost") + .overrideConfigKey("quarkus.grpc.clients.hello.port", "9001") + .overrideConfigKey("quarkus.otel.instrument.grpc", "false"); + + @Inject + TestSpanExporter spanExporter; + + @GrpcClient + Greeter hello; + + @AfterEach + void tearDown() { + spanExporter.reset(); + } + + @Test + void testTratestTracingDisabled() { + String response = hello.sayHello( + HelloRequest.newBuilder().setName("ping").build()) + .map(HelloReply::getMessage) + .await().atMost(Duration.ofSeconds(5)); + assertEquals("Hello ping", response); + + List spans = spanExporter.getFinishedSpanItems(1); + assertEquals(1, spans.size()); + + SpanData internal = getSpanByKindAndParentId(spans, INTERNAL, "0000000000000000"); + assertEquals("span.internal", internal.getName()); + assertEquals("value", internal.getAttributes().get(stringKey("grpc.internal"))); + } + + @GrpcService + public static class HelloService implements Greeter { + + @Inject + Tracer tracer; + + @Override + public Uni sayHello(HelloRequest request) { + Span span = tracer.spanBuilder("span.internal") + .setSpanKind(INTERNAL) + .setAttribute("grpc.internal", "value") + .startSpan(); + span.end(); + return Uni.createFrom().item(HelloReply.newBuilder().setMessage("Hello " + request.getName()).build()); + } + } + +} diff --git 
a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/VertxEventBusInstrumentationDisabledTest.java b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/VertxEventBusInstrumentationDisabledTest.java new file mode 100644 index 0000000000000..8956b57879970 --- /dev/null +++ b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/VertxEventBusInstrumentationDisabledTest.java @@ -0,0 +1,92 @@ +package io.quarkus.opentelemetry.deployment.instrumentation; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; +import static io.opentelemetry.api.trace.SpanKind.INTERNAL; +import static io.quarkus.opentelemetry.deployment.common.TestSpanExporter.getSpanByKindAndParentId; +import static java.net.HttpURLConnection.HTTP_OK; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.List; + +import jakarta.enterprise.event.Observes; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; + +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.quarkus.opentelemetry.deployment.common.TestSpanExporter; +import io.quarkus.opentelemetry.deployment.common.TestSpanExporterProvider; +import io.quarkus.opentelemetry.deployment.common.TestUtil; +import io.quarkus.test.QuarkusUnitTest; +import io.quarkus.vertx.ConsumeEvent; +import io.restassured.RestAssured; +import io.vertx.core.eventbus.EventBus; +import io.vertx.ext.web.Router; + +public class VertxEventBusInstrumentationDisabledTest { + + @RegisterExtension + static final QuarkusUnitTest unitTest = new QuarkusUnitTest() + .withApplicationRoot(root -> root + .addClasses(Events.class, TestUtil.class, TestSpanExporter.class, TestSpanExporterProvider.class) + .addAsResource(new StringAsset(TestSpanExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider")) + .overrideConfigKey("quarkus.otel.traces.exporter", "test-span-exporter") + .overrideConfigKey("quarkus.otel.metrics.exporter", "none") + .overrideConfigKey("quarkus.otel.logs.exporter", "none") + .overrideConfigKey("quarkus.otel.bsp.schedule.delay", "200") + .overrideConfigKey("quarkus.otel.instrument.vertx-event-bus", "false"); + + @Inject + TestSpanExporter spanExporter; + + @AfterEach + void tearDown() { + spanExporter.reset(); + } + + @Test + void testTracingDisabled() throws Exception { + + RestAssured.when().get("/hello/event") + .then() + .statusCode(HTTP_OK) + .body(equalTo("BAR")); + + // http request and dummy + List spans = spanExporter.getFinishedSpanItems(2); + assertEquals(2, spans.size()); + + SpanData internal = getSpanByKindAndParentId(spans, INTERNAL, "0000000000000000"); + assertEquals("io.quarkus.vertx.opentelemetry", internal.getName()); + assertEquals("dummy", internal.getAttributes().get(stringKey("test.message"))); + } + + @Singleton + public static class Events { + + @Inject + Tracer tracer; + + @ConsumeEvent("foo") + String echo(String foo) { + tracer.spanBuilder("io.quarkus.vertx.opentelemetry").startSpan() + .setAttribute("test.message", "dummy") + .end(); + return foo.toUpperCase(); + } + + void registerRoutes(@Observes 
Router router, EventBus eventBus) { + router.get("/hello/event").handler(rc -> { + eventBus.request("foo", "bar").onComplete(r -> rc.end(r.result().body().toString())); + }); + } + } + +} diff --git a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/VertxHttpInstrumentationDisabledTest.java b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/VertxHttpInstrumentationDisabledTest.java new file mode 100644 index 0000000000000..eb8cc3eb05375 --- /dev/null +++ b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/VertxHttpInstrumentationDisabledTest.java @@ -0,0 +1,85 @@ +package io.quarkus.opentelemetry.deployment.instrumentation; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; +import static io.opentelemetry.api.trace.SpanKind.INTERNAL; +import static io.quarkus.opentelemetry.deployment.common.TestSpanExporter.getSpanByKindAndParentId; +import static java.net.HttpURLConnection.HTTP_OK; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.List; + +import jakarta.enterprise.event.Observes; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; + +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.quarkus.opentelemetry.deployment.common.TestSpanExporter; +import io.quarkus.opentelemetry.deployment.common.TestSpanExporterProvider; +import io.quarkus.opentelemetry.deployment.common.TestUtil; +import io.quarkus.test.QuarkusUnitTest; +import io.restassured.RestAssured; +import io.vertx.core.eventbus.EventBus; +import io.vertx.ext.web.Router; + +public class VertxHttpInstrumentationDisabledTest { + + @RegisterExtension + static final QuarkusUnitTest unitTest = new QuarkusUnitTest() + .withApplicationRoot(root -> root + .addClasses(Events.class, TestUtil.class, TestSpanExporter.class, + TestSpanExporterProvider.class) + .addAsResource(new StringAsset(TestSpanExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider")) + .overrideConfigKey("quarkus.otel.traces.exporter", "test-span-exporter") + .overrideConfigKey("quarkus.otel.metrics.exporter", "none") + .overrideConfigKey("quarkus.otel.logs.exporter", "none") + .overrideConfigKey("quarkus.otel.bsp.schedule.delay", "200") + .overrideConfigKey("quarkus.otel.instrument.vertx-http", "false"); + + @Inject + TestSpanExporter spanExporter; + + @AfterEach + void tearDown() { + spanExporter.reset(); + } + + @Test + void testTracingDisabled() throws Exception { + RestAssured.when().get("/hello/foo") + .then() + .statusCode(HTTP_OK) + .body(equalTo("oof")); + + List spans = spanExporter.getFinishedSpanItems(1); + assertEquals(1, spans.size()); + + SpanData internal = getSpanByKindAndParentId(spans, INTERNAL, "0000000000000000"); + assertEquals("io.quarkus.vertx.opentelemetry", internal.getName()); + assertEquals("dummy", internal.getAttributes().get(stringKey("test.message"))); + } + + @Singleton + public static class Events { + + @Inject + Tracer tracer; + + void registerRoutes(@Observes Router router, EventBus eventBus) { + router.get("/hello/foo").handler(rc -> { + 
tracer.spanBuilder("io.quarkus.vertx.opentelemetry").startSpan() + .setAttribute("test.message", "dummy") + .end(); + rc.end("oof"); + }); + } + } + +} diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/OTelFallbackConfigSourceInterceptor.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/OTelFallbackConfigSourceInterceptor.java index 4813bbcf61c1a..6eddefe11db8e 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/OTelFallbackConfigSourceInterceptor.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/OTelFallbackConfigSourceInterceptor.java @@ -42,7 +42,7 @@ public OTelFallbackConfigSourceInterceptor() { @Override public ConfigValue getValue(final ConfigSourceInterceptorContext context, final String name) { ConfigValue value = super.getValue(context, name); - if (name.equals("quarkus.otel.traces.sampler")) { + if (value != null && name.equals("quarkus.otel.traces.sampler")) { return value.withValue(LEGACY_SAMPLER_NAME_CONVERTER.convert(value.getValue())); } return value; diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/build/InstrumentBuildTimeConfig.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/build/InstrumentBuildTimeConfig.java new file mode 100644 index 0000000000000..09ecb1532018b --- /dev/null +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/build/InstrumentBuildTimeConfig.java @@ -0,0 +1,41 @@ +package io.quarkus.opentelemetry.runtime.config.build; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.smallrye.config.WithDefault; + +@ConfigGroup +public interface InstrumentBuildTimeConfig { + + /** + * Enables instrumentation for gRPC. + */ + @WithDefault("true") + boolean grpc(); + + /** + * Enables instrumentation for SmallRye Reactive Messaging. + */ + @WithDefault("true") + boolean reactiveMessaging(); + + /** + * Enables instrumentation for JAX-RS Rest Client backed by RESTEasy Classic. + */ + @WithDefault("true") + boolean restClientClassic(); + + /** + * Enables instrumentation for RESTEasy Reactive. + */ + @WithDefault("true") + boolean resteasyReactive(); + + /** + * Enables instrumentation for RESTEasy Classic. + */ + @WithDefault("true") + boolean resteasyClassic(); + + // NOTE: agroal, graphql and scheduler have their own config properties + +} diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/build/OTelBuildConfig.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/build/OTelBuildConfig.java index e7d2620b9c8de..679cf07f40d4c 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/build/OTelBuildConfig.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/build/OTelBuildConfig.java @@ -18,6 +18,7 @@ @ConfigMapping(prefix = "quarkus.otel") @ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) public interface OTelBuildConfig { + String INSTRUMENTATION_NAME = "io.quarkus.opentelemetry"; /** @@ -61,4 +62,9 @@ public interface OTelBuildConfig { */ @WithDefault(TRACE_CONTEXT + "," + BAGGAGE) List propagators(); + + /** + * Enable/disable instrumentation for specific technologies. 
+ */ + InstrumentBuildTimeConfig instrument(); } diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/InstrumentRuntimeConfig.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/InstrumentRuntimeConfig.java new file mode 100644 index 0000000000000..f5c5cdddd104b --- /dev/null +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/InstrumentRuntimeConfig.java @@ -0,0 +1,27 @@ +package io.quarkus.opentelemetry.runtime.config.runtime; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.smallrye.config.WithDefault; + +@ConfigGroup +public interface InstrumentRuntimeConfig { + + /** + * Enables instrumentation for Vert.x HTTP. + */ + @WithDefault("true") + boolean vertxHttp(); + + /** + * Enables instrumentation for Vert.x Event Bus. + */ + @WithDefault("true") + boolean vertxEventBus(); + + /** + * Enables instrumentation for Vert.x SQL Client. + */ + @WithDefault("true") + boolean vertxSqlClient(); + +} diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/OTelRuntimeConfig.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/OTelRuntimeConfig.java index d38e1fc83fe7a..f428629d4f957 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/OTelRuntimeConfig.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/OTelRuntimeConfig.java @@ -71,4 +71,9 @@ public interface OTelRuntimeConfig { @WithName("experimental.shutdown-wait-time") @WithDefault("1s") Duration experimentalShutdownWaitTime(); + + /** + * Enable/disable instrumentation for specific technologies. 
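These new instrument() groups surface as quarkus.otel.instrument.* properties: the runtime toggles map to quarkus.otel.instrument.vertx-http, quarkus.otel.instrument.vertx-event-bus and quarkus.otel.instrument.vertx-sql-client (the first two are exercised by the tests added earlier in this patch), and the build-time toggles presumably follow the same kebab-case mapping (for example quarkus.otel.instrument.grpc or quarkus.otel.instrument.rest-client-classic). A minimal sketch of switching one runtime toggle off in a QuarkusUnitTest, modelled on those tests; the class name is hypothetical and the exporter/application setup is trimmed:

package io.quarkus.opentelemetry.deployment.instrumentation;

import org.junit.jupiter.api.extension.RegisterExtension;

import io.quarkus.test.QuarkusUnitTest;

public class VertxSqlClientInstrumentationDisabledExample {

    @RegisterExtension
    static final QuarkusUnitTest unitTest = new QuarkusUnitTest()
            // span exporter and application classes omitted; see the two tests added earlier in this patch
            .overrideConfigKey("quarkus.otel.traces.exporter", "test-span-exporter")
            .overrideConfigKey("quarkus.otel.instrument.vertx-sql-client", "false");
}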
+ */ + InstrumentRuntimeConfig instrument(); } diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/InstrumentationRecorder.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/InstrumentationRecorder.java index 9613b7a13eebc..105eb7f7a1881 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/InstrumentationRecorder.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/InstrumentationRecorder.java @@ -1,16 +1,20 @@ package io.quarkus.opentelemetry.runtime.tracing.intrumentation; +import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; import io.opentelemetry.api.OpenTelemetry; import io.quarkus.arc.runtime.BeanContainer; +import io.quarkus.opentelemetry.runtime.config.runtime.OTelRuntimeConfig; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.vertx.EventBusInstrumenterVertxTracer; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.vertx.HttpInstrumenterVertxTracer; +import io.quarkus.opentelemetry.runtime.tracing.intrumentation.vertx.InstrumenterVertxTracer; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.vertx.OpenTelemetryVertxMetricsFactory; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.vertx.OpenTelemetryVertxTracer; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.vertx.OpenTelemetryVertxTracingFactory; import io.quarkus.opentelemetry.runtime.tracing.intrumentation.vertx.SqlClientInstrumenterVertxTracer; +import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; import io.vertx.core.VertxOptions; import io.vertx.core.metrics.MetricsOptions; @@ -21,6 +25,12 @@ public class InstrumentationRecorder { public static final OpenTelemetryVertxTracingFactory FACTORY = new OpenTelemetryVertxTracingFactory(); + private final RuntimeValue config; + + public InstrumentationRecorder(RuntimeValue config) { + this.config = config; + } + /* RUNTIME INIT */ public Consumer getVertxTracingOptions() { TracingOptions tracingOptions = new TracingOptions() @@ -29,13 +39,19 @@ public Consumer getVertxTracingOptions() { } /* RUNTIME INIT */ - public void setupVertxTracer(BeanContainer beanContainer) { + public void setupVertxTracer(BeanContainer beanContainer, boolean sqlClientAvailable) { OpenTelemetry openTelemetry = beanContainer.beanInstance(OpenTelemetry.class); - OpenTelemetryVertxTracer openTelemetryVertxTracer = new OpenTelemetryVertxTracer(List.of( - new HttpInstrumenterVertxTracer(openTelemetry), - new EventBusInstrumenterVertxTracer(openTelemetry), - // TODO - Selectively register this in the recorder if the SQL Client is available. 
- new SqlClientInstrumenterVertxTracer(openTelemetry))); + List<InstrumenterVertxTracer<?, ?>> tracers = new ArrayList<>(3); + if (config.getValue().instrument().vertxHttp()) { + tracers.add(new HttpInstrumenterVertxTracer(openTelemetry)); + } + if (config.getValue().instrument().vertxEventBus()) { + tracers.add(new EventBusInstrumenterVertxTracer(openTelemetry)); + } + if (sqlClientAvailable && config.getValue().instrument().vertxSqlClient()) { + tracers.add(new SqlClientInstrumenterVertxTracer(openTelemetry)); + } + OpenTelemetryVertxTracer openTelemetryVertxTracer = new OpenTelemetryVertxTracer(tracers); FACTORY.getVertxTracerDelegator().setDelegate(openTelemetryVertxTracer); } diff --git a/extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/entity/PanacheEntityResourceGetMethodTest.java b/extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/entity/PanacheEntityResourceGetMethodTest.java index 55f7197d40a19..8f28973b0a27e 100644 --- a/extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/entity/PanacheEntityResourceGetMethodTest.java +++ b/extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/entity/PanacheEntityResourceGetMethodTest.java @@ -1,6 +1,7 @@ package io.quarkus.hibernate.orm.rest.data.panache.deployment.entity; import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import org.junit.jupiter.api.Test; @@ -29,4 +30,20 @@ void shouldCopyAdditionalMethodsAsResources() { .and().body("name", is("full collection")); } + @Test + void shouldReturnItemsForFullCollection() { + given().accept("application/json") + .when().get("/items?collection.id=full") + .then().statusCode(200) + .body("$", hasSize(2)); + } + + @Test + void shouldReturnNoItemsForEmptyCollection() { + given().accept("application/json") + .when().get("/items?collection.id=empty") + .then().statusCode(200) + .body("$", hasSize(0)); + } + } diff --git a/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/runtime/MongoOperations.java b/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/runtime/MongoOperations.java index c2285c29c4356..93f411727938b 100644 --- a/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/runtime/MongoOperations.java +++ b/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/runtime/MongoOperations.java @@ -333,9 +333,8 @@ ClientSession getSession(Object entity) { return getSession(entity.getClass()); } - ClientSession getSession(Class<?> entityClass) { + public ClientSession getSession(Class<?> entityClass) { ClientSession clientSession = null; - MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class); InstanceHandle<TransactionSynchronizationRegistry> instance = Arc.container() .instance(TransactionSynchronizationRegistry.class); if (instance.isAvailable()) { @@ -343,6 +342,7 @@ ClientSession getSession(Class<?> entityClass) { if (registry.getTransactionStatus() == Status.STATUS_ACTIVE) { clientSession = (ClientSession) registry.getResource(SESSION_KEY); if (clientSession == null) { + MongoEntity mongoEntity = entityClass == null ?
null : entityClass.getAnnotation(MongoEntity.class); return registerClientSession(mongoEntity, registry); } } @@ -350,6 +350,10 @@ ClientSession getSession(Class entityClass) { return clientSession; } + public ClientSession getSession() { + return getSession(null); + } + private ClientSession registerClientSession(MongoEntity mongoEntity, TransactionSynchronizationRegistry registry) { TransactionManager transactionManager = Arc.container().instance(TransactionManager.class).get(); diff --git a/extensions/panache/mongodb-panache-kotlin/runtime/src/main/kotlin/io/quarkus/mongodb/panache/kotlin/Panache.kt b/extensions/panache/mongodb-panache-kotlin/runtime/src/main/kotlin/io/quarkus/mongodb/panache/kotlin/Panache.kt new file mode 100644 index 0000000000000..817c55f54bd43 --- /dev/null +++ b/extensions/panache/mongodb-panache-kotlin/runtime/src/main/kotlin/io/quarkus/mongodb/panache/kotlin/Panache.kt @@ -0,0 +1,27 @@ +package io.quarkus.mongodb.panache.kotlin + +import com.mongodb.session.ClientSession +import io.quarkus.mongodb.panache.kotlin.runtime.KotlinMongoOperations + +object Panache { + /** + * Access the current MongoDB ClientSession from the transaction context. Can be used inside a + * method annotated with `@Transactional` to manually access the client session. + * + * @return ClientSession or null if not in the context of a transaction. + */ + val session: ClientSession + get() = KotlinMongoOperations.INSTANCE.session + + /** + * Access the current MongoDB ClientSession from the transaction context. + * + * @param entityClass the class of the MongoDB entity in case it is configured to use the + * non-default client. + * @return ClientSession or null if not in the context of a transaction. + * @see [session] + */ + fun getSession(entityClass: Class<*>?): ClientSession { + return KotlinMongoOperations.INSTANCE.getSession(entityClass) + } +} diff --git a/extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/Panache.java b/extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/Panache.java new file mode 100644 index 0000000000000..2e1f900dee367 --- /dev/null +++ b/extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/Panache.java @@ -0,0 +1,30 @@ +package io.quarkus.mongodb.panache; + +import com.mongodb.session.ClientSession; + +import io.quarkus.mongodb.panache.runtime.JavaMongoOperations; + +public class Panache { + + /** + * Access the current MongoDB ClientSession from the transaction context. + * Can be used inside a method annotated with `@Transactional` to manually access the client session. + * + * @return ClientSession or null if not in the context of a transaction. + */ + public static ClientSession getSession() { + return JavaMongoOperations.INSTANCE.getSession(); + } + + /** + * Access the current MongoDB ClientSession from the transaction context. + * + * @see #getSession() + * + * @param entityClass the class of the MongoDB entity in case it is configured to use the non-default client. + * @return ClientSession or null if not in the context of a transaction. 
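The new static helpers are meant to be called from inside a JTA transaction; outside of one they return null, as the javadoc above states. A minimal sketch of using the Java variant, assuming a hypothetical Panache MongoDB entity named Person and a hypothetical PersonService bean:

import com.mongodb.session.ClientSession;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.transaction.Transactional;

import io.quarkus.mongodb.panache.Panache;

@ApplicationScoped
public class PersonService {

    @Transactional
    public void persistWithSession(Person person) {
        // ClientSession bound to the current transaction, or null when no transaction is active
        ClientSession session = Panache.getSession(Person.class);
        person.persist();
        // the session can be handed to lower-level MongoDB operations if needed
    }
}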
+ */ + public static ClientSession getSession(Class<?> entityClass) { + return JavaMongoOperations.INSTANCE.getSession(entityClass); + } } diff --git a/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/methods/ListMethodImplementor.java b/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/methods/ListMethodImplementor.java index 1b8ec8077590f..99abbdabdc13e 100644 --- a/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/methods/ListMethodImplementor.java +++ b/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/methods/ListMethodImplementor.java @@ -184,7 +184,11 @@ private void implementPaged(ClassCreator classCreator, ResourceMetadata resource parameters.add(param("size", int.class, intType())); parameters.add(param("uriInfo", UriInfo.class)); parameters.add(param("namedQuery", String.class)); - parameters.addAll(compatibleFieldsForQuery); + for (SignatureMethodCreator.Parameter param : compatibleFieldsForQuery) { + parameters.add(param( + param.getName().replace(".", "__"), + param.getClazz())); + } MethodCreator methodCreator = SignatureMethodCreator.getMethodCreator(getMethodName(), classCreator, isNotReactivePanache() ? responseType() : uniType(resourceMetadata.getEntityType()), parameters.toArray(new SignatureMethodCreator.Parameter[0])); @@ -271,7 +275,11 @@ private void implementNotPaged(ClassCreator classCreator, ResourceMetadata resou List<SignatureMethodCreator.Parameter> parameters = new ArrayList<>(); parameters.add(param("sort", List.class, parameterizedType(classType(List.class), classType(String.class)))); parameters.add(param("namedQuery", String.class)); - parameters.addAll(compatibleFieldsForQuery); + for (SignatureMethodCreator.Parameter param : compatibleFieldsForQuery) { + parameters.add(param( + param.getName().replace(".", "__"), + param.getClazz())); + } MethodCreator methodCreator = SignatureMethodCreator.getMethodCreator(getMethodName(), classCreator, isNotReactivePanache() ?
responseType() : uniType(resourceMetadata.getEntityType()), parameters.toArray(new SignatureMethodCreator.Parameter[0])); @@ -321,13 +329,14 @@ public ResultHandle list(BytecodeCreator creator, ResourceMetadata resourceMetad ResultHandle queryList = creator.newInstance(ofConstructor(ArrayList.class)); for (Map.Entry field : fieldValues.entrySet()) { String fieldName = field.getKey(); + String paramName = fieldName.replace(".", "__"); ResultHandle fieldValueFromQuery = field.getValue(); BytecodeCreator fieldValueFromQueryIsSet = creator.ifNotNull(fieldValueFromQuery).trueBranch(); fieldValueFromQueryIsSet.invokeInterfaceMethod(ofMethod(List.class, "add", boolean.class, Object.class), - queryList, fieldValueFromQueryIsSet.load(fieldName + "=:" + fieldName)); + queryList, fieldValueFromQueryIsSet.load(fieldName + "=:" + paramName)); fieldValueFromQueryIsSet.invokeInterfaceMethod( ofMethod(Map.class, "put", Object.class, Object.class, Object.class), - dataParams, fieldValueFromQueryIsSet.load(fieldName), fieldValueFromQuery); + dataParams, fieldValueFromQueryIsSet.load(paramName), fieldValueFromQuery); } /** diff --git a/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/EntityTypeUtils.java b/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/EntityTypeUtils.java index 17e1dbe5d82b4..cedede3c18971 100644 --- a/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/EntityTypeUtils.java +++ b/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/EntityTypeUtils.java @@ -16,6 +16,10 @@ public final class EntityTypeUtils { + // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.5 + public static final int ACC_STATIC = 0x0008; + public static final int ACC_FINAL = 0x0010; + private EntityTypeUtils() { } @@ -25,7 +29,46 @@ public static Map getEntityFields(IndexView index, String entityTy ClassInfo currentEntityClass = index.getClassByName(entityTypeName); while (currentEntityClass != null) { for (FieldInfo field : currentEntityClass.fields()) { + // skip static fields + if ((field.flags() & ACC_STATIC) != 0) { + continue; + } + // skip final fields + if ((field.flags() & ACC_FINAL) != 0) { + continue; + } + // skip fields with Transient annotation + if (field.hasAnnotation(DotName.createSimple("jakarta.persistence.Transient"))) { + continue; + } + fields.put(field.name(), field.type()); + + // if the field is a ManyToOne relation, add the Id field of the relation to the fields map + if (field.type().kind() == Type.Kind.CLASS + && field.hasAnnotation(DotName.createSimple("jakarta.persistence.ManyToOne"))) { + // get the class info for the relation field + ClassInfo currentRelationClass = index.getClassByName(field.type().name()); + while (currentRelationClass != null) { + // get the field with Id annotation + FieldInfo relationIdField = currentRelationClass.fields().stream().filter((relationField) -> { + return relationField.hasAnnotation(DotName.createSimple("jakarta.persistence.Id")); + }).findFirst().orElse(null); + // if the field is not null, add it to the fields map + if (relationIdField != null) { + fields.put(field.name() + "." 
+ relationIdField.name(), relationIdField.type()); + } + + // get the super class of the relation class + if (currentRelationClass.superName() != null) { + currentRelationClass = index.getClassByName(currentRelationClass.superName()); + } else { + currentRelationClass = null; + } + } + + } + } if (currentEntityClass.superName() != null) { diff --git a/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/SignatureMethodCreator.java b/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/SignatureMethodCreator.java index 20dddea276b35..402dc1a124834 100644 --- a/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/SignatureMethodCreator.java +++ b/extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/SignatureMethodCreator.java @@ -72,6 +72,14 @@ public static class Parameter { public String getName() { return name; } + + public Type getType() { + return type; + } + + public Object getClazz() { + return clazz; + } } public static class ReturnType { diff --git a/extensions/picocli/deployment/src/main/java/io/quarkus/picocli/deployment/PicocliProcessor.java b/extensions/picocli/deployment/src/main/java/io/quarkus/picocli/deployment/PicocliProcessor.java index b2ed32c390f8b..18c452474e5f7 100644 --- a/extensions/picocli/deployment/src/main/java/io/quarkus/picocli/deployment/PicocliProcessor.java +++ b/extensions/picocli/deployment/src/main/java/io/quarkus/picocli/deployment/PicocliProcessor.java @@ -12,6 +12,7 @@ import io.quarkus.arc.deployment.AdditionalBeanBuildItem; import io.quarkus.arc.deployment.AnnotationsTransformerBuildItem; import io.quarkus.arc.deployment.AutoAddScopeBuildItem; +import io.quarkus.arc.deployment.UnremovableBeanBuildItem; import io.quarkus.arc.processor.AnnotationsTransformer; import io.quarkus.arc.processor.BuiltinScope; import io.quarkus.deployment.Feature; @@ -76,6 +77,7 @@ IndexDependencyBuildItem picocliIndexDependency() { void picocliRunner(ApplicationIndexBuildItem applicationIndex, CombinedIndexBuildItem combinedIndex, BuildProducer additionalBean, + BuildProducer unremovableBean, BuildProducer quarkusApplicationClass, BuildProducer annotationsTransformer) { IndexView index = combinedIndex.getIndex(); @@ -99,6 +101,17 @@ void picocliRunner(ApplicationIndexBuildItem applicationIndex, additionalBean.produce(AdditionalBeanBuildItem.unremovableOf(DefaultPicocliCommandLineFactory.class)); quarkusApplicationClass.produce(new QuarkusApplicationClassBuildItem(PicocliRunner.class)); } + + // Make all classes that can be instantiated by IFactory unremovable + unremovableBean.produce(UnremovableBeanBuildItem.beanTypes(CommandLine.ITypeConverter.class, + CommandLine.IVersionProvider.class, + CommandLine.IModelTransformer.class, + CommandLine.IModelTransformer.class, + CommandLine.IDefaultValueProvider.class, + CommandLine.IParameterConsumer.class, + CommandLine.IParameterPreprocessor.class, + CommandLine.INegatableOptionTransformer.class, + CommandLine.IHelpFactory.class)); } private List classesAnnotatedWith(IndexView indexView, String annotationClassName) { diff --git a/extensions/reactive-db2-client/deployment/src/main/java/io/quarkus/reactive/db2/client/deployment/ReactiveDB2ClientProcessor.java b/extensions/reactive-db2-client/deployment/src/main/java/io/quarkus/reactive/db2/client/deployment/ReactiveDB2ClientProcessor.java index 
d21106cfd1baf..60187addf5680 100644 --- a/extensions/reactive-db2-client/deployment/src/main/java/io/quarkus/reactive/db2/client/deployment/ReactiveDB2ClientProcessor.java +++ b/extensions/reactive-db2-client/deployment/src/main/java/io/quarkus/reactive/db2/client/deployment/ReactiveDB2ClientProcessor.java @@ -219,9 +219,11 @@ private void createPoolIfDefined(DB2PoolRecorder recorder, ExtendedBeanConfigurator mutinyDB2PoolConfigurator = SyntheticBeanBuildItem .configure(io.vertx.mutiny.db2client.DB2Pool.class) .defaultBean() + .addType(io.vertx.mutiny.sqlclient.Pool.class) .scope(ApplicationScoped.class) .addInjectionPoint(POOL_INJECTION_TYPE, injectionPointAnnotations(dataSourceName)) .createWith(recorder.mutinyDB2Pool(poolFunction)) + .unremovable() .setRuntimeInit(); addQualifiers(mutinyDB2PoolConfigurator, dataSourceName); diff --git a/extensions/reactive-mssql-client/deployment/src/main/java/io/quarkus/reactive/mssql/client/deployment/ReactiveMSSQLClientProcessor.java b/extensions/reactive-mssql-client/deployment/src/main/java/io/quarkus/reactive/mssql/client/deployment/ReactiveMSSQLClientProcessor.java index fc29eb683d158..2707e8aba1424 100644 --- a/extensions/reactive-mssql-client/deployment/src/main/java/io/quarkus/reactive/mssql/client/deployment/ReactiveMSSQLClientProcessor.java +++ b/extensions/reactive-mssql-client/deployment/src/main/java/io/quarkus/reactive/mssql/client/deployment/ReactiveMSSQLClientProcessor.java @@ -218,9 +218,11 @@ private void createPoolIfDefined(MSSQLPoolRecorder recorder, ExtendedBeanConfigurator mutinyMSSQLPoolConfigurator = SyntheticBeanBuildItem .configure(io.vertx.mutiny.mssqlclient.MSSQLPool.class) .defaultBean() + .addType(io.vertx.mutiny.sqlclient.Pool.class) .scope(ApplicationScoped.class) .addInjectionPoint(POOL_INJECTION_TYPE, injectionPointAnnotations(dataSourceName)) .createWith(recorder.mutinyMSSQLPool(poolFunction)) + .unremovable() .setRuntimeInit(); addQualifiers(mutinyMSSQLPoolConfigurator, dataSourceName); diff --git a/extensions/reactive-mssql-client/deployment/src/test/java/io/quarkus/reactive/mssql/client/NoConfigTest.java b/extensions/reactive-mssql-client/deployment/src/test/java/io/quarkus/reactive/mssql/client/NoConfigTest.java new file mode 100644 index 0000000000000..16b287f159e63 --- /dev/null +++ b/extensions/reactive-mssql-client/deployment/src/test/java/io/quarkus/reactive/mssql/client/NoConfigTest.java @@ -0,0 +1,145 @@ +package io.quarkus.reactive.mssql.client; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.time.Duration; +import java.util.concurrent.CompletionStage; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.reactive.datasource.ReactiveDataSource; +import io.quarkus.test.QuarkusUnitTest; +import io.vertx.mssqlclient.MSSQLPool; +import io.vertx.sqlclient.Pool; + +/** + * We should be able to start the application, even with no configuration at all. 
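Besides the programmatic Arc lookups in the NoConfigTest classes added here and below, the addType(io.vertx.mutiny.sqlclient.Pool.class) and unremovable() changes in these processors make the driver-agnostic Mutiny Pool type injectable directly for the default datasource, whichever reactive client is on the classpath. A minimal sketch; the bean name and query text are illustrative only:

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

import io.smallrye.mutiny.Uni;
import io.vertx.mutiny.sqlclient.Pool;
import io.vertx.mutiny.sqlclient.Row;
import io.vertx.mutiny.sqlclient.RowSet;

@ApplicationScoped
public class PingRepository {

    // resolves to the default datasource's pool through the vendor-neutral Mutiny type
    @Inject
    Pool pool;

    public Uni<RowSet<Row>> ping() {
        return pool.query("SELECT 1").execute();
    }
}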
+ */ +public class NoConfigTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + private static final Duration MAX_WAIT = Duration.ofSeconds(10); + + @Inject + MyBean myBean; + + @Test + public void pool_default() { + Pool pool = Arc.container().instance(Pool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().toCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void mutinyPool_default() { + io.vertx.mutiny.sqlclient.Pool pool = Arc.container().instance(io.vertx.mutiny.sqlclient.Pool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().subscribeAsCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void vendorPool_default() { + MSSQLPool pool = Arc.container().instance(MSSQLPool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().toCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void mutinyVendorPool_default() { + io.vertx.mutiny.mssqlclient.MSSQLPool pool = Arc.container().instance(io.vertx.mutiny.mssqlclient.MSSQLPool.class) + .get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().subscribeAsCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void pool_named() { + Pool pool = Arc.container().instance(Pool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void mutinyPool_named() { + io.vertx.mutiny.sqlclient.Pool pool = Arc.container().instance(io.vertx.mutiny.sqlclient.Pool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. 
+ assertThat(pool).isNull(); + } + + @Test + public void vendorPool_named() { + MSSQLPool pool = Arc.container().instance(MSSQLPool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void mutinyVendorPool_named() { + io.vertx.mutiny.mssqlclient.MSSQLPool pool = Arc.container().instance(io.vertx.mutiny.mssqlclient.MSSQLPool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void injectedBean_default() { + assertThat(myBean.usePool()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @ApplicationScoped + public static class MyBean { + @Inject + MSSQLPool pool; + + public CompletionStage usePool() { + return pool.getConnection().toCompletionStage(); + } + } +} diff --git a/extensions/reactive-mysql-client/deployment/src/main/java/io/quarkus/reactive/mysql/client/deployment/ReactiveMySQLClientProcessor.java b/extensions/reactive-mysql-client/deployment/src/main/java/io/quarkus/reactive/mysql/client/deployment/ReactiveMySQLClientProcessor.java index 7f61ab1eb9231..4bb36e3156dd0 100644 --- a/extensions/reactive-mysql-client/deployment/src/main/java/io/quarkus/reactive/mysql/client/deployment/ReactiveMySQLClientProcessor.java +++ b/extensions/reactive-mysql-client/deployment/src/main/java/io/quarkus/reactive/mysql/client/deployment/ReactiveMySQLClientProcessor.java @@ -219,9 +219,11 @@ private void createPoolIfDefined(MySQLPoolRecorder recorder, ExtendedBeanConfigurator mutinyMySQLPoolConfigurator = SyntheticBeanBuildItem .configure(io.vertx.mutiny.mysqlclient.MySQLPool.class) .defaultBean() + .addType(io.vertx.mutiny.sqlclient.Pool.class) .scope(ApplicationScoped.class) .addInjectionPoint(POOL_INJECTION_TYPE, injectionPointAnnotations(dataSourceName)) .createWith(recorder.mutinyMySQLPool(poolFunction)) + .unremovable() .setRuntimeInit(); addQualifiers(mutinyMySQLPoolConfigurator, dataSourceName); diff --git a/extensions/reactive-mysql-client/deployment/src/test/java/io/quarkus/reactive/mysql/client/NoConfigTest.java b/extensions/reactive-mysql-client/deployment/src/test/java/io/quarkus/reactive/mysql/client/NoConfigTest.java new file mode 100644 index 0000000000000..ea98c0acb5e8b --- /dev/null +++ b/extensions/reactive-mysql-client/deployment/src/test/java/io/quarkus/reactive/mysql/client/NoConfigTest.java @@ -0,0 +1,145 @@ +package io.quarkus.reactive.mysql.client; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.time.Duration; +import java.util.concurrent.CompletionStage; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.reactive.datasource.ReactiveDataSource; +import io.quarkus.test.QuarkusUnitTest; +import io.vertx.mysqlclient.MySQLPool; +import io.vertx.sqlclient.Pool; + +/** + * We should be able to start the application, even with no configuration at all. 
+ */ +public class NoConfigTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + private static final Duration MAX_WAIT = Duration.ofSeconds(10); + + @Inject + MyBean myBean; + + @Test + public void pool_default() { + Pool pool = Arc.container().instance(Pool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().toCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void mutinyPool_default() { + io.vertx.mutiny.sqlclient.Pool pool = Arc.container().instance(io.vertx.mutiny.sqlclient.Pool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().subscribeAsCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void vendorPool_default() { + MySQLPool pool = Arc.container().instance(MySQLPool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().toCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void mutinyVendorPool_default() { + io.vertx.mutiny.mysqlclient.MySQLPool pool = Arc.container().instance(io.vertx.mutiny.mysqlclient.MySQLPool.class) + .get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().subscribeAsCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void pool_named() { + Pool pool = Arc.container().instance(Pool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void mutinyPool_named() { + io.vertx.mutiny.sqlclient.Pool pool = Arc.container().instance(io.vertx.mutiny.sqlclient.Pool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. 
+ assertThat(pool).isNull(); + } + + @Test + public void vendorPool_named() { + MySQLPool pool = Arc.container().instance(MySQLPool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void mutinyVendorPool_named() { + io.vertx.mutiny.mysqlclient.MySQLPool pool = Arc.container().instance(io.vertx.mutiny.mysqlclient.MySQLPool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void injectedBean_default() { + assertThat(myBean.usePool()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @ApplicationScoped + public static class MyBean { + @Inject + MySQLPool pool; + + public CompletionStage usePool() { + return pool.getConnection().toCompletionStage(); + } + } +} diff --git a/extensions/reactive-oracle-client/deployment/src/main/java/io/quarkus/reactive/oracle/client/deployment/ReactiveOracleClientProcessor.java b/extensions/reactive-oracle-client/deployment/src/main/java/io/quarkus/reactive/oracle/client/deployment/ReactiveOracleClientProcessor.java index a82812d3d8409..ab1cf2dceff79 100644 --- a/extensions/reactive-oracle-client/deployment/src/main/java/io/quarkus/reactive/oracle/client/deployment/ReactiveOracleClientProcessor.java +++ b/extensions/reactive-oracle-client/deployment/src/main/java/io/quarkus/reactive/oracle/client/deployment/ReactiveOracleClientProcessor.java @@ -220,9 +220,11 @@ private void createPoolIfDefined(OraclePoolRecorder recorder, ExtendedBeanConfigurator mutinyOraclePoolConfigurator = SyntheticBeanBuildItem .configure(io.vertx.mutiny.oracleclient.OraclePool.class) .defaultBean() + .addType(io.vertx.mutiny.sqlclient.Pool.class) .scope(ApplicationScoped.class) .addInjectionPoint(POOL_INJECTION_TYPE, injectionPointAnnotations(dataSourceName)) .createWith(recorder.mutinyOraclePool(poolFunction)) + .unremovable() .setRuntimeInit(); addQualifiers(mutinyOraclePoolConfigurator, dataSourceName); diff --git a/extensions/reactive-oracle-client/deployment/src/test/java/io/quarkus/reactive/oracle/client/NoConfigTest.java b/extensions/reactive-oracle-client/deployment/src/test/java/io/quarkus/reactive/oracle/client/NoConfigTest.java new file mode 100644 index 0000000000000..7b2899780263f --- /dev/null +++ b/extensions/reactive-oracle-client/deployment/src/test/java/io/quarkus/reactive/oracle/client/NoConfigTest.java @@ -0,0 +1,145 @@ +package io.quarkus.reactive.oracle.client; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.time.Duration; +import java.util.concurrent.CompletionStage; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.reactive.datasource.ReactiveDataSource; +import io.quarkus.test.QuarkusUnitTest; +import io.vertx.oracleclient.OraclePool; +import io.vertx.sqlclient.Pool; + +/** + * We should be able to start the application, even with no configuration at all. 
+ */ +public class NoConfigTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + private static final Duration MAX_WAIT = Duration.ofSeconds(10); + + @Inject + MyBean myBean; + + @Test + public void pool_default() { + Pool pool = Arc.container().instance(Pool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().toCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Cannot connect"); + } + + @Test + public void mutinyPool_default() { + io.vertx.mutiny.sqlclient.Pool pool = Arc.container().instance(io.vertx.mutiny.sqlclient.Pool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().subscribeAsCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Cannot connect"); + } + + @Test + public void vendorPool_default() { + OraclePool pool = Arc.container().instance(OraclePool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().toCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Cannot connect"); + } + + @Test + public void mutinyVendorPool_default() { + io.vertx.mutiny.oracleclient.OraclePool pool = Arc.container().instance(io.vertx.mutiny.oracleclient.OraclePool.class) + .get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().subscribeAsCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Cannot connect"); + } + + @Test + public void pool_named() { + Pool pool = Arc.container().instance(Pool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void mutinyPool_named() { + io.vertx.mutiny.sqlclient.Pool pool = Arc.container().instance(io.vertx.mutiny.sqlclient.Pool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. 
+ assertThat(pool).isNull(); + } + + @Test + public void vendorPool_named() { + OraclePool pool = Arc.container().instance(OraclePool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void mutinyVendorPool_named() { + io.vertx.mutiny.oracleclient.OraclePool pool = Arc.container().instance(io.vertx.mutiny.oracleclient.OraclePool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void injectedBean_default() { + assertThat(myBean.usePool()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Cannot connect"); + } + + @ApplicationScoped + public static class MyBean { + @Inject + OraclePool pool; + + public CompletionStage usePool() { + return pool.getConnection().toCompletionStage(); + } + } +} diff --git a/extensions/reactive-pg-client/deployment/src/main/java/io/quarkus/reactive/pg/client/deployment/ReactivePgClientProcessor.java b/extensions/reactive-pg-client/deployment/src/main/java/io/quarkus/reactive/pg/client/deployment/ReactivePgClientProcessor.java index e1db7a21692b0..cce55cfa31a5c 100644 --- a/extensions/reactive-pg-client/deployment/src/main/java/io/quarkus/reactive/pg/client/deployment/ReactivePgClientProcessor.java +++ b/extensions/reactive-pg-client/deployment/src/main/java/io/quarkus/reactive/pg/client/deployment/ReactivePgClientProcessor.java @@ -224,9 +224,11 @@ private void createPoolIfDefined(PgPoolRecorder recorder, ExtendedBeanConfigurator mutinyPgPoolConfigurator = SyntheticBeanBuildItem .configure(io.vertx.mutiny.pgclient.PgPool.class) .defaultBean() + .addType(io.vertx.mutiny.sqlclient.Pool.class) .scope(ApplicationScoped.class) .addInjectionPoint(POOL_INJECTION_TYPE, injectionPointAnnotations(dataSourceName)) .createWith(recorder.mutinyPgPool(poolFunction)) + .unremovable() .setRuntimeInit(); addQualifiers(mutinyPgPoolConfigurator, dataSourceName); diff --git a/extensions/reactive-pg-client/deployment/src/test/java/io/quarkus/reactive/pg/client/NoConfigTest.java b/extensions/reactive-pg-client/deployment/src/test/java/io/quarkus/reactive/pg/client/NoConfigTest.java new file mode 100644 index 0000000000000..ceaa86c73563f --- /dev/null +++ b/extensions/reactive-pg-client/deployment/src/test/java/io/quarkus/reactive/pg/client/NoConfigTest.java @@ -0,0 +1,144 @@ +package io.quarkus.reactive.pg.client; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.time.Duration; +import java.util.concurrent.CompletionStage; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.reactive.datasource.ReactiveDataSource; +import io.quarkus.test.QuarkusUnitTest; +import io.vertx.pgclient.PgPool; +import io.vertx.sqlclient.Pool; + +/** + * We should be able to start the application, even with no configuration at all. 
+ */ +public class NoConfigTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + // The datasource won't be truly "unconfigured" if dev services are enabled + .overrideConfigKey("quarkus.devservices.enabled", "false"); + + private static final Duration MAX_WAIT = Duration.ofSeconds(10); + + @Inject + MyBean myBean; + + @Test + public void pool_default() { + Pool pool = Arc.container().instance(Pool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().toCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void mutinyPool_default() { + io.vertx.mutiny.sqlclient.Pool pool = Arc.container().instance(io.vertx.mutiny.sqlclient.Pool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().subscribeAsCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void vendorPool_default() { + PgPool pool = Arc.container().instance(PgPool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().toCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void mutinyVendorPool_default() { + io.vertx.mutiny.pgclient.PgPool pool = Arc.container().instance(io.vertx.mutiny.pgclient.PgPool.class).get(); + + // The default datasource is a bit special; + // it's historically always been considered as "present" even if there was no explicit configuration. + // So the bean will never be null. + assertThat(pool).isNotNull(); + // However, if unconfigured, it will use default connection config (host, port, username, ...) and will fail. + assertThat(pool.getConnection().subscribeAsCompletionStage()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @Test + public void pool_named() { + Pool pool = Arc.container().instance(Pool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void mutinyPool_named() { + io.vertx.mutiny.sqlclient.Pool pool = Arc.container().instance(io.vertx.mutiny.sqlclient.Pool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. 
+ assertThat(pool).isNull(); + } + + @Test + public void vendorPool_named() { + PgPool pool = Arc.container().instance(PgPool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void mutinyVendorPool_named() { + io.vertx.mutiny.pgclient.PgPool pool = Arc.container().instance(io.vertx.mutiny.pgclient.PgPool.class, + new ReactiveDataSource.ReactiveDataSourceLiteral("ds-1")).get(); + // An unconfigured, named datasource has no corresponding bean. + assertThat(pool).isNull(); + } + + @Test + public void injectedBean_default() { + assertThat(myBean.usePool()) + .failsWithin(MAX_WAIT) + .withThrowableThat() + .withMessageContaining("Connection refused"); + } + + @ApplicationScoped + public static class MyBean { + @Inject + PgPool pool; + + public CompletionStage usePool() { + return pool.getConnection().toCompletionStage(); + } + } +} diff --git a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/deployment/src/main/java/io/quarkus/resteasy/reactive/kotlin/serialization/deployment/KotlinSerializationProcessor.java b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/deployment/src/main/java/io/quarkus/resteasy/reactive/kotlin/serialization/deployment/KotlinSerializationProcessor.java index b11780c04f24f..e57370512695a 100644 --- a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/deployment/src/main/java/io/quarkus/resteasy/reactive/kotlin/serialization/deployment/KotlinSerializationProcessor.java +++ b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/deployment/src/main/java/io/quarkus/resteasy/reactive/kotlin/serialization/deployment/KotlinSerializationProcessor.java @@ -10,12 +10,15 @@ import jakarta.ws.rs.core.MediaType; import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import io.quarkus.deployment.Capabilities; +import io.quarkus.deployment.Capability; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.resteasy.reactive.common.deployment.ServerDefaultProducesHandlerBuildItem; import io.quarkus.resteasy.reactive.kotlin.serialization.runtime.KotlinSerializationMessageBodyReader; import io.quarkus.resteasy.reactive.kotlin.serialization.runtime.KotlinSerializationMessageBodyWriter; +import io.quarkus.resteasy.reactive.kotlin.serialization.runtime.ValidationJsonBuilderCustomizer; import io.quarkus.resteasy.reactive.spi.MessageBodyReaderBuildItem; import io.quarkus.resteasy.reactive.spi.MessageBodyWriterBuildItem; @@ -25,11 +28,15 @@ public class KotlinSerializationProcessor { public void additionalProviders( BuildProducer additionalBean, BuildProducer additionalReaders, - BuildProducer additionalWriters) { - additionalBean.produce(AdditionalBeanBuildItem.builder() - .addBeanClass(KotlinSerializationMessageBodyReader.class.getName()) - .addBeanClass(KotlinSerializationMessageBodyWriter.class.getName()) - .setUnremovable().build()); + BuildProducer additionalWriters, + Capabilities capabilities) { + AdditionalBeanBuildItem.Builder builder = AdditionalBeanBuildItem.builder() + .addBeanClasses(KotlinSerializationMessageBodyReader.class.getName(), + KotlinSerializationMessageBodyWriter.class.getName()); + if (capabilities.isPresent(Capability.HIBERNATE_VALIDATOR)) { + 
builder.addBeanClass(ValidationJsonBuilderCustomizer.class.getName()); + } + additionalBean.produce(builder.setUnremovable().build()); additionalReaders.produce(new MessageBodyReaderBuildItem( KotlinSerializationMessageBodyReader.class.getName(), Object.class.getName(), List.of( MediaType.APPLICATION_JSON), diff --git a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/pom.xml b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/pom.xml index 8a26a26c73fe6..051f526603966 100644 --- a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/pom.xml +++ b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/pom.xml @@ -23,6 +23,11 @@ io.quarkus quarkus-resteasy-reactive
+ + io.quarkus + quarkus-hibernate-validator + true + diff --git a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/KotlinSerializationMessageBodyWriter.kt b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/KotlinSerializationMessageBodyWriter.kt index 8c03b414962a6..4bd2d6b23b78c 100644 --- a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/KotlinSerializationMessageBodyWriter.kt +++ b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/KotlinSerializationMessageBodyWriter.kt @@ -31,7 +31,7 @@ class KotlinSerializationMessageBodyWriter(private val json: Json) : if (o is String) { // YUK: done in order to avoid adding extra quotes... entityStream.write(o.toByteArray(StandardCharsets.UTF_8)) } else { - json.encodeToStream(serializer(genericType), o, entityStream) + json.encodeToStream(json.serializersModule.serializer(genericType), o, entityStream) } } @@ -42,7 +42,7 @@ class KotlinSerializationMessageBodyWriter(private val json: Json) : if (o is String) { // YUK: done in order to avoid adding extra quotes... stream.write(o.toByteArray(StandardCharsets.UTF_8)) } else { - json.encodeToStream(serializer(genericType), o, stream) + json.encodeToStream(json.serializersModule.serializer(genericType), o, stream) } // we don't use try-with-resources because that results in writing to the http output // without the exception mapping coming into play diff --git a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ValidationJsonBuilderCustomizer.kt b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ValidationJsonBuilderCustomizer.kt new file mode 100644 index 0000000000000..4d6e0da918600 --- /dev/null +++ b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ValidationJsonBuilderCustomizer.kt @@ -0,0 +1,21 @@ +package io.quarkus.resteasy.reactive.kotlin.serialization.runtime + +import io.quarkus.resteasy.reactive.kotlin.serialization.common.JsonBuilderCustomizer +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.json.JsonBuilder +import kotlinx.serialization.modules.SerializersModule +import kotlinx.serialization.modules.contextual +import kotlinx.serialization.modules.plus + +class ValidationJsonBuilderCustomizer : JsonBuilderCustomizer { + @ExperimentalSerializationApi + override fun customize(jsonBuilder: JsonBuilder) { + jsonBuilder.serializersModule = + jsonBuilder.serializersModule.plus( + SerializersModule { + contextual(ViolationReportSerializer) + contextual(ViolationReportViolationSerializer) + } + ) + } +} diff --git a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ViolationReportSerializer.kt 
b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ViolationReportSerializer.kt new file mode 100644 index 0000000000000..bec3923ce94ef --- /dev/null +++ b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ViolationReportSerializer.kt @@ -0,0 +1,72 @@ +package io.quarkus.resteasy.reactive.kotlin.serialization.runtime + +import io.quarkus.hibernate.validator.runtime.jaxrs.ViolationReport +import jakarta.ws.rs.core.Response +import kotlinx.serialization.* +import kotlinx.serialization.builtins.ListSerializer +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.buildClassSerialDescriptor +import kotlinx.serialization.descriptors.listSerialDescriptor +import kotlinx.serialization.descriptors.serialDescriptor +import kotlinx.serialization.encoding.CompositeDecoder.Companion.DECODE_DONE +import kotlinx.serialization.encoding.Decoder +import kotlinx.serialization.encoding.Encoder +import kotlinx.serialization.encoding.decodeStructure +import kotlinx.serialization.encoding.encodeStructure + +@OptIn(ExperimentalSerializationApi::class) +@Serializer(forClass = ViolationReport::class) +object ViolationReportSerializer : KSerializer { + override val descriptor: SerialDescriptor = + buildClassSerialDescriptor("io.quarkus.hibernate.validator.runtime.jaxrs.ViolationReport") { + element("title", serialDescriptor()) + element("status", serialDescriptor()) + element( + "violations", + listSerialDescriptor(ListSerializer(ViolationReportViolationSerializer).descriptor) + ) + } + + override fun deserialize(decoder: Decoder): ViolationReport { + return decoder.decodeStructure(descriptor) { + var title: String? = null + var status: Int? 
= null + var violations: List = emptyList() + + loop@ while (true) { + when (val index = decodeElementIndex(descriptor)) { + DECODE_DONE -> break@loop + 0 -> title = decodeStringElement(descriptor, 0) + 1 -> status = decodeIntElement(descriptor, 1) + 2 -> + violations = + decodeSerializableElement( + descriptor, + 2, + ListSerializer(ViolationReportViolationSerializer) + ) + else -> throw SerializationException("Unexpected index $index") + } + } + + ViolationReport( + requireNotNull(title), + status?.let { Response.Status.fromStatusCode(it) }, + violations + ) + } + } + + override fun serialize(encoder: Encoder, value: ViolationReport) { + encoder.encodeStructure(descriptor) { + encodeStringElement(descriptor, 0, value.title) + encodeIntElement(descriptor, 1, value.status) + encodeSerializableElement( + descriptor, + 2, + ListSerializer(ViolationReportViolationSerializer), + value.violations + ) + } + } +} diff --git a/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ViolationReportViolationSerializer.kt b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ViolationReportViolationSerializer.kt new file mode 100644 index 0000000000000..c8d1615893398 --- /dev/null +++ b/extensions/resteasy-reactive/quarkus-resteasy-reactive-kotlin-serialization/runtime/src/main/kotlin/io/quarkus/resteasy/reactive/kotlin/serialization/runtime/ViolationReportViolationSerializer.kt @@ -0,0 +1,51 @@ +package io.quarkus.resteasy.reactive.kotlin.serialization.runtime + +import io.quarkus.hibernate.validator.runtime.jaxrs.ViolationReport +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.KSerializer +import kotlinx.serialization.SerializationException +import kotlinx.serialization.Serializer +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.buildClassSerialDescriptor +import kotlinx.serialization.descriptors.serialDescriptor +import kotlinx.serialization.encoding.* + +@OptIn(ExperimentalSerializationApi::class) +@Serializer(forClass = ViolationReport.Violation::class) +object ViolationReportViolationSerializer : KSerializer { + override val descriptor: SerialDescriptor = + buildClassSerialDescriptor( + "io.quarkus.hibernate.validator.runtime.jaxrs.ViolationReport.Violation" + ) { + element("field", serialDescriptor()) + element("message", serialDescriptor()) + } + + override fun deserialize(decoder: Decoder): ViolationReport.Violation { + return decoder.decodeStructure(descriptor) { + var field: String? = null + var message: String? 
= null + + loop@ while (true) { + when (val index = decodeElementIndex(descriptor)) { + CompositeDecoder.DECODE_DONE -> break@loop + 0 -> field = decodeStringElement(descriptor, 0) + 1 -> message = decodeStringElement(descriptor, 1) + else -> throw SerializationException("Unexpected index $index") + } + } + + ViolationReport.Violation( + requireNotNull(field), + requireNotNull(message), + ) + } + } + + override fun serialize(encoder: Encoder, value: ViolationReport.Violation) { + encoder.encodeStructure(descriptor) { + encodeStringElement(descriptor, 0, value.field) + encodeStringElement(descriptor, 1, value.message) + } + } +} diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/HelloClient.java b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/HelloClient.java index 664dea477ef7d..baa2e8d3771d5 100644 --- a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/HelloClient.java +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/HelloClient.java @@ -1,7 +1,11 @@ package io.quarkus.rest.client.reactive.stork; +import jakarta.ws.rs.Consumes; import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.core.MediaType; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; @@ -10,4 +14,13 @@ public interface HelloClient { @GET String hello(); + + @POST + @Consumes(MediaType.TEXT_PLAIN) + @Path("/") + String echo(String name); + + @GET + @Path("/{name}") + public String helloWithPathParam(@PathParam("name") String name); } diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/HelloResource.java b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/HelloResource.java index e9966a8d8eac6..1a544e2ab878e 100644 --- a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/HelloResource.java +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/HelloResource.java @@ -1,7 +1,13 @@ package io.quarkus.rest.client.reactive.stork; import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Request; @Path("/hello") public class HelloResource { @@ -12,4 +18,16 @@ public class HelloResource { public String hello() { return HELLO_WORLD; } + + @GET + @Path("/{name}") + @Produces(MediaType.TEXT_PLAIN) + public String invoke(@PathParam("name") String name) { + return "Hello, " + name; + } + + @POST + public String echo(String name, @Context Request request) { + return "hello, " + name; + } } diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/PassThroughResource.java b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/PassThroughResource.java index 129b7aece4cda..51f11c1b539ca 100644 --- 
a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/PassThroughResource.java +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/PassThroughResource.java @@ -4,6 +4,7 @@ import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; import org.eclipse.microprofile.rest.client.RestClientBuilder; import org.eclipse.microprofile.rest.client.inject.RestClient; @@ -22,6 +23,18 @@ public String invokeClient() { return client.hello(); } + @Path("/v2/{name}") + @GET + public String invokeClientWithPathParamContainingSlash(@PathParam("name") String name) { + return client.helloWithPathParam(name + "/" + name); + } + + @Path("/{name}") + @GET + public String invokeClientWithPathParam(@PathParam("name") String name) { + return client.helloWithPathParam(name); + } + @Path("/cdi") @GET public String invokeCdiClient() { diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkDevModeTest.java b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkDevModeTest.java index f30d13b937008..5a12b520c497b 100644 --- a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkDevModeTest.java +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkDevModeTest.java @@ -67,4 +67,25 @@ void shouldModifyStorkSettings() { .body(equalTo(WIREMOCK_RESPONSE)); // @formatter:on } + + @Test + void shouldSayHelloNameWithSlash() { + when() + .get("/helper/v2/stork") + .then() + .statusCode(200) + // The response contains an encoded `/` + .body(equalTo("Hello, stork/stork")); + + } + + @Test + void shouldSayHelloNameWithBlank() { + when() + .get("/helper/smallrye stork") + .then() + .statusCode(200) + // The response contains an encoded blank espace + .body(equalTo("Hello, smallrye stork")); + } } diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkIntegrationTest.java b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkIntegrationTest.java index cb22c1393db59..639ae39cd8fac 100644 --- a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkIntegrationTest.java +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkIntegrationTest.java @@ -15,8 +15,6 @@ import org.junit.jupiter.api.Timeout; import org.junit.jupiter.api.extension.RegisterExtension; -import io.quarkus.rest.client.reactive.HelloClient2; -import io.quarkus.rest.client.reactive.HelloResource; import io.quarkus.test.QuarkusUnitTest; import io.smallrye.stork.api.NoSuchServiceDefinitionException; @@ -24,45 +22,58 @@ public class StorkIntegrationTest { @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar - .addClasses(HelloClient2.class, HelloResource.class)) + .addClasses(HelloClient.class, HelloResource.class)) .withConfigurationResource("stork-application.properties"); @RestClient - HelloClient2 client; + HelloClient client; @Test void shouldDetermineUrlViaStork() { String greeting = 
RestClientBuilder.newBuilder().baseUri(URI.create("stork://hello-service/hello")) - .build(HelloClient2.class) + .build(HelloClient.class) .echo("black and white bird"); assertThat(greeting).isEqualTo("hello, black and white bird"); + + greeting = RestClientBuilder.newBuilder().baseUri(URI.create("stork://hello-service/hello")) + .build(HelloClient.class) + .helloWithPathParam("black and white bird"); + assertThat(greeting).isEqualTo("Hello, black and white bird"); } @Test void shouldDetermineUrlViaStorkWhenUsingTarget() throws URISyntaxException { - String greeting = ClientBuilder.newClient().target("stork://hello-service/hello").request().get(String.class); - assertThat(greeting).isEqualTo("Hello"); + String greeting = ClientBuilder.newClient().target("stork://hello-service/hello").request() + .get(String.class); + assertThat(greeting).isEqualTo("Hello, World!"); greeting = ClientBuilder.newClient().target(new URI("stork://hello-service/hello")).request().get(String.class); - assertThat(greeting).isEqualTo("Hello"); + assertThat(greeting).isEqualTo("Hello, World!"); greeting = ClientBuilder.newClient().target(UriBuilder.fromUri("stork://hello-service/hello")).request() .get(String.class); - assertThat(greeting).isEqualTo("Hello"); + assertThat(greeting).isEqualTo("Hello, World!"); + + greeting = ClientBuilder.newClient().target("stork://hello-service/hello").path("big bird").request() + .get(String.class); + assertThat(greeting).isEqualTo("Hello, big bird"); } @Test void shouldDetermineUrlViaStorkCDI() { String greeting = client.echo("big bird"); assertThat(greeting).isEqualTo("hello, big bird"); + + greeting = client.helloWithPathParam("big bird"); + assertThat(greeting).isEqualTo("Hello, big bird"); } @Test @Timeout(20) void shouldFailOnUnknownService() { - HelloClient2 client2 = RestClientBuilder.newBuilder() + HelloClient client = RestClientBuilder.newBuilder() .baseUri(URI.create("stork://nonexistent-service")) - .build(HelloClient2.class); - assertThatThrownBy(() -> client2.echo("foo")).isInstanceOf(NoSuchServiceDefinitionException.class); + .build(HelloClient.class); + assertThatThrownBy(() -> client.echo("foo")).isInstanceOf(NoSuchServiceDefinitionException.class); } } diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkResponseTimeLoadBalancerTest.java b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkResponseTimeLoadBalancerTest.java index 507ca9eb31b1a..9dc52a8d0d271 100644 --- a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkResponseTimeLoadBalancerTest.java +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkResponseTimeLoadBalancerTest.java @@ -16,8 +16,6 @@ import com.github.tomakehurst.wiremock.WireMockServer; import com.github.tomakehurst.wiremock.client.WireMock; -import io.quarkus.rest.client.reactive.HelloClient2; -import io.quarkus.rest.client.reactive.HelloResource; import io.quarkus.test.QuarkusUnitTest; public class StorkResponseTimeLoadBalancerTest { @@ -28,7 +26,7 @@ public class StorkResponseTimeLoadBalancerTest { @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar - .addClasses(HelloClient2.class, HelloResource.class)) + .addClasses(HelloClient.class, HelloResource.class)) 
.withConfigurationResource("stork-stat-lb.properties"); @BeforeAll @@ -46,7 +44,7 @@ public static void shutDown() { } @RestClient - HelloClient2 client; + HelloClient client; @Test void shouldUseFasterService() { diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkWithPathIntegrationTest.java b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkWithPathIntegrationTest.java index 26ba43279cbae..26ac15b363f45 100644 --- a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkWithPathIntegrationTest.java +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/stork/StorkWithPathIntegrationTest.java @@ -15,8 +15,6 @@ import org.junit.jupiter.api.Timeout; import org.junit.jupiter.api.extension.RegisterExtension; -import io.quarkus.rest.client.reactive.HelloClient2; -import io.quarkus.rest.client.reactive.HelloResource; import io.quarkus.test.QuarkusUnitTest; import io.smallrye.stork.api.NoSuchServiceDefinitionException; @@ -24,45 +22,57 @@ public class StorkWithPathIntegrationTest { @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar - .addClasses(HelloClient2.class, HelloResource.class)) + .addClasses(HelloClient.class, HelloResource.class)) .withConfigurationResource("stork-application-with-path.properties"); @RestClient - HelloClient2 client; + HelloClient client; @Test void shouldDetermineUrlViaStork() { String greeting = RestClientBuilder.newBuilder().baseUri(URI.create("stork://hello-service")) - .build(HelloClient2.class) + .build(HelloClient.class) .echo("black and white bird"); assertThat(greeting).isEqualTo("hello, black and white bird"); + + greeting = RestClientBuilder.newBuilder().baseUri(URI.create("stork://hello-service")) + .build(HelloClient.class) + .helloWithPathParam("black and white bird"); + assertThat(greeting).isEqualTo("Hello, black and white bird"); } @Test void shouldDetermineUrlViaStorkWhenUsingTarget() throws URISyntaxException { String greeting = ClientBuilder.newClient().target("stork://hello-service").request().get(String.class); - assertThat(greeting).isEqualTo("Hello"); + assertThat(greeting).isEqualTo("Hello, World!"); greeting = ClientBuilder.newClient().target(new URI("stork://hello-service")).request().get(String.class); - assertThat(greeting).isEqualTo("Hello"); + assertThat(greeting).isEqualTo("Hello, World!"); greeting = ClientBuilder.newClient().target(UriBuilder.fromUri("stork://hello-service/")).request() .get(String.class); - assertThat(greeting).isEqualTo("Hello"); + assertThat(greeting).isEqualTo("Hello, World!"); + + greeting = ClientBuilder.newClient().target("stork://hello-service/").path("big bird").request() + .get(String.class); + assertThat(greeting).isEqualTo("Hello, big bird"); } @Test void shouldDetermineUrlViaStorkCDI() { String greeting = client.echo("big bird"); assertThat(greeting).isEqualTo("hello, big bird"); + + greeting = client.helloWithPathParam("big bird"); + assertThat(greeting).isEqualTo("Hello, big bird"); } @Test @Timeout(20) void shouldFailOnUnknownService() { - HelloClient2 client2 = RestClientBuilder.newBuilder() + HelloClient client = RestClientBuilder.newBuilder() .baseUri(URI.create("stork://nonexistent-service")) - .build(HelloClient2.class); - assertThatThrownBy(() -> 
client2.echo("foo")).isInstanceOf(NoSuchServiceDefinitionException.class); + .build(HelloClient.class); + assertThatThrownBy(() -> client.echo("foo")).isInstanceOf(NoSuchServiceDefinitionException.class); } } diff --git a/extensions/schema-registry/confluent/pom.xml b/extensions/schema-registry/confluent/pom.xml index f1f3fd770436f..8331593118dbf 100644 --- a/extensions/schema-registry/confluent/pom.xml +++ b/extensions/schema-registry/confluent/pom.xml @@ -25,7 +25,7 @@ org.jetbrains.kotlin kotlin-scripting-compiler-embeddable - 1.6.0 + 1.9.22 org.json diff --git a/extensions/smallrye-openapi/deployment/src/main/java/io/quarkus/smallrye/openapi/deployment/SmallRyeOpenApiProcessor.java b/extensions/smallrye-openapi/deployment/src/main/java/io/quarkus/smallrye/openapi/deployment/SmallRyeOpenApiProcessor.java index c9a12f3a1264a..b5308159a27d3 100644 --- a/extensions/smallrye-openapi/deployment/src/main/java/io/quarkus/smallrye/openapi/deployment/SmallRyeOpenApiProcessor.java +++ b/extensions/smallrye-openapi/deployment/src/main/java/io/quarkus/smallrye/openapi/deployment/SmallRyeOpenApiProcessor.java @@ -107,9 +107,9 @@ import io.quarkus.vertx.http.deployment.FilterBuildItem; import io.quarkus.vertx.http.deployment.HttpRootPathBuildItem; import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem; -import io.quarkus.vertx.http.deployment.RouteBuildItem; import io.quarkus.vertx.http.deployment.SecurityInformationBuildItem; import io.quarkus.vertx.http.deployment.devmode.NotFoundPageDisplayableEndpointBuildItem; +import io.quarkus.vertx.http.deployment.spi.RouteBuildItem; import io.quarkus.vertx.http.runtime.management.ManagementInterfaceBuildTimeConfig; import io.quarkus.vertx.http.runtime.management.ManagementInterfaceConfiguration; import io.smallrye.openapi.api.OpenApiConfig; @@ -305,32 +305,31 @@ void handler(LaunchModeBuildItem launch, } } - routes.produce(nonApplicationRootPathBuildItem.routeBuilder() - .management("quarkus.smallrye-openapi.management.enabled") - .routeFunction(openApiConfig.path, corsFilter) - .routeConfigKey("quarkus.smallrye-openapi.path") - .handler(handler) + routes.produce(RouteBuildItem.newManagementRoute(openApiConfig.path, "quarkus.smallrye-openapi.management.enabled") + .withRouteCustomizer(corsFilter) + .withRoutePathConfigKey("quarkus.smallrye-openapi.path") + .withRequestHandler(handler) .displayOnNotFoundPage("Open API Schema document") - .blockingRoute() + .asBlockingRoute() .build()); - routes.produce(nonApplicationRootPathBuildItem.routeBuilder() - .management("quarkus.smallrye-openapi.management.enabled") - .routeFunction(openApiConfig.path + ".json", corsFilter) - .handler(handler) - .build()); - - routes.produce(nonApplicationRootPathBuildItem.routeBuilder() - .management("quarkus.smallrye-openapi.management.enabled") - .routeFunction(openApiConfig.path + ".yaml", corsFilter) - .handler(handler) - .build()); - - routes.produce(nonApplicationRootPathBuildItem.routeBuilder() - .management("quarkus.smallrye-openapi.management.enabled") - .routeFunction(openApiConfig.path + ".yml", corsFilter) - .handler(handler) - .build()); + routes.produce( + RouteBuildItem.newManagementRoute(openApiConfig.path + ".json", "quarkus.smallrye-openapi.management.enabled") + .withRouteCustomizer(corsFilter) + .withRequestHandler(handler) + .build()); + + routes.produce( + RouteBuildItem.newManagementRoute(openApiConfig.path + ".yaml", "quarkus.smallrye-openapi.management.enabled") + .withRouteCustomizer(corsFilter) + .withRequestHandler(handler) + .build()); + 
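The hunk above switches the OpenAPI endpoints to the new io.quarkus.vertx.http.deployment.spi.RouteBuildItem API defined later in this diff. As a rough sketch of the same pattern for any other extension, a build step could produce a conditionally-management route as follows; the processor class, route path and configuration key are hypothetical, only the builder calls come from this change:

    import io.quarkus.deployment.annotations.BuildStep;
    import io.quarkus.vertx.http.deployment.spi.RouteBuildItem;
    import io.vertx.core.Handler;
    import io.vertx.ext.web.RoutingContext;

    public class MyExtensionProcessor {

        @BuildStep
        RouteBuildItem pingRoute() {
            Handler<RoutingContext> handler = ctx -> ctx.response().end("pong");
            // Mounted under the non-application root (/q) of the main server, or on the
            // management server when the boolean config key resolves to true.
            return RouteBuildItem.newManagementRoute("my-ping", "quarkus.my-extension.management.enabled")
                    .withRequestHandler(handler)
                    .displayOnNotFoundPage("My extension ping endpoint")
                    .build();
        }
    }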
+ routes.produce( + RouteBuildItem.newManagementRoute(openApiConfig.path + ".yml", "quarkus.smallrye-openapi.management.enabled") + .withRouteCustomizer(corsFilter) + .withRequestHandler(handler) + .build()); // If management is enabled and swagger-ui is part of management, we need to add CORS so that swagger can hit the endpoint if (isManagement(managementInterfaceBuildTimeConfig, openApiConfig, launch)) { diff --git a/extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/SpringCloudConfigClientConfigSourceFactory.java b/extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/SpringCloudConfigClientConfigSourceFactory.java index 81f764071275b..85b8068d808c4 100644 --- a/extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/SpringCloudConfigClientConfigSourceFactory.java +++ b/extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/SpringCloudConfigClientConfigSourceFactory.java @@ -11,6 +11,7 @@ import org.eclipse.microprofile.config.spi.ConfigSource; import org.jboss.logging.Logger; +import io.quarkus.arc.runtime.appcds.AppCDSRecorder; import io.quarkus.spring.cloud.config.client.runtime.Response.PropertySource; import io.smallrye.config.ConfigSourceContext; import io.smallrye.config.ConfigSourceFactory.ConfigurableConfigSourceFactory; @@ -24,6 +25,12 @@ public class SpringCloudConfigClientConfigSourceFactory @Override public Iterable getConfigSources(final ConfigSourceContext context, final SpringCloudConfigClientConfig config) { + boolean inAppCDsGeneration = Boolean + .parseBoolean(System.getProperty(AppCDSRecorder.QUARKUS_APPCDS_GENERATE_PROP, "false")); + if (inAppCDsGeneration) { + return Collections.emptyList(); + } + List sources = new ArrayList<>(); if (!config.enabled()) { diff --git a/extensions/vertx-http/deployment-spi/pom.xml b/extensions/vertx-http/deployment-spi/pom.xml index b56cb27d89a19..6b7d3426684ff 100644 --- a/extensions/vertx-http/deployment-spi/pom.xml +++ b/extensions/vertx-http/deployment-spi/pom.xml @@ -18,6 +18,11 @@ io.quarkus quarkus-core-deployment + + + io.vertx + vertx-web + diff --git a/extensions/vertx-http/deployment-spi/src/main/java/io/quarkus/vertx/http/deployment/spi/RouteBuildItem.java b/extensions/vertx-http/deployment-spi/src/main/java/io/quarkus/vertx/http/deployment/spi/RouteBuildItem.java new file mode 100644 index 0000000000000..d43538ce33c76 --- /dev/null +++ b/extensions/vertx-http/deployment-spi/src/main/java/io/quarkus/vertx/http/deployment/spi/RouteBuildItem.java @@ -0,0 +1,366 @@ +package io.quarkus.vertx.http.deployment.spi; + +import java.util.OptionalInt; +import java.util.function.Consumer; + +import org.eclipse.microprofile.config.Config; +import org.eclipse.microprofile.config.ConfigProvider; + +import io.quarkus.builder.item.MultiBuildItem; +import io.vertx.core.Handler; +import io.vertx.ext.web.Route; +import io.vertx.ext.web.RoutingContext; + +/** + * A build item that represents a route that should be added to the router. + *

+ * Producing this build item does not mean the HTTP server is available. + * It will be consumed if the Quarkus Vert.x HTTP extension is present. + */ +public final class RouteBuildItem extends MultiBuildItem { + + /** + * The type of route handler + */ + public enum HandlerType { + + /** + * A regular route handler invoked on the event loop. + * + * @see io.vertx.ext.web.Route#handler(Handler) + */ + NORMAL, + /** + * A blocking route handler, invoked on a worker thread. + * + * @see io.vertx.ext.web.Route#blockingHandler(Handler) + */ + BLOCKING, + /** + * A failure handler, invoked when an exception is thrown from a route handler. + * This is invoked on the event loop. + * + * @see io.vertx.ext.web.Route#failureHandler(Handler) + */ + FAILURE + + } + + /** + * Type of routes. + */ + public enum RouteType { + + /** + * Framework routes are provided by the Quarkus framework (or extensions). + * They are not related to the application business logic, but provide a non-functional feature (health, metrics...). + *

+ * Framework route can be mounted on the application router (under the non application route path) or on the management + * router when enabled. + */ + FRAMEWORK_ROUTE, + /** + * Application routes are part of the application business logic. + * They are mounted on the application router (so the application prefix is applied). + */ + APPLICATION_ROUTE, + /** + * Absolute routes are part of the application business logic, and are mounted on the root router (exposed on /). + */ + ABSOLUTE_ROUTE + } + + private RouteType typeOfRoute = RouteType.APPLICATION_ROUTE; + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + private OptionalInt order = OptionalInt.empty(); + + private String path; + private Consumer customizer; + + private boolean isManagement; + + private Handler handler; + + private HandlerType typeOfHandler = HandlerType.NORMAL; + + private boolean displayOnNotFoundPage; + private String notFoundPageTitle; + + private String routeConfigKey; + + public RouteType getTypeOfRoute() { + return typeOfRoute; + } + + public boolean hasOrder() { + return order.isPresent(); + } + + public int getOrder() { + if (order.isPresent()) { + return order.getAsInt(); + } else { + throw new IllegalStateException("No order set"); + } + } + + public boolean hasRouteConfigKey() { + return routeConfigKey != null; + } + + public String getRouteConfigKey() { + return routeConfigKey; + } + + public Handler getHandler() { + return handler; + } + + public HandlerType getHandlerType() { + return typeOfHandler; + } + + public String getPath() { + return path; + } + + public Consumer getCustomizer() { + return customizer; + } + + public String getNotFoundPageTitle() { + return notFoundPageTitle; + } + + public boolean isDisplayOnNotFoundPage() { + return displayOnNotFoundPage; + } + + /** + * Declares a new application route. + * Application routes are part of the application business logic and are mounted on the application router. + * The {@code quarkus.http.root-path} property is applied in front of the route path (if set). + * + * @param path the path, must not be {@code null} or empty + * @return the builder to configure the route + */ + public static Builder newApplicationRoute(String path) { + return new Builder(RouteType.APPLICATION_ROUTE, path, false); + } + + /** + * Declares a new absolute route. + * Application routes are part of the application business logic and are mounted at the root of the server. + * The {@code quarkus.http.root-path} property is not applied. + * + * @param path the path, must not be {@code null} or empty, and must start with a slash + * @return the builder to configure the route + */ + public static Builder newAbsoluteRoute(String path) { + return new Builder(RouteType.ABSOLUTE_ROUTE, path, false); + } + + /** + * Declares a new framework route. + * A framework route is provided by the Quarkus framework (or extensions). + *

+ * The {@code quarkus.http.non-application-root-path} property is applied in front of the route path (defaults to + * {@code /q}). + *

+ * The declared route is not considered as a management route, meaning it will be mounted on the application router + * and exposed on the main HTTP server. See {@link #newManagementRoute(String)} to declare a management route. + * + * @param path the path, must not be {@code null} or empty. + * @return the builder to configure the route + */ + public static Builder newFrameworkRoute(String path) { + return new Builder(RouteType.FRAMEWORK_ROUTE, path, false); + } + + /** + * Declares a new management route. + *

+ * A management route is provided by the Quarkus framework (or extensions), and unlike routes declared with + * {@link #newFrameworkRoute(String)}, + * is mounted on the management router (exposed on the management HTTP server) when the management interface is + * enabled (see the management interface + * documentation for further details). + *

+ * If the management interface is not enabled, the {@code quarkus.http.non-application-root-path} property is applied in + * front of the route path (defaults to {@code /q}). + * If the management interface is enabled, the {@code quarkus.management.root-path} property is applied in front of the + * route path (also defaults to {@code /q} but exposed on another port, 9000 by default). + * + * @param path the path, must not be {@code null} or empty. + * @return the builder to configure the route + */ + public static Builder newManagementRoute(String path) { + return new Builder(RouteType.FRAMEWORK_ROUTE, path, true); + } + + /** + * Declares a new framework route, conditionally considered as a management route depending on the value of the + * {@code managementConfigKey} property. + * + *

+ * The route is provided by the Quarkus framework (or extensions). Depending on the value associated with the + * {@code managementConfigKey} property, + * the route is mounted either on the application router (exposed on the main HTTP server) or on the management router + * (exposed on the management HTTP server). + * The property must be a boolean (set to {@code true} to expose the route on the management server or {@code false} to + * expose it on the main HTTP server). + *

+ * If the management interface is not enabled, regardless of the value of the property, the route is exposed on the main HTTP + * server. + * The {@code quarkus.http.non-application-root-path} property is applied in front of the route path (defaults to + * {@code /q}). + *

+ * If the management interface is enabled and if the property is set to {@code true}, the route is exposed on the management + * server and the {@code quarkus.management.root-path} property is applied in front of the route path (also defaults to + * {@code /q} but exposed on another port, 9000 by default). + *

+ * If the management interface is enabled and if the property is set to {@code false}, the route is exposed on the main HTTP + * server. + * The {@code quarkus.http.non-application-root-path} property is applied in front of the route path (defaults to + * {@code /q}). + * + * @param path the path, must not be {@code null} or empty. + * @return the builder to configure the route + */ + public static Builder newManagementRoute(String path, String managementConfigKey) { + return new Builder(RouteType.FRAMEWORK_ROUTE, path, + (managementConfigKey == null || isManagement(managementConfigKey))); + } + + private static boolean isManagement(String managementConfigKey) { + Config config = ConfigProvider.getConfig(); + return config.getValue(managementConfigKey, boolean.class); + } + + public boolean isManagement() { + return isManagement; + } + + /** + * A builder to configure the route. + */ + public static class Builder { + + private final RouteBuildItem item; + + private Builder(RouteType type, String path, boolean isManagement) { + item = new RouteBuildItem(); + item.typeOfRoute = type; + item.path = path; + item.isManagement = isManagement; + } + + /** + * Sets a function to customize the route. + * + * @param customizer the customizer, must not be {@code null} + * @return the current builder + */ + public Builder withRouteCustomizer(Consumer customizer) { + item.customizer = customizer; + return this; + } + + /** + * Defines the route order. + * + * @param order the order + * @return the current builder + */ + public Builder withOrder(int order) { + item.order = OptionalInt.of(order); + return this; + } + + /** + * Sets the request handler (mandatory) + * + * @param handler the handler, must not be {@code null} + * @return the current builder + */ + public Builder withRequestHandler(Handler handler) { + item.handler = handler; + return this; + } + + /** + * Sets the route as a blocking route. + * A blocking route handler is invoked on a worker thread, and thus is allowed to block. + * + * @return the current builder + */ + public Builder asBlockingRoute() { + if (item.typeOfHandler == HandlerType.FAILURE) { + throw new IllegalArgumentException("A failure route cannot be a blocking route"); + } + item.typeOfHandler = HandlerType.BLOCKING; + return this; + } + + /** + * Sets the route as a failure route. + * A failure route handler is invoked when an exception is thrown from a route handler. + * + * @return the current builder + */ + public Builder asFailureRoute() { + if (item.typeOfHandler == HandlerType.BLOCKING) { + throw new IllegalArgumentException("A blocking route cannot be a failure route"); + } + item.typeOfHandler = HandlerType.FAILURE; + return this; + } + + /** + * Adds the route to the page returned when a 404 error is returned. + * + * @return the current builder + */ + public Builder displayOnNotFoundPage() { + item.displayOnNotFoundPage = true; + return this; + } + + /** + * Adds the route to the page returned when a 404 error is returned, and sets the title of the page. + * + * @param notFoundPageTitle the title of the route + * @return the current builder + */ + public Builder displayOnNotFoundPage(String notFoundPageTitle) { + item.displayOnNotFoundPage = true; + item.notFoundPageTitle = notFoundPageTitle; + return this; + } + + /** + * Sets a property configuring the route path. 
+ * + * @param attributeName the name of the property configuring the route path + * @return the current builder + */ + public Builder withRoutePathConfigKey(String attributeName) { + item.routeConfigKey = attributeName; + return this; + } + + /** + * Validates the route and build the {@code RouteBuildItem}. + * + * @return the route build item + */ + public RouteBuildItem build() { + if (item.handler == null) { + throw new IllegalArgumentException("The route handler must be set"); + } + + return item; + } + } + +} diff --git a/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/NonApplicationRootPathBuildItem.java b/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/NonApplicationRootPathBuildItem.java index d978518a6295e..3d6720009d289 100644 --- a/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/NonApplicationRootPathBuildItem.java +++ b/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/NonApplicationRootPathBuildItem.java @@ -21,8 +21,6 @@ public final class NonApplicationRootPathBuildItem extends SimpleBuildItem { - // TODO Should be handle the management root path? - /** * Normalized of quarkus.http.root-path. * Must end in a slash diff --git a/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/RouteConverter.java b/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/RouteConverter.java new file mode 100644 index 0000000000000..606889a3d2f81 --- /dev/null +++ b/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/RouteConverter.java @@ -0,0 +1,44 @@ +package io.quarkus.vertx.http.deployment; + +import io.quarkus.vertx.http.runtime.HandlerType; + +/** + * Convert the route build item from the SPI to the internal representation + */ +public class RouteConverter { + + public static RouteBuildItem convert(io.quarkus.vertx.http.deployment.spi.RouteBuildItem item, + HttpRootPathBuildItem httpRootPathBuildItem, + NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) { + // The builder depends on the type of route + RouteBuildItem.Builder builder; + if (item.getTypeOfRoute() == io.quarkus.vertx.http.deployment.spi.RouteBuildItem.RouteType.FRAMEWORK_ROUTE) { + builder = nonApplicationRootPathBuildItem.routeBuilder(); + } else { + builder = httpRootPathBuildItem.routeBuilder(); + } + + if (item.isManagement()) { + builder = builder.management(); + } + if (item.hasRouteConfigKey()) { + builder = builder.routeConfigKey(item.getRouteConfigKey()); + } + + builder = builder.handler(item.getHandler()).handlerType(HandlerType.valueOf(item.getHandlerType().name())); + if (item.isDisplayOnNotFoundPage()) { + builder = builder + .displayOnNotFoundPage(item.getNotFoundPageTitle()); + } + + if (item.hasOrder()) { + builder = builder.orderedRoute(item.getPath(), item.getOrder(), item.getCustomizer()); + } else { + builder = builder.routeFunction(item.getPath(), item.getCustomizer()); + } + + return builder.build(); + + } + +} diff --git a/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/VertxHttpProcessor.java b/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/VertxHttpProcessor.java index 0d6835565655e..dc19568107b9a 100644 --- a/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/VertxHttpProcessor.java +++ 
b/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/VertxHttpProcessor.java @@ -95,6 +95,19 @@ HttpRootPathBuildItem httpRoot(HttpBuildTimeConfig httpBuildTimeConfig) { return new HttpRootPathBuildItem(httpBuildTimeConfig.rootPath); } + @BuildStep + List convertRoutes( + List items, + HttpRootPathBuildItem httpRootPathBuildItem, + NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) { + List list = new ArrayList<>(); + for (io.quarkus.vertx.http.deployment.spi.RouteBuildItem item : items) { + RouteBuildItem converted = RouteConverter.convert(item, httpRootPathBuildItem, nonApplicationRootPathBuildItem); + list.add(converted); + } + return list; + } + @BuildStep NonApplicationRootPathBuildItem frameworkRoot(HttpBuildTimeConfig httpBuildTimeConfig, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig) { @@ -276,7 +289,7 @@ VertxWebRouterBuildItem initializeRouter(VertxHttpRecorder recorder, } } - /** + /* * To create mainrouter when `${quarkus.http.root-path}` is not {@literal /} * Refer https://github.com/quarkusio/quarkus/issues/34261 */ @@ -475,7 +488,7 @@ void registerExchangeAttributeBuilders(final BuildProducer "quarkus.http.insecure-requests" is not explicitly disabled * <2> any of the http SSL runtime properties are set at build time - * + *

* If any of the above rules applied, the port "https" will be generated as part of the Kubernetes resources. */ private static boolean isSslConfigured() { diff --git a/extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/router/ReqContextActivationTerminationTest.java b/extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/router/ReqContextActivationTerminationTest.java new file mode 100644 index 0000000000000..fbacffd72cead --- /dev/null +++ b/extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/router/ReqContextActivationTerminationTest.java @@ -0,0 +1,51 @@ +package io.quarkus.vertx.http.router; + +import static org.hamcrest.Matchers.is; + +import jakarta.enterprise.event.Observes; +import jakarta.inject.Singleton; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.runtime.StartupEvent; +import io.quarkus.test.QuarkusUnitTest; +import io.restassured.RestAssured; +import io.vertx.ext.web.Router; + +/** + * Test is located here so that {@code VertxCurrentContextFactory} is used within req. context implementation. + * See also https://github.com/quarkusio/quarkus/issues/37741 + */ +public class ReqContextActivationTerminationTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar.addClasses(BeanWithObserver.class)); + + @Test + public void testRoute() { + RestAssured.when().get("/boom").then().statusCode(200).body(is("ok")); + } + + @Singleton + public static class BeanWithObserver { + + private static int counter; + + void observeRouter(@Observes StartupEvent startup, Router router) { + router.get("/boom").handler(ctx -> { + // context starts as inactive; we perform manual activation/termination and assert + Assertions.assertEquals(false, Arc.container().requestContext().isActive()); + Arc.container().requestContext().activate(); + Assertions.assertEquals(true, Arc.container().requestContext().isActive()); + Arc.container().requestContext().terminate(); + Assertions.assertEquals(false, Arc.container().requestContext().isActive()); + ctx.response().setStatusCode(200).end("ok"); + }); + } + + } +} diff --git a/extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/HandlerType.java b/extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/HandlerType.java index a8ae971d3a479..881dbb672bbef 100644 --- a/extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/HandlerType.java +++ b/extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/HandlerType.java @@ -2,22 +2,26 @@ import io.vertx.core.Handler; +/** + * The type of route handler + */ public enum HandlerType { /** - * A request handler. + * A regular route handler invoked on the event loop. * * @see io.vertx.ext.web.Route#handler(Handler) */ NORMAL, /** - * A blocking request handler. + * A blocking route handler, invoked on a worker thread. * * @see io.vertx.ext.web.Route#blockingHandler(Handler) */ BLOCKING, /** - * A failure handler. + * A failure handler, invoked when an exception is thrown from a route handler. + * This is invoked on the event loop. 
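The ReqContextActivationTerminationTest added earlier in this diff drives the ArC request context by hand from a Vert.x route. A minimal sketch of the same manual activation pattern outside a test, using only the public Arc and ManagedContext API (the helper method and the Runnable are illustrative):

    import io.quarkus.arc.Arc;
    import io.quarkus.arc.ManagedContext;

    public class ManualRequestContext {

        static void runWithRequestContext(Runnable work) {
            ManagedContext requestContext = Arc.container().requestContext();
            if (requestContext.isActive()) {
                work.run();
                return;
            }
            requestContext.activate();
            try {
                work.run(); // @RequestScoped beans can be looked up here
            } finally {
                // With the RequestContext.isActive() change in this diff, an invalidated
                // state is no longer reported as active after termination.
                requestContext.terminate();
            }
        }
    }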
* * @see io.vertx.ext.web.Route#failureHandler(Handler) */ diff --git a/independent-projects/arc/pom.xml b/independent-projects/arc/pom.xml index fe754411961c1..0d4d3ef63e26a 100644 --- a/independent-projects/arc/pom.xml +++ b/independent-projects/arc/pom.xml @@ -50,7 +50,7 @@ 2.5.1 1.6.Final - 3.24.2 + 3.25.1 5.10.1 1.9.22 1.7.3 diff --git a/independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/BeanProcessor.java b/independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/BeanProcessor.java index beb80d5153a80..b15c310e4b1cd 100644 --- a/independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/BeanProcessor.java +++ b/independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/BeanProcessor.java @@ -78,7 +78,7 @@ public static Builder builder() { private final boolean generateSources; private final boolean allowMocking; private final boolean transformUnproxyableClasses; - private final boolean optimizeContexts; + private final Predicate optimizeContexts; private final List>> suppressConditionGenerators; // This predicate is used to filter annotations for InjectionPoint metadata @@ -187,6 +187,7 @@ public List generateResources(ReflectionRegistration reflectionRegistr ReflectionRegistration refReg = reflectionRegistration != null ? reflectionRegistration : this.reflectionRegistration; PrivateMembersCollector privateMembers = new PrivateMembersCollector(); + boolean optimizeContextsValue = optimizeContexts != null ? optimizeContexts.test(beanDeployment) : false; // These maps are precomputed and then used in the ComponentsProviderGenerator which is generated first Map beanToGeneratedName = new HashMap<>(); @@ -240,7 +241,7 @@ public List generateResources(ReflectionRegistration reflectionRegistr ContextInstancesGenerator contextInstancesGenerator = new ContextInstancesGenerator(generateSources, refReg, beanDeployment, scopeToGeneratedName); - if (optimizeContexts) { + if (optimizeContextsValue) { contextInstancesGenerator.precomputeGeneratedName(BuiltinScope.APPLICATION.getName()); contextInstancesGenerator.precomputeGeneratedName(BuiltinScope.REQUEST.getName()); } @@ -364,7 +365,7 @@ public Collection call() throws Exception { })); } - if (optimizeContexts) { + if (optimizeContextsValue) { // Generate _ContextInstances primaryTasks.add(executor.submit(new Callable>() { @@ -450,7 +451,7 @@ public Collection call() throws Exception { observerToGeneratedName, scopeToGeneratedName)); - if (optimizeContexts) { + if (optimizeContextsValue) { // Generate _ContextInstances resources.addAll(contextInstancesGenerator.generate(BuiltinScope.APPLICATION.getName())); resources.addAll(contextInstancesGenerator.generate(BuiltinScope.REQUEST.getName())); @@ -564,7 +565,7 @@ public static class Builder { boolean failOnInterceptedPrivateMethod; boolean allowMocking; boolean strictCompatibility; - boolean optimizeContexts; + Predicate optimizeContexts; AlternativePriorities alternativePriorities; final List> excludeTypes; @@ -600,7 +601,6 @@ public Builder() { failOnInterceptedPrivateMethod = false; allowMocking = false; strictCompatibility = false; - optimizeContexts = false; excludeTypes = new ArrayList<>(); @@ -842,7 +842,21 @@ public Builder setStrictCompatibility(boolean strictCompatibility) { * @return self */ public Builder setOptimizeContexts(boolean value) { - this.optimizeContexts = value; + return setOptimizeContexts(new Predicate() { + @Override + public boolean test(BeanDeployment t) { + return value; + } + }); + } + + /** + * + * 
@param fun + * @return self + */ + public Builder setOptimizeContexts(Predicate fun) { + this.optimizeContexts = fun; return this; } diff --git a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/Arc.java b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/Arc.java index 349f5793aad73..364d1746aa1ff 100644 --- a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/Arc.java +++ b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/Arc.java @@ -35,8 +35,7 @@ public static ArcContainer initialize(ArcInitConfig config) { container = INSTANCE.get(); if (container == null) { // Set the container instance first because Arc.container() can be used within ArcContainerImpl.init() - container = new ArcContainerImpl(config.getCurrentContextFactory(), - config.isStrictCompatibility(), config.isOptimizeContexts()); + container = new ArcContainerImpl(config.getCurrentContextFactory(), config.isStrictCompatibility()); INSTANCE.set(container); container.init(); } diff --git a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/ArcInitConfig.java b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/ArcInitConfig.java index 976bf4b0d08d1..79464ba3eb135 100644 --- a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/ArcInitConfig.java +++ b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/ArcInitConfig.java @@ -39,6 +39,12 @@ public CurrentContextFactory getCurrentContextFactory() { return currentContextFactory; } + /** + * + * @return {@code true} if optimized contexts should be used, {@code false} otherwise + * @deprecated This method was never used and will be removed at some point after Quarkus 3.10 + */ + @Deprecated(since = "3.7", forRemoval = true) public boolean isOptimizeContexts() { return optimizeContexts; } @@ -65,6 +71,14 @@ public Builder setCurrentContextFactory(CurrentContextFactory currentContextFact return this; } + /** + * The value was actually never used. 
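The BeanProcessor change above replaces the boolean optimizeContexts switch with a Predicate evaluated against the BeanDeployment, so the decision can be deferred until the deployment has been built. A minimal sketch of the new overload; the bean-count threshold is an arbitrary illustration, not a heuristic taken from this change:

    import io.quarkus.arc.processor.BeanDeployment;
    import io.quarkus.arc.processor.BeanProcessor;

    public class OptimizeContextsConfig {

        static BeanProcessor.Builder withDeferredContextOptimization(BeanProcessor.Builder builder) {
            // The predicate is only tested in generateResources(), once the BeanDeployment exists.
            return builder.setOptimizeContexts((BeanDeployment deployment) -> deployment.getBeans().size() > 1000);
        }
    }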
+ * + * @param value + * @return this + * @deprecated This value was never used; this method will be removed at some point after Quarkus 3.10 + */ + @Deprecated(since = "3.7", forRemoval = true) public Builder setOptimizeContexts(boolean value) { optimizeContexts = value; return this; diff --git a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/ArcContainerImpl.java b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/ArcContainerImpl.java index 27813170b5a0b..31972ac95db20 100644 --- a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/ArcContainerImpl.java +++ b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/ArcContainerImpl.java @@ -104,7 +104,7 @@ public class ArcContainerImpl implements ArcContainer { private final boolean strictMode; - public ArcContainerImpl(CurrentContextFactory currentContextFactory, boolean strictMode, boolean optimizeContexts) { + public ArcContainerImpl(CurrentContextFactory currentContextFactory, boolean strictMode) { this.strictMode = strictMode; id = String.valueOf(ID_GENERATOR.incrementAndGet()); running = new AtomicBoolean(true); diff --git a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/RequestContext.java b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/RequestContext.java index 762663007603f..0e81d5b5865b4 100644 --- a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/RequestContext.java +++ b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/RequestContext.java @@ -112,7 +112,8 @@ public T get(Contextual contextual) { @Override public boolean isActive() { - return currentContext.get() != null; + RequestContextState requestContextState = currentContext.get(); + return requestContextState == null ? false : requestContextState.isValid(); } @Override diff --git a/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/ApplicationModel.java b/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/ApplicationModel.java index 7fa5a41514af1..8695f93573fdc 100644 --- a/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/ApplicationModel.java +++ b/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/ApplicationModel.java @@ -40,6 +40,14 @@ public interface ApplicationModel { */ Iterable getDependencies(int flags); + /** + * Returns application dependencies that have any of the flags passed in as arguments set. + * + * @param flags dependency flags to match + * @return application dependencies that matched the flags + */ + Iterable getDependenciesWithAnyFlag(int... 
flags); + /** * Runtime dependencies of an application * diff --git a/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/DefaultApplicationModel.java b/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/DefaultApplicationModel.java index 091c21cfa4035..d245c1cad796c 100644 --- a/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/DefaultApplicationModel.java +++ b/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/DefaultApplicationModel.java @@ -3,15 +3,14 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; -import java.util.stream.Collectors; import io.quarkus.maven.dependency.ArtifactKey; -import io.quarkus.maven.dependency.Dependency; import io.quarkus.maven.dependency.DependencyFlags; import io.quarkus.maven.dependency.ResolvedDependency; @@ -42,24 +41,20 @@ public ResolvedDependency getAppArtifact() { @Override public Collection getDependencies() { - var result = new ArrayList(dependencies.size()); - for (var d : getDependencies(DependencyFlags.DEPLOYMENT_CP)) { - result.add(d); - } - return result; + return collectDependencies(DependencyFlags.DEPLOYMENT_CP); } @Override public Collection getRuntimeDependencies() { - var result = new ArrayList(); - for (var d : getDependencies(DependencyFlags.RUNTIME_CP)) { - result.add(d); - } - return result; + return collectDependencies(DependencyFlags.RUNTIME_CP); } @Override public Iterable getDependencies(int flags) { + return new FlagDependencyIterator(new int[] { flags }); + } + + public Iterable getDependenciesWithAnyFlag(int... 
flags) { return new FlagDependencyIterator(flags); } @@ -75,20 +70,17 @@ public Collection getExtensionCapabilities() { @Override public Set getParentFirst() { - return getDependencies().stream().filter(Dependency::isClassLoaderParentFirst).map(Dependency::getKey) - .collect(Collectors.toSet()); + return collectKeys(DependencyFlags.DEPLOYMENT_CP | DependencyFlags.CLASSLOADER_PARENT_FIRST); } @Override public Set getRunnerParentFirst() { - return getDependencies().stream().filter(d -> d.isFlagSet(DependencyFlags.CLASSLOADER_RUNNER_PARENT_FIRST)) - .map(Dependency::getKey).collect(Collectors.toSet()); + return collectKeys(DependencyFlags.DEPLOYMENT_CP | DependencyFlags.CLASSLOADER_RUNNER_PARENT_FIRST); } @Override public Set getLowerPriorityArtifacts() { - return getDependencies().stream().filter(d -> d.isFlagSet(DependencyFlags.CLASSLOADER_LESSER_PRIORITY)) - .map(Dependency::getKey).collect(Collectors.toSet()); + return collectKeys(DependencyFlags.DEPLOYMENT_CP | DependencyFlags.CLASSLOADER_LESSER_PRIORITY); } @Override @@ -101,11 +93,27 @@ public Map> getRemovedResources() { return excludedResources; } + private Collection collectDependencies(int flags) { + var result = new ArrayList(); + for (var d : getDependencies(flags)) { + result.add(d); + } + return result; + } + + private Set collectKeys(int flags) { + var keys = new HashSet(); + for (var d : getDependencies(flags)) { + keys.add(d.getKey()); + } + return keys; + } + private class FlagDependencyIterator implements Iterable { - private final int flags; + private final int[] flags; - private FlagDependencyIterator(int flags) { + private FlagDependencyIterator(int[] flags) { this.flags = flags; } @@ -139,7 +147,7 @@ private void moveOn() { next = null; while (i.hasNext()) { var d = i.next(); - if ((d.getFlags() & flags) == flags) { + if (d.hasAnyFlag(flags)) { next = d; break; } diff --git a/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/Dependency.java b/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/Dependency.java index 21d7985a1bfb7..8fe5601ca64dc 100644 --- a/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/Dependency.java +++ b/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/Dependency.java @@ -60,7 +60,43 @@ default boolean isClassLoaderParentFirst() { return isFlagSet(DependencyFlags.CLASSLOADER_PARENT_FIRST); } + /** + * Checks whether a dependency has a given flag set. + * + * @param flag flag to check + * @return true if the flag is set, otherwise false + */ default boolean isFlagSet(int flag) { - return (getFlags() & flag) > 0; + return (getFlags() & flag) == flag; + } + + /** + * Checks whether any of the flags are set on a dependency + * + * @param flags flags to check + * @return true if any of the flags are set, otherwise false + */ + default boolean hasAnyFlag(int... flags) { + for (var flag : flags) { + if (isFlagSet(flag)) { + return true; + } + } + return false; + } + + /** + * Checks whether all the passed in flags are set on a dependency + * + * @param flags flags to check + * @return true if all the passed in flags are set on a dependency, otherwise false + */ + default boolean hasAllFlags(int... 
flags) { + for (var flag : flags) { + if (!isFlagSet(flag)) { + return false; + } + } + return true; } } diff --git a/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/DependencyFlags.java b/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/DependencyFlags.java index 8d9c50148784a..641c677f562dd 100644 --- a/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/DependencyFlags.java +++ b/independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/DependencyFlags.java @@ -23,6 +23,27 @@ public interface DependencyFlags { // once the processing of the whole tree has completed. int VISITED = 0b00100000000000; + /** + * Compile-only dependencies are those that are configured + * to be included only for the compile phase ({@code provided} dependency scope in Maven, + * {@code compileOnly} configuration in Gradle). + *
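// --- Editor's illustrative sketch, not part of this diff ---
// With the change above, isFlagSet(flag) now requires every bit of a composite flag to be set
// ((getFlags() & flag) == flag), while hasAnyFlag/hasAllFlags test several independent flags in
// one call. A minimal sketch, assuming a ResolvedDependency instance `dep`; the class and method
// names are hypothetical.
import io.quarkus.maven.dependency.DependencyFlags;
import io.quarkus.maven.dependency.ResolvedDependency;

class FlagHelpersSketch {
    static void describe(ResolvedDependency dep) {
        // true only if BOTH the runtime and deployment classpath bits are set
        boolean onBothClasspaths = dep.isFlagSet(DependencyFlags.RUNTIME_CP | DependencyFlags.DEPLOYMENT_CP);
        // true if at least one of the listed flags is set
        boolean optionalOrWorkspace = dep.hasAnyFlag(DependencyFlags.OPTIONAL, DependencyFlags.WORKSPACE_MODULE);
        // true only if every listed flag is set
        boolean directExtension = dep.hasAllFlags(DependencyFlags.DIRECT, DependencyFlags.RUNTIME_EXTENSION_ARTIFACT);
        System.out.printf("both-cp=%s, any=%s, all=%s%n", onBothClasspaths, optionalOrWorkspace, directExtension);
    }
}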
<p>
+ * These dependencies will not be present on the Quarkus application runtime or + * augmentation (deployment) classpath when the application is bootstrapped in production mode + * ({@code io.quarkus.runtime.LaunchMode.NORMAL}). + *
<p>
+ * In Maven projects, compile-only dependencies will be present on both the runtime and the augmentation classpath + * of a Quarkus application launched in dev and test modes, since {@code provided} dependencies are included + * in the test classpath by Maven. + *
<p>
+ * In Gradle projects, compile-only dependencies will be present on both the runtime and the augmentation classpath + * of a Quarkus application launched in dev mode only. + *
<p>
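// --- Editor's illustrative sketch, not part of this diff ---
// During augmentation, the compile-only dependencies collected under this flag could be listed
// from the ApplicationModel roughly as sketched below; the CompileOnlySketch class and method
// name are hypothetical, only ApplicationModel, ResolvedDependency and DependencyFlags.COMPILE_ONLY
// come from this change.
import io.quarkus.bootstrap.model.ApplicationModel;
import io.quarkus.maven.dependency.DependencyFlags;
import io.quarkus.maven.dependency.ResolvedDependency;

class CompileOnlySketch {
    static void logCompileOnlyDeps(ApplicationModel model) {
        for (ResolvedDependency d : model.getDependencies(DependencyFlags.COMPILE_ONLY)) {
            System.out.println("compile-only: " + d.getGroupId() + ":" + d.getArtifactId() + ":" + d.getVersion());
        }
    }
}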
+ * In any case though, these dependencies will be available during augmentation for processing + * using {@link io.quarkus.bootstrap.model.ApplicationModel#getDependencies(int)} by passing + * this flag as an argument. + */ + int COMPILE_ONLY = 0b01000000000000; /* @formatter:on */ } diff --git a/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/BootstrapAppModelResolver.java b/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/BootstrapAppModelResolver.java index 8d486d0c7a989..e7109757aa759 100644 --- a/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/BootstrapAppModelResolver.java +++ b/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/BootstrapAppModelResolver.java @@ -1,5 +1,8 @@ package io.quarkus.bootstrap.resolver; +import static io.quarkus.bootstrap.util.DependencyUtils.getKey; +import static io.quarkus.bootstrap.util.DependencyUtils.toAppArtifact; + import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -12,7 +15,6 @@ import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.artifact.DefaultArtifact; -import org.eclipse.aether.collection.CollectRequest; import org.eclipse.aether.graph.Dependency; import org.eclipse.aether.graph.DependencyNode; import org.eclipse.aether.graph.DependencyVisitor; @@ -134,7 +136,8 @@ public boolean visitEnter(DependencyNode node) { public boolean visitLeave(DependencyNode node) { final Dependency dep = node.getDependency(); if (dep != null) { - result.add(toAppArtifact(dep.getArtifact()).setScope(dep.getScope()).setOptional(dep.isOptional()).build()); + result.add(toAppArtifact(dep.getArtifact(), null).setScope(dep.getScope()).setOptional(dep.isOptional()) + .build()); } return true; } @@ -231,9 +234,8 @@ public ApplicationModel resolveModel(WorkspaceModule module) final List constraints = managedMap.isEmpty() ? 
List.of() : new ArrayList<>(managedMap.values()); return buildAppModel(mainDep, - MavenArtifactResolver.newCollectRequest(mainArtifact, directDeps, constraints, List.of(), - mvn.getRepositories()), - Set.of(), constraints, List.of()); + mainArtifact, directDeps, mvn.getRepositories(), + Set.of(), constraints); } private ApplicationModel doResolveModel(ArtifactCoords coords, @@ -244,7 +246,7 @@ private ApplicationModel doResolveModel(ArtifactCoords coords, if (coords == null) { throw new IllegalArgumentException("Application artifact is null"); } - final Artifact mvnArtifact = toAetherArtifact(coords); + Artifact mvnArtifact = toAetherArtifact(coords); List managedDeps = List.of(); List managedRepos = List.of(); @@ -256,11 +258,12 @@ private ApplicationModel doResolveModel(ArtifactCoords coords, List aggregatedRepos = mvn.aggregateRepositories(managedRepos, mvn.getRepositories()); final ResolvedDependency appArtifact = resolve(coords, mvnArtifact, aggregatedRepos); - final ArtifactDescriptorResult appArtifactDescr = resolveDescriptor(toAetherArtifact(appArtifact), aggregatedRepos); + mvnArtifact = toAetherArtifact(appArtifact); + final ArtifactDescriptorResult appArtifactDescr = resolveDescriptor(mvnArtifact, aggregatedRepos); Map managedVersions = Map.of(); if (!managedDeps.isEmpty()) { - final List mergedManagedDeps = new ArrayList( + final List mergedManagedDeps = new ArrayList<>( managedDeps.size() + appArtifactDescr.getManagedDependencies().size()); managedVersions = new HashMap<>(managedDeps.size()); for (Dependency dep : managedDeps) { @@ -278,14 +281,13 @@ private ApplicationModel doResolveModel(ArtifactCoords coords, managedDeps = appArtifactDescr.getManagedDependencies(); } - directMvnDeps = DependencyUtils.mergeDeps(directMvnDeps, appArtifactDescr.getDependencies(), managedVersions, - getExcludedScopes()); + directMvnDeps = DependencyUtils.mergeDeps(directMvnDeps, appArtifactDescr.getDependencies(), managedVersions, Set.of()); aggregatedRepos = mvn.aggregateRepositories(aggregatedRepos, mvn.newResolutionRepositories(appArtifactDescr.getRepositories())); return buildAppModel(appArtifact, - MavenArtifactResolver.newCollectRequest(mvnArtifact, directMvnDeps, managedDeps, List.of(), aggregatedRepos), - reloadableModules, managedDeps, aggregatedRepos); + mvnArtifact, directMvnDeps, aggregatedRepos, + reloadableModules, managedDeps); } private Set getExcludedScopes() { @@ -298,9 +300,10 @@ private Set getExcludedScopes() { return Set.of(JavaScopes.PROVIDED, JavaScopes.TEST); } - private ApplicationModel buildAppModel(ResolvedDependency appArtifact, CollectRequest collectRtDepsRequest, - Set reloadableModules, List managedDeps, List repos) - throws AppModelResolverException, BootstrapMavenException { + private ApplicationModel buildAppModel(ResolvedDependency appArtifact, + Artifact artifact, List directDeps, List repos, + Set reloadableModules, List managedDeps) + throws AppModelResolverException { final ApplicationModelBuilder appBuilder = new ApplicationModelBuilder().setAppArtifact(appArtifact); if (appArtifact.getWorkspaceModule() != null) { @@ -310,13 +313,26 @@ private ApplicationModel buildAppModel(ResolvedDependency appArtifact, CollectRe appBuilder.addReloadableWorkspaceModules(reloadableModules); } + var filteredProvidedDeps = new ArrayList(0); + var excludedScopes = getExcludedScopes(); + if (!excludedScopes.isEmpty()) { + var filtered = new ArrayList(directDeps.size()); + for (var d : directDeps) { + if (!excludedScopes.contains(d.getScope())) { + filtered.add(d); + } else 
if (JavaScopes.PROVIDED.equals(d.getScope())) { + filteredProvidedDeps.add(d); + } + } + directDeps = filtered; + } + var collectRtDepsRequest = MavenArtifactResolver.newCollectRequest(artifact, directDeps, managedDeps, List.of(), repos); try { ApplicationDependencyTreeResolver.newInstance() .setArtifactResolver(mvn) - .setManagedDependencies(managedDeps) - .setMainRepositories(repos) .setApplicationModelBuilder(appBuilder) .setCollectReloadableModules(collectReloadableDeps && reloadableModules.isEmpty()) + .setCollectCompileOnly(filteredProvidedDeps) .setBuildTreeConsumer(buildTreeConsumer) .resolve(collectRtDepsRequest); } catch (BootstrapDependencyProcessingException e) { @@ -482,18 +498,6 @@ private static Artifact toAetherArtifact(ArtifactCoords artifact) { artifact.getClassifier(), artifact.getType(), artifact.getVersion()); } - private ResolvedDependencyBuilder toAppArtifact(Artifact artifact) { - return toAppArtifact(artifact, null); - } - - private ResolvedDependencyBuilder toAppArtifact(Artifact artifact, WorkspaceModule module) { - return ApplicationDependencyTreeResolver.toAppArtifact(artifact, module); - } - - private static ArtifactKey getKey(Artifact artifact) { - return DependencyUtils.getKey(artifact); - } - private static List toAetherDeps(Collection directDeps) { if (directDeps.isEmpty()) { return List.of(); diff --git a/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/ApplicationDependencyTreeResolver.java b/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/ApplicationDependencyTreeResolver.java index fddf6c228c8d9..f7d2cc72d0a07 100644 --- a/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/ApplicationDependencyTreeResolver.java +++ b/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/ApplicationDependencyTreeResolver.java @@ -1,5 +1,9 @@ package io.quarkus.bootstrap.resolver.maven; +import static io.quarkus.bootstrap.util.DependencyUtils.getKey; +import static io.quarkus.bootstrap.util.DependencyUtils.newDependencyBuilder; +import static io.quarkus.bootstrap.util.DependencyUtils.toArtifact; + import java.io.BufferedReader; import java.io.IOException; import java.io.UncheckedIOException; @@ -25,6 +29,7 @@ import org.eclipse.aether.RepositorySystemSession; import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.collection.CollectRequest; +import org.eclipse.aether.collection.DependencyCollectionException; import org.eclipse.aether.collection.DependencyGraphTransformationContext; import org.eclipse.aether.collection.DependencyGraphTransformer; import org.eclipse.aether.collection.DependencySelector; @@ -33,6 +38,9 @@ import org.eclipse.aether.graph.DependencyNode; import org.eclipse.aether.graph.Exclusion; import org.eclipse.aether.repository.RemoteRepository; +import org.eclipse.aether.resolution.ArtifactDescriptorResult; +import org.eclipse.aether.resolution.ArtifactRequest; +import org.eclipse.aether.resolution.ArtifactResolutionException; import org.eclipse.aether.resolution.DependencyRequest; import org.eclipse.aether.resolution.DependencyResolutionException; import org.eclipse.aether.util.artifact.JavaScopes; @@ -55,7 +63,6 @@ import io.quarkus.maven.dependency.ArtifactKey; import io.quarkus.maven.dependency.DependencyFlags; import io.quarkus.maven.dependency.ResolvedDependencyBuilder; -import io.quarkus.paths.PathList; import io.quarkus.paths.PathTree; public class 
ApplicationDependencyTreeResolver { @@ -95,26 +102,16 @@ public static Artifact getRuntimeArtifact(DependencyNode dep) { private MavenArtifactResolver resolver; private List managedDeps; - private List mainRepos; private ApplicationModelBuilder appBuilder; private boolean collectReloadableModules; private Consumer buildTreeConsumer; + private List collectCompileOnly; public ApplicationDependencyTreeResolver setArtifactResolver(MavenArtifactResolver resolver) { this.resolver = resolver; return this; } - public ApplicationDependencyTreeResolver setManagedDependencies(List managedDeps) { - this.managedDeps = managedDeps; - return this; - } - - public ApplicationDependencyTreeResolver setMainRepositories(List mainRepos) { - this.mainRepos = mainRepos; - return this; - } - public ApplicationDependencyTreeResolver setApplicationModelBuilder(ApplicationModelBuilder appBuilder) { this.appBuilder = appBuilder; return this; @@ -130,8 +127,21 @@ public ApplicationDependencyTreeResolver setBuildTreeConsumer(Consumer b return this; } + /** + * In addition to resolving dependencies for the build classpath, also resolve these compile-only dependencies + * and add them to the application model as {@link DependencyFlags#COMPILE_ONLY}. + * + * @param collectCompileOnly compile-only dependencies to add to the model + * @return self + */ + public ApplicationDependencyTreeResolver setCollectCompileOnly(List collectCompileOnly) { + this.collectCompileOnly = collectCompileOnly; + return this; + } + public void resolve(CollectRequest collectRtDepsRequest) throws AppModelResolverException { + this.managedDeps = collectRtDepsRequest.getManagedDependencies(); DependencyNode root = resolveRuntimeDeps(collectRtDepsRequest); if (collectReloadableModules) { @@ -204,10 +214,8 @@ public void resolve(CollectRequest collectRtDepsRequest) throws AppModelResolver } root = normalize(originalSession, root); - - final BuildDependencyGraphVisitor buildDepsVisitor = new BuildDependencyGraphVisitor(originalResolver, appBuilder, - buildTreeConsumer); - buildDepsVisitor.visit(root); + // add deployment dependencies + new BuildDependencyGraphVisitor(originalResolver, appBuilder, buildTreeConsumer).visit(root); if (!CONVERGED_TREE_ONLY && collectReloadableModules) { for (ResolvedDependencyBuilder db : appBuilder.getDependencies()) { @@ -224,6 +232,72 @@ public void resolve(CollectRequest collectRtDepsRequest) throws AppModelResolver } collectPlatformProperties(); + collectCompileOnly(collectRtDepsRequest, root); + } + + /** + * Resolves and adds compile-only dependencies to the application model with the {@link DependencyFlags#COMPILE_ONLY} flag. + * Compile-only dependencies are resolved as direct dependencies of the root with all the previously resolved dependencies + * enforced as version constraints to make sure compile-only dependencies do not override runtime dependencies of the final + * application. 
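// --- Editor's illustrative sketch, not part of this diff ---
// The approach described in the javadoc above, reduced to its core: every dependency already
// present in the resolved build-time tree is re-registered as a managed dependency (version
// constraint) on the request that collects the compile-only dependencies, so they cannot pull in
// different versions. Only the Aether types are real; the class and method names are hypothetical.
import java.util.ArrayDeque;
import java.util.List;

import org.eclipse.aether.collection.CollectRequest;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.graph.DependencyNode;

class CompileOnlyConstraintSketch {
    static CollectRequest newCompileOnlyRequest(DependencyNode resolvedRoot, List<Dependency> compileOnlyDeps) {
        CollectRequest request = new CollectRequest().setDependencies(compileOnlyDeps);
        // breadth-first walk over the already resolved tree, pinning every encountered version
        ArrayDeque<DependencyNode> queue = new ArrayDeque<>(resolvedRoot.getChildren());
        while (!queue.isEmpty()) {
            DependencyNode node = queue.poll();
            request.addManagedDependency(node.getDependency());
            queue.addAll(node.getChildren());
        }
        return request;
    }
}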
+ * + * @param collectRtDepsRequest original runtime dependencies collection request + * @param root the root node of the Quarkus build time dependency tree + * @throws BootstrapMavenException in case of a failure + */ + private void collectCompileOnly(CollectRequest collectRtDepsRequest, DependencyNode root) throws BootstrapMavenException { + if (collectCompileOnly.isEmpty()) { + return; + } + // add all the build time dependencies as version constraints + var depStack = new ArrayDeque>(); + var children = root.getChildren(); + while (children != null) { + for (DependencyNode node : children) { + managedDeps.add(node.getDependency()); + if (!node.getChildren().isEmpty()) { + depStack.add(node.getChildren()); + } + } + children = depStack.poll(); + } + final CollectRequest request = new CollectRequest() + .setDependencies(collectCompileOnly) + .setManagedDependencies(managedDeps) + .setRepositories(collectRtDepsRequest.getRepositories()); + if (collectRtDepsRequest.getRoot() != null) { + request.setRoot(collectRtDepsRequest.getRoot()); + } else { + request.setRootArtifact(collectRtDepsRequest.getRootArtifact()); + } + + try { + root = resolver.getSystem().collectDependencies(resolver.getSession(), request).getRoot(); + } catch (DependencyCollectionException e) { + throw new BootstrapDependencyProcessingException( + "Failed to collect compile-only dependencies of " + root.getArtifact(), e); + } + children = root.getChildren(); + int flags = DependencyFlags.DIRECT | DependencyFlags.COMPILE_ONLY; + while (children != null) { + for (DependencyNode node : children) { + if (appBuilder.getDependency(getKey(node.getArtifact())) == null) { + var dep = newDependencyBuilder(node, resolver).setFlags(flags); + if (getExtensionInfoOrNull(node.getArtifact(), node.getRepositories()) != null) { + dep.setFlags(DependencyFlags.RUNTIME_EXTENSION_ARTIFACT); + if (dep.isFlagSet(DependencyFlags.DIRECT)) { + dep.setFlags(DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT); + } + } + appBuilder.addDependency(dep); + } + if (!node.getChildren().isEmpty()) { + depStack.add(node.getChildren()); + } + } + flags = DependencyFlags.COMPILE_ONLY; + children = depStack.poll(); + } } private void collectPlatformProperties() throws AppModelResolverException { @@ -342,7 +416,7 @@ private void visitRuntimeDependency(DependencyNode node) { final ArtifactKey key = getKey(artifact); ResolvedDependencyBuilder dep = appBuilder.getDependency(key); if (dep == null) { - artifact = resolve(artifact); + artifact = resolve(artifact, node.getRepositories()); } try { @@ -354,12 +428,15 @@ private void visitRuntimeDependency(DependencyNode node) { module = resolver.getProjectModuleResolver().getProjectModule(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion()); } - dep = toAppArtifact(artifact, module) + dep = DependencyUtils.toAppArtifact(artifact, module) .setOptional(node.getDependency().isOptional()) .setScope(node.getDependency().getScope()) .setDirect(isWalkingFlagOn(COLLECT_DIRECT_DEPS)) .setRuntimeCp() .setDeploymentCp(); + if (JavaScopes.PROVIDED.equals(dep.getScope())) { + dep.setFlags(DependencyFlags.COMPILE_ONLY); + } if (extDep != null) { dep.setRuntimeExtensionArtifact(); if (isWalkingFlagOn(COLLECT_TOP_EXTENSION_RUNTIME_NODES)) { @@ -402,20 +479,18 @@ private ExtensionDependency getExtensionDependencyOrNull(DependencyNode node, Ar if (extDep != null) { return extDep; } - final ExtensionInfo extInfo = getExtensionInfoOrNull(artifact); + final ExtensionInfo extInfo = getExtensionInfoOrNull(artifact, 
node.getRepositories()); if (extInfo != null) { - Collection exclusions; - if (!exclusionStack.isEmpty()) { - if (exclusionStack.size() == 1) { - exclusions = exclusionStack.peekLast(); - } else { - exclusions = new ArrayList<>(); - for (Collection set : exclusionStack) { - exclusions.addAll(set); - } - } - } else { + final Collection exclusions; + if (exclusionStack.isEmpty()) { exclusions = List.of(); + } else if (exclusionStack.size() == 1) { + exclusions = exclusionStack.peekLast(); + } else { + exclusions = new ArrayList<>(); + for (Collection set : exclusionStack) { + exclusions.addAll(set); + } } return new ExtensionDependency(extInfo, node, exclusions); } @@ -452,7 +527,8 @@ private void collectConditionalDependencies(ExtensionDependency dependent) if (selector != null && !selector.selectDependency(new Dependency(conditionalArtifact, JavaScopes.RUNTIME))) { continue; } - final ExtensionInfo conditionalInfo = getExtensionInfoOrNull(conditionalArtifact); + final ExtensionInfo conditionalInfo = getExtensionInfoOrNull(conditionalArtifact, + dependent.runtimeNode.getRepositories()); if (conditionalInfo == null) { log.warn(dependent.info.runtimeArtifact + " declares a conditional dependency on " + conditionalArtifact + " that is not a Quarkus extension and will be ignored"); @@ -467,7 +543,8 @@ private void collectConditionalDependencies(ExtensionDependency dependent) } } - private ExtensionInfo getExtensionInfoOrNull(Artifact artifact) throws BootstrapDependencyProcessingException { + private ExtensionInfo getExtensionInfoOrNull(Artifact artifact, List repos) + throws BootstrapDependencyProcessingException { if (!artifact.getExtension().equals(ArtifactCoords.TYPE_JAR)) { return null; } @@ -477,7 +554,7 @@ private ExtensionInfo getExtensionInfoOrNull(Artifact artifact) throws Bootstrap return ext; } - artifact = resolve(artifact); + artifact = resolve(artifact, repos); final Path path = artifact.getFile().toPath(); final Properties descriptor = PathTree.ofDirectoryOrArchive(path).apply(BootstrapConstants.DESCRIPTOR_PATH, visit -> { if (visit == null) { @@ -499,7 +576,8 @@ private ExtensionInfo getExtensionInfoOrNull(Artifact artifact) throws Bootstrap private void injectDeploymentDependencies(ExtensionDependency extDep) throws BootstrapDependencyProcessingException { log.debugf("Injecting deployment dependency %s", extDep.info.deploymentArtifact); - final DependencyNode deploymentNode = collectDependencies(extDep.info.deploymentArtifact, extDep.exclusions); + final DependencyNode deploymentNode = collectDependencies(extDep.info.deploymentArtifact, extDep.exclusions, + extDep.runtimeNode.getRepositories()); if (deploymentNode.getChildren().isEmpty()) { throw new BootstrapDependencyProcessingException( "Failed to collect dependencies of " + deploymentNode.getArtifact() @@ -592,27 +670,66 @@ private boolean replaceRuntimeBranch(ExtensionDependency extNode, List exclusions) { + private DependencyNode collectDependencies(Artifact artifact, Collection exclusions, + List repos) { + final CollectRequest request; + if (managedDeps.isEmpty()) { + request = new CollectRequest() + .setRoot(new Dependency(artifact, JavaScopes.COMPILE, false, exclusions)) + .setRepositories(repos); + } else { + final ArtifactDescriptorResult descr; + try { + descr = resolver.resolveDescriptor(artifact, repos); + } catch (BootstrapMavenException e) { + throw new DeploymentInjectionException("Failed to resolve descriptor for " + artifact, e); + } + final List mergedManagedDeps = new ArrayList<>( + managedDeps.size() + 
descr.getManagedDependencies().size()); + final Map managedVersions = new HashMap<>(managedDeps.size()); + for (Dependency dep : managedDeps) { + managedVersions.put(DependencyUtils.getKey(dep.getArtifact()), dep.getArtifact().getVersion()); + mergedManagedDeps.add(dep); + } + for (Dependency dep : descr.getManagedDependencies()) { + final ArtifactKey key = DependencyUtils.getKey(dep.getArtifact()); + if (!managedVersions.containsKey(key)) { + mergedManagedDeps.add(dep); + } + } + + var directDeps = DependencyUtils.mergeDeps(List.of(), descr.getDependencies(), managedVersions, + Set.of(JavaScopes.PROVIDED, JavaScopes.TEST)); + + request = new CollectRequest() + .setDependencies(directDeps) + .setManagedDependencies(mergedManagedDeps) + .setRepositories(repos); + if (exclusions.isEmpty()) { + request.setRootArtifact(artifact); + } else { + request.setRoot(new Dependency(artifact, JavaScopes.COMPILE, false, exclusions)); + } + } try { - return managedDeps.isEmpty() - ? resolver.collectDependencies(artifact, List.of(), mainRepos, exclusions).getRoot() - : resolver - .collectManagedDependencies(artifact, List.of(), managedDeps, mainRepos, exclusions, - JavaScopes.TEST, JavaScopes.PROVIDED) - .getRoot(); - } catch (AppModelResolverException e) { - throw new DeploymentInjectionException(e); + return resolver.getSystem().collectDependencies(resolver.getSession(), request).getRoot(); + } catch (DependencyCollectionException e) { + throw new DeploymentInjectionException("Failed to collect dependencies for " + artifact, e); } } - private Artifact resolve(Artifact artifact) { + private Artifact resolve(Artifact artifact, List repos) { if (artifact.getFile() != null) { return artifact; } try { - return resolver.resolve(artifact).getArtifact(); - } catch (AppModelResolverException e) { - throw new DeploymentInjectionException(e); + return resolver.getSystem().resolveArtifact(resolver.getSession(), + new ArtifactRequest() + .setArtifact(artifact) + .setRepositories(repos)) + .getArtifact(); + } catch (ArtifactResolutionException e) { + throw new DeploymentInjectionException("Failed to resolve artifact " + artifact, e); } } @@ -655,7 +772,7 @@ private class ExtensionInfo { throw new BootstrapDependencyProcessingException("Extension descriptor from " + runtimeArtifact + " does not include " + BootstrapConstants.PROP_DEPLOYMENT_ARTIFACT); } - Artifact deploymentArtifact = DependencyUtils.toArtifact(value); + Artifact deploymentArtifact = toArtifact(value); if (deploymentArtifact.getVersion() == null || deploymentArtifact.getVersion().isEmpty()) { deploymentArtifact = deploymentArtifact.setVersion(runtimeArtifact.getVersion()); } @@ -667,7 +784,7 @@ private class ExtensionInfo { conditionalDeps = new Artifact[deps.length]; for (int i = 0; i < deps.length; ++i) { try { - conditionalDeps[i] = DependencyUtils.toArtifact(deps[i]); + conditionalDeps[i] = toArtifact(deps[i]); } catch (Exception e) { throw new BootstrapDependencyProcessingException( "Failed to parse conditional dependencies configuration of " + runtimeArtifact, e); @@ -746,23 +863,26 @@ private ConditionalDependency(ExtensionInfo info, ExtensionDependency dependent) ExtensionDependency getExtensionDependency() { if (dependency == null) { - final DefaultDependencyNode rtNode = new DefaultDependencyNode(new Dependency(info.runtimeArtifact, "runtime")); + final DefaultDependencyNode rtNode = new DefaultDependencyNode( + new Dependency(info.runtimeArtifact, JavaScopes.RUNTIME)); rtNode.setVersion(new 
BootstrapArtifactVersion(info.runtimeArtifact.getVersion())); rtNode.setVersionConstraint(new BootstrapArtifactVersionConstraint( new BootstrapArtifactVersion(info.runtimeArtifact.getVersion()))); + rtNode.setRepositories(dependent.runtimeNode.getRepositories()); dependency = new ExtensionDependency(info, rtNode, dependent.exclusions); } return dependency; } - void activate() throws BootstrapDependencyProcessingException { + void activate() { if (activated) { return; } activated = true; clearWalkingFlag(COLLECT_TOP_EXTENSION_RUNTIME_NODES); final ExtensionDependency extDep = getExtensionDependency(); - final DependencyNode originalNode = collectDependencies(info.runtimeArtifact, extDep.exclusions); + final DependencyNode originalNode = collectDependencies(info.runtimeArtifact, extDep.exclusions, + extDep.runtimeNode.getRepositories()); final DefaultDependencyNode rtNode = (DefaultDependencyNode) extDep.runtimeNode; rtNode.setRepositories(originalNode.getRepositories()); // if this node has conditional dependencies on its own, they may have been activated by this time @@ -777,7 +897,7 @@ void activate() throws BootstrapDependencyProcessingException { dependent.runtimeNode.getChildren().add(rtNode); } - boolean isSatisfied() throws BootstrapDependencyProcessingException { + boolean isSatisfied() { if (info.dependencyCondition == null) { return true; } @@ -797,21 +917,6 @@ private static boolean isSameKey(Artifact a1, Artifact a2) { && a2.getExtension().equals(a1.getExtension()); } - private static ArtifactKey getKey(Artifact a) { - return DependencyUtils.getKey(a); - } - - public static ResolvedDependencyBuilder toAppArtifact(Artifact artifact, WorkspaceModule module) { - return ResolvedDependencyBuilder.newInstance() - .setWorkspaceModule(module) - .setGroupId(artifact.getGroupId()) - .setArtifactId(artifact.getArtifactId()) - .setClassifier(artifact.getClassifier()) - .setType(artifact.getExtension()) - .setVersion(artifact.getVersion()) - .setResolvedPaths(artifact.getFile() == null ? 
PathList.empty() : PathList.of(artifact.getFile().toPath())); - } - private static String toCompactCoords(Artifact a) { final StringBuilder b = new StringBuilder(); b.append(a.getGroupId()).append(':').append(a.getArtifactId()).append(':'); diff --git a/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/BuildDependencyGraphVisitor.java b/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/BuildDependencyGraphVisitor.java index bb63e6f6f939d..025aa5413c781 100644 --- a/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/BuildDependencyGraphVisitor.java +++ b/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/BuildDependencyGraphVisitor.java @@ -3,6 +3,9 @@ */ package io.quarkus.bootstrap.resolver.maven; +import static io.quarkus.bootstrap.util.DependencyUtils.getKey; +import static io.quarkus.bootstrap.util.DependencyUtils.newDependencyBuilder; + import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; @@ -12,9 +15,6 @@ import org.eclipse.aether.graph.DependencyNode; import io.quarkus.bootstrap.model.ApplicationModelBuilder; -import io.quarkus.bootstrap.util.DependencyUtils; -import io.quarkus.bootstrap.workspace.WorkspaceModule; -import io.quarkus.maven.dependency.ArtifactKey; import io.quarkus.maven.dependency.DependencyFlags; public class BuildDependencyGraphVisitor { @@ -132,28 +132,7 @@ private void visitLeave(DependencyNode node) throws BootstrapMavenException { return; } if (currentRuntime == null && appBuilder.getDependency(getKey(node.getArtifact())) == null) { - - Artifact artifact = dep.getArtifact(); - if (artifact.getFile() == null) { - artifact = resolver.resolve(artifact, node.getRepositories()).getArtifact(); - } - - int flags = DependencyFlags.DEPLOYMENT_CP; - if (node.getDependency().isOptional()) { - flags |= DependencyFlags.OPTIONAL; - } - WorkspaceModule module = null; - if (resolver.getProjectModuleResolver() != null) { - module = resolver.getProjectModuleResolver().getProjectModule(artifact.getGroupId(), artifact.getArtifactId(), - artifact.getVersion()); - if (module != null) { - flags |= DependencyFlags.WORKSPACE_MODULE; - } - } - appBuilder.addDependency(ApplicationDependencyTreeResolver.toAppArtifact(artifact, module) - .setScope(node.getDependency().getScope()) - .setFlags(flags)); - + appBuilder.addDependency(newDependencyBuilder(node, resolver).setFlags(DependencyFlags.DEPLOYMENT_CP)); } else if (currentRuntime == node) { currentRuntime = null; runtimeArtifactToFind = null; @@ -162,8 +141,4 @@ private void visitLeave(DependencyNode node) throws BootstrapMavenException { currentDeployment = null; } } - - private static ArtifactKey getKey(Artifact artifact) { - return DependencyUtils.getKey(artifact); - } } diff --git a/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/util/DependencyUtils.java b/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/util/DependencyUtils.java index 41eac854f01cd..66998179e9e7c 100644 --- a/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/util/DependencyUtils.java +++ b/independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/util/DependencyUtils.java @@ -1,6 +1,5 @@ package io.quarkus.bootstrap.util; -import java.io.PrintWriter; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -12,9 
+11,15 @@ import org.eclipse.aether.graph.Dependency; import org.eclipse.aether.graph.DependencyNode; +import io.quarkus.bootstrap.resolver.maven.BootstrapMavenException; +import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; +import io.quarkus.bootstrap.workspace.WorkspaceModule; import io.quarkus.maven.dependency.ArtifactCoords; import io.quarkus.maven.dependency.ArtifactKey; +import io.quarkus.maven.dependency.DependencyFlags; import io.quarkus.maven.dependency.GACTV; +import io.quarkus.maven.dependency.ResolvedDependencyBuilder; +import io.quarkus.paths.PathList; public class DependencyUtils { @@ -61,16 +66,13 @@ public static List mergeDeps(List dominant, List:[:|[::]]:"); } - public static void printTree(DependencyNode node) { - PrintWriter out = new PrintWriter(System.out); - try { - printTree(node, out); - } finally { - out.flush(); + public static ResolvedDependencyBuilder newDependencyBuilder(DependencyNode node, MavenArtifactResolver resolver) + throws BootstrapMavenException { + var artifact = node.getDependency().getArtifact(); + if (artifact.getFile() == null) { + artifact = resolver.resolve(artifact, node.getRepositories()).getArtifact(); } - } - - public static void printTree(DependencyNode node, PrintWriter out) { - out.println("Dependency tree for " + node.getArtifact()); - printTree(node, 0, out); - } - - private static void printTree(DependencyNode node, int depth, PrintWriter out) { - if (node.getArtifact() != null) { - for (int i = 0; i < depth; ++i) { - out.append(" "); - } - out.println(node.getArtifact()); + int flags = 0; + if (node.getDependency().isOptional()) { + flags |= DependencyFlags.OPTIONAL; } - for (DependencyNode c : node.getChildren()) { - printTree(c, depth + 1, out); + WorkspaceModule module = null; + if (resolver.getProjectModuleResolver() != null) { + module = resolver.getProjectModuleResolver().getProjectModule(artifact.getGroupId(), artifact.getArtifactId(), + artifact.getVersion()); + if (module != null) { + flags |= DependencyFlags.WORKSPACE_MODULE; + } } + return toAppArtifact(artifact, module) + .setScope(node.getDependency().getScope()) + .setFlags(flags); + } + + public static ResolvedDependencyBuilder toAppArtifact(Artifact artifact, WorkspaceModule module) { + return ResolvedDependencyBuilder.newInstance() + .setWorkspaceModule(module) + .setGroupId(artifact.getGroupId()) + .setArtifactId(artifact.getArtifactId()) + .setClassifier(artifact.getClassifier()) + .setType(artifact.getExtension()) + .setVersion(artifact.getVersion()) + .setResolvedPaths(artifact.getFile() == null ? 
PathList.empty() : PathList.of(artifact.getFile().toPath())); } } diff --git a/independent-projects/bootstrap/pom.xml b/independent-projects/bootstrap/pom.xml index 44220f01bd483..e1c6d7fa849eb 100644 --- a/independent-projects/bootstrap/pom.xml +++ b/independent-projects/bootstrap/pom.xml @@ -40,7 +40,7 @@ 3.1.6 - 3.24.2 + 3.25.1 0.9.5 3.5.3.Final 5.10.1 @@ -76,7 +76,7 @@ 2.1.2 1.3.2 8.5 - 0.0.9 + 0.0.10 0.1.3 2.23.0 1.9.0 diff --git a/independent-projects/qute/pom.xml b/independent-projects/qute/pom.xml index 080d7139ba1d7..90207449b7792 100644 --- a/independent-projects/qute/pom.xml +++ b/independent-projects/qute/pom.xml @@ -39,7 +39,7 @@ UTF-8 5.10.1 - 3.24.2 + 3.25.1 3.1.6 1.7.0 3.5.3.Final diff --git a/independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/StorkClientRequestFilter.java b/independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/StorkClientRequestFilter.java index 7083d96803940..60990009a9d88 100644 --- a/independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/StorkClientRequestFilter.java +++ b/independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/StorkClientRequestFilter.java @@ -7,6 +7,7 @@ import jakarta.annotation.Priority; import jakarta.ws.rs.Priorities; import jakarta.ws.rs.core.GenericType; +import jakarta.ws.rs.core.UriBuilder; import jakarta.ws.rs.ext.Provider; import org.jboss.logging.Logger; @@ -62,7 +63,7 @@ public void filter(ResteasyReactiveClientRequestContext requestContext) { } // Service instance can also contain an optional path. Optional path = instance.getPath(); - String actualPath = uri.getPath(); + String actualPath = uri.getRawPath(); if (path.isPresent()) { var p = path.get(); if (!p.startsWith("/")) { @@ -79,11 +80,12 @@ public void filter(ResteasyReactiveClientRequestContext requestContext) { } } } - + //To avoid the path double encoding we create uri with path=null and set the path after URI newUri = new URI(scheme, uri.getUserInfo(), host, port, - actualPath, uri.getQuery(), uri.getFragment()); - requestContext.setUri(newUri); + null, uri.getQuery(), uri.getFragment()); + URI build = UriBuilder.fromUri(newUri).path(actualPath).build(); + requestContext.setUri(build); if (measureTime && instance.gatherStatistics()) { requestContext.setCallStatsCollector(instance); } diff --git a/independent-projects/resteasy-reactive/pom.xml b/independent-projects/resteasy-reactive/pom.xml index 556ce3208b67b..e6cc83b7600f1 100644 --- a/independent-projects/resteasy-reactive/pom.xml +++ b/independent-projects/resteasy-reactive/pom.xml @@ -46,10 +46,10 @@ 4.0.1 3.1.6 - 1.12.12 + 1.14.7 5.10.1 3.9.6 - 3.24.2 + 3.25.1 3.5.3.Final 3.0.4.Final 2.1.1 @@ -72,6 +72,7 @@ 4.2.0 3.7.2 1.0.4 + 5.8.0 1.0.0 diff --git a/independent-projects/resteasy-reactive/server/runtime/pom.xml b/independent-projects/resteasy-reactive/server/runtime/pom.xml index 30c15219f74db..10a71b2f8e375 100644 --- a/independent-projects/resteasy-reactive/server/runtime/pom.xml +++ b/independent-projects/resteasy-reactive/server/runtime/pom.xml @@ -42,7 +42,12 @@ org.jboss.logging jboss-logging - + + org.mockito + mockito-core + ${mockito.version} + test + diff --git a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/handlers/MediaTypeMapper.java 
b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/handlers/MediaTypeMapper.java index 20d6749cc8299..054febcab4d5c 100644 --- a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/handlers/MediaTypeMapper.java +++ b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/handlers/MediaTypeMapper.java @@ -17,7 +17,6 @@ import org.jboss.resteasy.reactive.common.util.ServerMediaType; import org.jboss.resteasy.reactive.server.core.ResteasyReactiveRequestContext; import org.jboss.resteasy.reactive.server.mapping.RuntimeResource; -import org.jboss.resteasy.reactive.server.spi.ServerHttpRequest; import org.jboss.resteasy.reactive.server.spi.ServerRestHandler; /** @@ -100,12 +99,13 @@ public void handle(ResteasyReactiveRequestContext requestContext) throws Excepti public MediaType selectMediaType(ResteasyReactiveRequestContext requestContext, Holder holder) { MediaType selected = null; - ServerHttpRequest httpServerRequest = requestContext.serverRequest(); - if (httpServerRequest.containsRequestHeader(HttpHeaders.ACCEPT)) { + List accepts = requestContext.getHttpHeaders().getRequestHeader(HttpHeaders.ACCEPT); + for (String accept : accepts) { Map.Entry entry = holder.serverMediaType - .negotiateProduces(requestContext.serverRequest().getRequestHeader(HttpHeaders.ACCEPT), null); + .negotiateProduces(accept, null); if (entry.getValue() != null) { selected = entry.getValue(); + break; } } if (selected == null) { diff --git a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/SseBroadcasterImpl.java b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/SseBroadcasterImpl.java index 1c5a714415049..07b0d2801fb50 100644 --- a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/SseBroadcasterImpl.java +++ b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/SseBroadcasterImpl.java @@ -126,5 +126,7 @@ synchronized void fireClose(SseEventSinkImpl sseEventSink) { for (Consumer listener : onCloseListeners) { listener.accept(sseEventSink); } + if (!isClosed) + sinks.remove(sseEventSink); } } diff --git a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/SseEventSinkImpl.java b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/SseEventSinkImpl.java index 05280e20dc474..bce377a34da90 100644 --- a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/SseEventSinkImpl.java +++ b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/SseEventSinkImpl.java @@ -37,18 +37,19 @@ public CompletionStage send(OutboundSseEvent event) { @Override public synchronized void close() { - if (isClosed()) + if (closed) return; closed = true; - // FIXME: do we need a state flag? 
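// Editor's note on the close() change below (not part of this diff): the response is now only
// finalized when the underlying HTTP response has not already been closed, e.g. because the client
// disconnected first; in that case writing headers and calling end() are skipped, and the sink
// still notifies its broadcaster so the closed sink can be removed from the broadcast list.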
ServerHttpResponse response = context.serverResponse(); - if (!response.headWritten()) { - // make sure we send the headers if we're closing this sink before the - // endpoint method is over - SseUtil.setHeaders(context, response); + if (!response.closed()) { + if (!response.headWritten()) { + // make sure we send the headers if we're closing this sink before the + // endpoint method is over + SseUtil.setHeaders(context, response); + } + response.end(); + context.close(); } - response.end(); - context.close(); if (broadcaster != null) broadcaster.fireClose(this); } @@ -69,11 +70,8 @@ public void accept(Throwable throwable) { // I don't think we should be firing the exception on the broadcaster here } }); - // response.closeHandler(v -> { - // // FIXME: notify of client closing - // System.err.println("Server connection closed"); - // }); } + response.addCloseHandler(this::close); } void register(SseBroadcasterImpl broadcaster) { diff --git a/independent-projects/resteasy-reactive/server/runtime/src/test/java/org/jboss/resteasy/reactive/server/jaxrs/SseServerBroadcasterTests.java b/independent-projects/resteasy-reactive/server/runtime/src/test/java/org/jboss/resteasy/reactive/server/jaxrs/SseServerBroadcasterTests.java new file mode 100644 index 0000000000000..425fe72ba1781 --- /dev/null +++ b/independent-projects/resteasy-reactive/server/runtime/src/test/java/org/jboss/resteasy/reactive/server/jaxrs/SseServerBroadcasterTests.java @@ -0,0 +1,82 @@ +package org.jboss.resteasy.reactive.server.jaxrs; + +import static org.mockito.ArgumentMatchers.any; + +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicBoolean; + +import jakarta.ws.rs.sse.OutboundSseEvent; +import jakarta.ws.rs.sse.SseBroadcaster; + +import org.jboss.resteasy.reactive.server.core.ResteasyReactiveRequestContext; +import org.jboss.resteasy.reactive.server.core.SseUtil; +import org.jboss.resteasy.reactive.server.spi.ServerHttpResponse; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.MockedStatic; +import org.mockito.Mockito; + +public class SseServerBroadcasterTests { + + @Test + public void shouldCloseRegisteredSinksWhenClosingBroadcaster() { + OutboundSseEvent.Builder builder = SseImpl.INSTANCE.newEventBuilder(); + SseBroadcaster broadcaster = SseImpl.INSTANCE.newBroadcaster(); + SseEventSinkImpl sseEventSink = Mockito.spy(new SseEventSinkImpl(getMockContext())); + broadcaster.register(sseEventSink); + try (MockedStatic utilities = Mockito.mockStatic(SseUtil.class)) { + utilities.when(() -> SseUtil.send(any(), any(), any())).thenReturn(CompletableFuture.completedFuture(null)); + broadcaster.broadcast(builder.data("test").build()); + broadcaster.close(); + Mockito.verify(sseEventSink).close(); + } + } + + @Test + public void shouldNotSendToClosedSink() { + OutboundSseEvent.Builder builder = SseImpl.INSTANCE.newEventBuilder(); + SseBroadcaster broadcaster = SseImpl.INSTANCE.newBroadcaster(); + SseEventSinkImpl sseEventSink = Mockito.spy(new SseEventSinkImpl(getMockContext())); + broadcaster.register(sseEventSink); + try (MockedStatic utilities = Mockito.mockStatic(SseUtil.class)) { + utilities.when(() -> SseUtil.send(any(), any(), any())).thenReturn(CompletableFuture.completedFuture(null)); + OutboundSseEvent sseEvent = builder.data("test").build(); + broadcaster.broadcast(sseEvent); + sseEventSink.close(); + broadcaster.broadcast(builder.data("should-not-be-sent").build()); + 
Mockito.verify(sseEventSink).send(sseEvent); + } + } + + @Test + public void shouldExecuteOnClose() { + // init broadcaster + SseBroadcaster broadcaster = SseImpl.INSTANCE.newBroadcaster(); + AtomicBoolean executed = new AtomicBoolean(false); + broadcaster.onClose(sink -> executed.set(true)); + // init sink + ResteasyReactiveRequestContext mockContext = getMockContext(); + SseEventSinkImpl sseEventSink = new SseEventSinkImpl(mockContext); + SseEventSinkImpl sinkSpy = Mockito.spy(sseEventSink); + broadcaster.register(sinkSpy); + try (MockedStatic utilities = Mockito.mockStatic(SseUtil.class)) { + utilities.when(() -> SseUtil.send(any(), any(), any())).thenReturn(CompletableFuture.completedFuture(null)); + // call to register onCloseHandler + ServerHttpResponse response = mockContext.serverResponse(); + sinkSpy.sendInitialResponse(response); + ArgumentCaptor closeHandler = ArgumentCaptor.forClass(Runnable.class); + Mockito.verify(response).addCloseHandler(closeHandler.capture()); + // run closeHandler to simulate closing context + closeHandler.getValue().run(); + Assertions.assertTrue(executed.get()); + } + } + + private ResteasyReactiveRequestContext getMockContext() { + ResteasyReactiveRequestContext requestContext = Mockito.mock(ResteasyReactiveRequestContext.class); + ServerHttpResponse response = Mockito.mock(ServerHttpResponse.class); + Mockito.when(requestContext.serverResponse()).thenReturn(response); + return requestContext; + } +} diff --git a/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/matching/PreMatchAcceptInHeader.java b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/matching/PreMatchAcceptInHeader.java new file mode 100644 index 0000000000000..cfbd44a4795fc --- /dev/null +++ b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/matching/PreMatchAcceptInHeader.java @@ -0,0 +1,124 @@ +package org.jboss.resteasy.reactive.server.vertx.test.matching; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +import java.util.function.Supplier; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.container.ContainerRequestFilter; +import jakarta.ws.rs.container.PreMatching; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.MultivaluedMap; +import jakarta.ws.rs.ext.Provider; + +import org.jboss.resteasy.reactive.server.vertx.test.framework.ResteasyReactiveUnitTest; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +public class PreMatchAcceptInHeader { + + @RegisterExtension + static ResteasyReactiveUnitTest test = new ResteasyReactiveUnitTest() + .setArchiveProducer(new Supplier<>() { + @Override + public JavaArchive get() { + return ShrinkWrap.create(JavaArchive.class) + .addClass(PathSegmentTest.Resource.class); + } + }); + + @Test + void browserDefault() { + given().accept("text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8") + .when() + .get("test") + .then() + .statusCode(200) + .body(containsString("")); + } + + @Test + void text() { + 
given().accept("text/plain") + .when() + .get("test") + .then() + .statusCode(200) + .body(equalTo("test")); + } + + @Test + void html() { + given().accept("text/html") + .when() + .get("test") + .then() + .statusCode(200) + .body(equalTo("test")); + } + + @Test + void json() { + given().accept("application/json") + .when() + .get("test") + .then() + .statusCode(404); + } + + @Test + void setAcceptToTextInFilter() { + given().accept("application/json") + .header("x-set-accept-to-text", "true") + .when() + .get("test") + .then() + .statusCode(200) + .body(equalTo("test")); + } + + @Path("/test") + public static class Resource { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String text() { + return "text"; + } + + @GET + @Produces(MediaType.TEXT_HTML) + public String html() { + return """ + + + + + Hello World + + + """; + } + } + + @PreMatching + @Provider + public static class SetAcceptHeaderFilter implements ContainerRequestFilter { + + @Override + public void filter(ContainerRequestContext requestContext) { + MultivaluedMap headers = requestContext.getHeaders(); + if ("true".equals(headers.getFirst("x-set-accept-to-text"))) { + headers.putSingle(HttpHeaders.ACCEPT, MediaType.TEXT_PLAIN); + } + } + } +} diff --git a/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/sse/SseServerResource.java b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/sse/SseServerResource.java new file mode 100644 index 0000000000000..650abb0b21cc1 --- /dev/null +++ b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/sse/SseServerResource.java @@ -0,0 +1,108 @@ +package org.jboss.resteasy.reactive.server.vertx.test.sse; + +import java.time.Instant; +import java.util.Objects; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.sse.OutboundSseEvent; +import jakarta.ws.rs.sse.Sse; +import jakarta.ws.rs.sse.SseBroadcaster; +import jakarta.ws.rs.sse.SseEventSink; + +import org.jboss.logging.Logger; + +@Path("sse") +public class SseServerResource { + private static SseBroadcaster sseBroadcaster; + + private static OutboundSseEvent.Builder eventBuilder; + private static CountDownLatch closeLatch; + private static CountDownLatch errorLatch; + + private static final Logger logger = Logger.getLogger(SseServerResource.class); + + @Inject + public SseServerResource(@Context Sse sse) { + logger.info("Initialized SseServerResource " + this.hashCode()); + if (Objects.isNull(eventBuilder)) { + eventBuilder = sse.newEventBuilder(); + } + if (Objects.isNull(sseBroadcaster)) { + sseBroadcaster = sse.newBroadcaster(); + logger.info("Initializing broadcaster " + sseBroadcaster.hashCode()); + sseBroadcaster.onClose(sseEventSink -> { + CountDownLatch latch = SseServerResource.getCloseLatch(); + logger.info(String.format("Called on close, counting down latch %s", latch.hashCode())); + latch.countDown(); + }); + sseBroadcaster.onError((sseEventSink, throwable) -> { + CountDownLatch latch = SseServerResource.getErrorLatch(); + logger.info(String.format("There was an error, counting down latch %s", latch.hashCode())); + latch.countDown(); + }); 
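// Editor's note (not part of this diff): the broadcaster and its onClose/onError listeners are
// static and registered once; the /sse/subscribe endpoint re-arms the static latches via
// setLatches() before registering a new sink, and tests observe them through the
// /sse/onclose-callback and /sse/onerror-callback endpoints, which await the corresponding latch
// with a timeout.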
+ } + } + + @GET + @Path("subscribe") + @Produces(MediaType.SERVER_SENT_EVENTS) + public void subscribe(@Context SseEventSink sseEventSink) { + logger.info(this.hashCode() + " /subscribe"); + setLatches(); + getSseBroadcaster().register(sseEventSink); + sseEventSink.send(eventBuilder.data(sseEventSink.hashCode()).build()); + } + + @POST + @Path("broadcast") + public Response broadcast() { + logger.info(this.hashCode() + " /broadcast"); + getSseBroadcaster().broadcast(eventBuilder.data(Instant.now()).build()); + return Response.ok().build(); + } + + @GET + @Path("onclose-callback") + public Response callback() throws InterruptedException { + logger.info(this.hashCode() + " /onclose-callback, waiting for latch " + closeLatch.hashCode()); + boolean onCloseWasCalled = closeLatch.await(10, TimeUnit.SECONDS); + return Response.ok(onCloseWasCalled).build(); + } + + @GET + @Path("onerror-callback") + public Response errorCallback() throws InterruptedException { + logger.info(this.hashCode() + " /onerror-callback, waiting for latch " + errorLatch.hashCode()); + boolean onErrorWasCalled = errorLatch.await(2, TimeUnit.SECONDS); + return Response.ok(onErrorWasCalled).build(); + } + + private static SseBroadcaster getSseBroadcaster() { + logger.info("using broadcaster " + sseBroadcaster.hashCode()); + return sseBroadcaster; + } + + public static void setLatches() { + closeLatch = new CountDownLatch(1); + errorLatch = new CountDownLatch(1); + logger.info(String.format("Setting latches: \n closeLatch: %s\n errorLatch: %s", + closeLatch.hashCode(), errorLatch.hashCode())); + } + + public static CountDownLatch getCloseLatch() { + return closeLatch; + } + + public static CountDownLatch getErrorLatch() { + return errorLatch; + } +} diff --git a/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/sse/SseServerTestCase.java b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/sse/SseServerTestCase.java new file mode 100644 index 0000000000000..fe9d00c42a5d8 --- /dev/null +++ b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/sse/SseServerTestCase.java @@ -0,0 +1,85 @@ +package org.jboss.resteasy.reactive.server.vertx.test.sse; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.client.ClientBuilder; +import jakarta.ws.rs.client.WebTarget; +import jakarta.ws.rs.sse.SseEventSource; + +import org.hamcrest.Matchers; +import org.jboss.resteasy.reactive.server.vertx.test.framework.ResteasyReactiveUnitTest; +import org.jboss.resteasy.reactive.server.vertx.test.simple.PortProviderUtil; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.restassured.RestAssured; + +public class SseServerTestCase { + + @RegisterExtension + static final ResteasyReactiveUnitTest config = new ResteasyReactiveUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(SseServerResource.class)); + + @Test + public void shouldCallOnCloseOnServer() throws InterruptedException { + System.out.println("####### shouldCallOnCloseOnServer"); + Client client = ClientBuilder.newBuilder().build(); + WebTarget target = client.target(PortProviderUtil.createURI("/sse/subscribe")); + 
try (SseEventSource sse = SseEventSource.target(target).build()) { + CountDownLatch openingLatch = new CountDownLatch(1); + List results = new CopyOnWriteArrayList<>(); + sse.register(event -> { + System.out.println("received data: " + event.readData()); + results.add(event.readData()); + openingLatch.countDown(); + }); + sse.open(); + Assertions.assertTrue(openingLatch.await(3, TimeUnit.SECONDS)); + Assertions.assertEquals(1, results.size()); + sse.close(); + System.out.println("called sse.close() from client"); + RestAssured.get("/sse/onclose-callback") + .then() + .statusCode(200) + .body(Matchers.equalTo("true")); + } + } + + @Test + public void shouldNotTryToSendToClosedSink() throws InterruptedException { + System.out.println("####### shouldNotTryToSendToClosedSink"); + Client client = ClientBuilder.newBuilder().build(); + WebTarget target = client.target(PortProviderUtil.createURI("/sse/subscribe")); + try (SseEventSource sse = SseEventSource.target(target).build()) { + CountDownLatch openingLatch = new CountDownLatch(1); + List results = new ArrayList<>(); + sse.register(event -> { + System.out.println("received data: " + event.readData()); + results.add(event.readData()); + openingLatch.countDown(); + }); + sse.open(); + Assertions.assertTrue(openingLatch.await(3, TimeUnit.SECONDS)); + Assertions.assertEquals(1, results.size()); + sse.close(); + RestAssured.get("/sse/onclose-callback") + .then() + .statusCode(200) + .body(Matchers.equalTo("true")); + RestAssured.post("/sse/broadcast") + .then() + .statusCode(200); + RestAssured.get("/sse/onerror-callback") + .then() + .statusCode(200) + .body(Matchers.equalTo("false")); + } + } +} diff --git a/independent-projects/tools/base-codestarts/src/main/resources/codestarts/quarkus-extension/code/integration-tests/java/integration-tests/pom.tpl.qute.xml b/independent-projects/tools/base-codestarts/src/main/resources/codestarts/quarkus-extension/code/integration-tests/java/integration-tests/pom.tpl.qute.xml index 4e3da1d372638..9237988338bab 100644 --- a/independent-projects/tools/base-codestarts/src/main/resources/codestarts/quarkus-extension/code/integration-tests/java/integration-tests/pom.tpl.qute.xml +++ b/independent-projects/tools/base-codestarts/src/main/resources/codestarts/quarkus-extension/code/integration-tests/java/integration-tests/pom.tpl.qute.xml @@ -25,7 +25,7 @@ io.quarkus - quarkus-resteasy + quarkus-resteasy-reactive {group-id} diff --git a/independent-projects/tools/pom.xml b/independent-projects/tools/pom.xml index 97d342a3d88d6..e280b72b1ab6d 100644 --- a/independent-projects/tools/pom.xml +++ b/independent-projects/tools/pom.xml @@ -48,7 +48,7 @@ 4.4.0 - 3.24.2 + 3.25.1 2.16.1 4.0.1 5.10.1 diff --git a/integration-tests/gradle/pom.xml b/integration-tests/gradle/pom.xml index c434a33d00579..337ece09fe185 100644 --- a/integration-tests/gradle/pom.xml +++ b/integration-tests/gradle/pom.xml @@ -67,6 +67,11 @@ quarkus-devtools-testing test + + org.gradle + gradle-tooling-api + test + @@ -480,4 +485,14 @@ + + + gradle-dependencies + Gradle releases repository + https://repo.gradle.org/artifactory/libs-releases + + false + + + diff --git a/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/EnabledBuildItem.java 
b/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/EnabledBuildItem.java deleted file mode 100644 index f9235d0c8f051..0000000000000 --- a/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/EnabledBuildItem.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.acme.example.extension.deployment; - -import io.quarkus.builder.item.SimpleBuildItem; - - -import java.util.Optional; - -public final class EnabledBuildItem extends SimpleBuildItem { - - private final Boolean enabled; - - public EnabledBuildItem(final Boolean enabled){ - this.enabled=enabled; - } - - public Boolean getEnabled() { - return enabled; - } -} \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/QuarkusExampleProcessor.java b/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/QuarkusExampleProcessor.java index 1a4fb73d087ec..f8556db20ded4 100644 --- a/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/QuarkusExampleProcessor.java +++ b/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/QuarkusExampleProcessor.java @@ -1,15 +1,12 @@ package org.acme.example.extension.deployment; -import org.acme.example.extension.runtime.ExampleRecorder; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.ExecutionTime; -import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.arc.deployment.AdditionalBeanBuildItem; import org.acme.liba.LibA; import org.jboss.jandex.DotName; import io.quarkus.deployment.annotations.BuildProducer; -import org.acme.example.extension.deployment.EnabledBuildItem; import io.quarkus.arc.processor.DotNames; @@ -34,13 +31,4 @@ void addLibABean(BuildProducer additionalBeans) { .build()); } - - @BuildStep - @Record(ExecutionTime.STATIC_INIT) - EnabledBuildItem addLibABean( - final ExampleRecorder exampleRecorder) { - return new EnabledBuildItem(exampleRecorder.create().getValue()); - - } - } diff --git a/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/runtime/src/main/java/org/acme/example/extension/runtime/ExampleRecorder.java b/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/runtime/src/main/java/org/acme/example/extension/runtime/ExampleRecorder.java deleted file mode 100644 index c899e0f1790ca..0000000000000 --- a/integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/runtime/src/main/java/org/acme/example/extension/runtime/ExampleRecorder.java +++ /dev/null @@ -1,27 +0,0 @@ -package org.acme.example.extension.runtime; - -import io.quarkus.runtime.RuntimeValue; -import io.quarkus.runtime.annotations.Recorder; -import 
org.jboss.logmanager.formatters.PatternFormatter; - -import java.util.Optional; -import java.util.logging.Handler; -import java.util.logging.Level; - -@Recorder -public class ExampleRecorder { - - - private final QuarkusExampleExtensionConfig config; - - public ExampleRecorder(QuarkusExampleExtensionConfig config){ - this.config=config; - } - - public RuntimeValue create() { - boolean enabled = config.enabled; - - return new RuntimeValue<>(enabled); - - } -} diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/build.gradle b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/build.gradle new file mode 100644 index 0000000000000..66eaa65715c90 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/build.gradle @@ -0,0 +1,16 @@ +plugins { +} + +repositories { + mavenLocal { + content { + includeGroupByRegex 'io.quarkus.*' + includeGroup 'org.hibernate.orm' + } + } + mavenCentral() +} + +group 'org.acme' +version '1.0.0-SNAPSHOT' + diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/common/build.gradle b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/common/build.gradle new file mode 100644 index 0000000000000..1ffbddf6d0525 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/common/build.gradle @@ -0,0 +1,19 @@ +plugins { + id 'java-library' +} + +group 'org.acme' +version '1.0.0-SNAPSHOT' + +compileJava { + options.encoding = 'UTF-8' + options.compilerArgs << '-parameters' +} + +compileTestJava { + options.encoding = 'UTF-8' +} + +test { + systemProperty "java.util.logging.manager", "org.jboss.logmanager.LogManager" +} diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/common/src/main/java/org/acme/common/Common.java b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/common/src/main/java/org/acme/common/Common.java new file mode 100644 index 0000000000000..f404475fac245 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/common/src/main/java/org/acme/common/Common.java @@ -0,0 +1,5 @@ +package org.acme.common; + + +public class Common { +} diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/componly/build.gradle b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/componly/build.gradle new file mode 100644 index 0000000000000..7963961b03fcc --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/componly/build.gradle @@ -0,0 +1,23 @@ +plugins { + id 'java-library' +} + +dependencies { + implementation project(':common') +} + +group 'org.acme' +version '1.0.0-SNAPSHOT' + +compileJava { + options.encoding = 'UTF-8' + options.compilerArgs << '-parameters' +} + +compileTestJava { + options.encoding = 'UTF-8' +} + +test { + systemProperty "java.util.logging.manager", "org.jboss.logmanager.LogManager" +} diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/componly/src/main/java/org/acme/componly/Componly.java b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/componly/src/main/java/org/acme/componly/Componly.java new file mode 100644 index 0000000000000..734895eb3bfc3 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/componly/src/main/java/org/acme/componly/Componly.java @@ -0,0 +1,6 @@ +package 
org.acme.componly; + + +public class Componly { + +} diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/gradle.properties b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/gradle.properties new file mode 100644 index 0000000000000..8f063b7d88ba4 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/gradle.properties @@ -0,0 +1,2 @@ +quarkusPlatformArtifactId=quarkus-bom +quarkusPlatformGroupId=io.quarkus \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/quarkus/build.gradle b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/quarkus/build.gradle new file mode 100644 index 0000000000000..3b4c931109603 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/quarkus/build.gradle @@ -0,0 +1,42 @@ +plugins { + id 'java' + id 'io.quarkus' +} + +repositories { + mavenLocal { + content { + includeGroupByRegex 'io.quarkus.*' + } + } + mavenCentral() +} + +dependencies { + implementation enforcedPlatform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + implementation 'io.quarkus:quarkus-resteasy' + + implementation project(':common') + + compileOnly project(':componly') +} + +group 'org.acme' +version '1.0.0-SNAPSHOT' + +compileJava { + options.encoding = 'UTF-8' + options.compilerArgs << '-parameters' +} + +compileTestJava { + options.encoding = 'UTF-8' +} + +test { + systemProperty "java.util.logging.manager", "org.jboss.logmanager.LogManager" +} + +test { + systemProperty "java.util.logging.manager", "org.jboss.logmanager.LogManager" +} \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/quarkus/src/main/java/org/acme/app/ExampleResource.java b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/quarkus/src/main/java/org/acme/app/ExampleResource.java new file mode 100644 index 0000000000000..75403e07af53d --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/quarkus/src/main/java/org/acme/app/ExampleResource.java @@ -0,0 +1,16 @@ +package org.acme.app; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; + +@Path("/hello") +public class ExampleResource { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String hello() { + return "hello!"; + } +} diff --git a/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/settings.gradle b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/settings.gradle new file mode 100644 index 0000000000000..a393880f63bce --- /dev/null +++ b/integration-tests/gradle/src/main/resources/compile-only-dependency-flags/settings.gradle @@ -0,0 +1,20 @@ +pluginManagement { + repositories { + mavenLocal { + content { + includeGroupByRegex 'io.quarkus.*' + includeGroup 'org.hibernate.orm' + } + } + mavenCentral() + gradlePluginPortal() + } + plugins { + id 'io.quarkus' version "${quarkusPluginVersion}" + } +} + +include ':quarkus', ':componly', ':common' + +rootProject.name = 'code-with-quarkus' + diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/build.gradle new file mode 100644 index 
0000000000000..0a61da24e93a0 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/build.gradle @@ -0,0 +1,30 @@ +plugins{ + id "java" + id "io.quarkus" +} + + + +group 'io.quarkus.test.application' +version '1.0-SNAPSHOT' + + +repositories { + mavenLocal() + mavenCentral() +} + +dependencies { + implementation enforcedPlatform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + testImplementation 'org.junit.jupiter:junit-jupiter-api' + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine' + implementation 'io.quarkus:quarkus-resteasy' + implementation ('org.acme.libs:libraryB') + implementation ('org.acme.libs:libraryA') + implementation ('org.acme.extensions:another-example-extension') + +} + +test { + useJUnitPlatform() +} diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/gradle.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/gradle.properties new file mode 100644 index 0000000000000..ec2b6ef199c2c --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/gradle.properties @@ -0,0 +1,2 @@ +quarkusPlatformArtifactId=quarkus-bom +quarkusPlatformGroupId=io.quarkus diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/settings.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/settings.gradle new file mode 100644 index 0000000000000..f1dbf32c18c3f --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/settings.gradle @@ -0,0 +1,22 @@ +pluginManagement { + repositories { + mavenLocal { + content { + includeGroupByRegex 'io.quarkus.*' + includeGroup 'org.hibernate.orm' + } + } + mavenCentral() + gradlePluginPortal() + } + //noinspection GroovyAssignabilityCheck + plugins { + id 'io.quarkus' version "${quarkusPluginVersion}" + } +} + +includeBuild('../libraries') +includeBuild('../extensions/example-extension') +includeBuild('../extensions/another-example-extension') + +rootProject.name='application' diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/java/org/acme/quarkus/sample/HelloResource.java b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/java/org/acme/quarkus/sample/HelloResource.java new file mode 100644 index 0000000000000..69c983c7cdfa3 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/java/org/acme/quarkus/sample/HelloResource.java @@ -0,0 +1,29 @@ +package org.acme.quarkus.sample; + +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; + +import org.acme.libb.LibB; +import org.acme.liba.LibA; +import org.acme.example.extension.runtime.QuarkusExampleExtensionConfig; + +@Path("/hello") +public class HelloResource { + + @Inject + LibB libB; + @Inject + LibA libA; + + @Inject + private QuarkusExampleExtensionConfig config; + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String hello() { + return "hello from " + libB.getName()+" and "+libA.getName()+" extension enabled: "+config.enabled; + } +} diff --git 
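(Illustrative aside: the dev-mode test later in this change drives `/hello` over HTTP and expects "hello from LibB and LibA extension enabled: false", which mirrors what `HelloResource.hello()` returns with `quarkus.example.extension.enabled=false`. A plain `@QuarkusTest` for the same endpoint could look like the sketch below; this test class does not exist in the change set and is only a hypothetical usage example.)

```java
package org.acme.quarkus.sample;

import static io.restassured.RestAssured.given;
import static org.hamcrest.CoreMatchers.containsString;

import org.junit.jupiter.api.Test;

import io.quarkus.test.junit.QuarkusTest;

@QuarkusTest
class HelloResourceTest {

    @Test
    void helloEndpointReportsInjectedBeansAndConfig() {
        // LibA and LibB come from the composite-build libraries; the flag is
        // quarkus.example.extension.enabled from application.properties (false here)
        given()
                .when().get("/hello")
                .then()
                .statusCode(200)
                .body(containsString("hello from LibB and LibA extension enabled: false"));
    }
}
```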
a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/resources/META-INF/resources/index.html b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/resources/META-INF/resources/index.html new file mode 100644 index 0000000000000..eadceb1f2f9c8 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/resources/META-INF/resources/index.html @@ -0,0 +1,155 @@ + + + + + my-quarkus-project - 1.0-SNAPSHOT + + + + +

+ [body of the added index.html: the standard generated Quarkus welcome page. It shows "Congratulations, you have created a new Quarkus application.", explains that the page is served from src/main/resources/META-INF/resources/index.html, suggests running mvn compile quarkus:dev for dev mode, points to src/main/java for REST resources, src/main/resources/META-INF/resources for static assets and src/main/resources/application.properties for configuration, and lists the application metadata: GroupId org.acme.quarkus.sample, ArtifactId my-quarkus-project, Version 1.0-SNAPSHOT, Quarkus Version 999-SNAPSHOT. (HTML markup of the 155 added lines omitted.)]
+ + + + \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/resources/application.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/resources/application.properties new file mode 100644 index 0000000000000..cbd08285d7bc9 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/application/src/main/resources/application.properties @@ -0,0 +1,4 @@ +# Configuration file +# key = value +quarkus.example.extension.enabled=false +quarkus.anotherExample.extension.enabled=false diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/build.gradle new file mode 100644 index 0000000000000..464a421fce2a9 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/build.gradle @@ -0,0 +1,34 @@ +plugins{ + id 'java-library' + id 'maven-publish' +} +subprojects {subProject-> + apply plugin: 'java-library' + apply plugin: 'maven-publish' + + group 'org.acme.extensions' + version '1.0-SNAPSHOT' + publishing { + publications { + maven(MavenPublication) { + groupId = 'org.acme.extensions' + artifactId = subProject.name + version = '1.0-SNAPSHOT' + from components.java + } + } + } +} + +publishing { + publications { + maven(MavenPublication) { + groupId = 'org.acme.extensions' + artifactId = rootProject.name + version = '1.0-SNAPSHOT' + from components.java + } + } +} +group 'org.acme.extensions' +version '1.0-SNAPSHOT' \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/build.gradle new file mode 100644 index 0000000000000..2114fbe38a983 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/build.gradle @@ -0,0 +1,27 @@ +plugins { + id 'java' + id 'java-library' +} +repositories { + mavenLocal() + mavenCentral() +} + +dependencies { + implementation platform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + annotationProcessor "io.quarkus:quarkus-extension-processor:${quarkusPlatformVersion}" + + + api project(':another-example-extension') // why: https://quarkus.io/guides/building-my-first-extension + implementation 'io.quarkus:quarkus-core-deployment' + implementation 'io.quarkus:quarkus-arc-deployment' + implementation ('org.acme.libs:libraryB') + + testImplementation 'io.quarkus:quarkus-smallrye-health' +} + +java { + // withJavadocJar() + withSourcesJar() +} + diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/src/main/java/org/acme/anotherExample/extension/deployment/QuarkusAnotherExampleProcessor.java 
b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/src/main/java/org/acme/anotherExample/extension/deployment/QuarkusAnotherExampleProcessor.java new file mode 100644 index 0000000000000..71f1b1ade740d --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/src/main/java/org/acme/anotherExample/extension/deployment/QuarkusAnotherExampleProcessor.java @@ -0,0 +1,32 @@ +package org.acme.anotherExample.extension.deployment; + +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.FeatureBuildItem; +import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import org.acme.libb.LibB; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.arc.processor.DotNames; + + + + + +class QuarkusAnotherExampleProcessor { + + private static final String FEATURE = "another-example"; + + @BuildStep + FeatureBuildItem feature() { + return new FeatureBuildItem(FEATURE); + } + + @BuildStep + void addLibABean(BuildProducer additionalBeans) { + additionalBeans.produce(new AdditionalBeanBuildItem.Builder() + .addBeanClasses(LibB.class) + .setUnremovable() + .setDefaultScope(DotNames.APPLICATION_SCOPED) + .build()); + } + +} diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/src/test/resources/application.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/src/test/resources/application.properties new file mode 100644 index 0000000000000..d1b1b92a901b0 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/deployment/src/test/resources/application.properties @@ -0,0 +1 @@ +quarkus.log.level=INFO \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/gradle.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/gradle.properties new file mode 100644 index 0000000000000..ec2b6ef199c2c --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/gradle.properties @@ -0,0 +1,2 @@ +quarkusPlatformArtifactId=quarkus-bom +quarkusPlatformGroupId=io.quarkus diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/build.gradle new file mode 100644 index 0000000000000..682b8101db8d2 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/build.gradle @@ -0,0 +1,24 @@ + +plugins { + id 'io.quarkus.extension' +} + +quarkusExtension { + deploymentModule = 'another-example-extension-deployment' +} + +repositories { + mavenLocal() + mavenCentral() +} + +dependencies { + implementation platform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + implementation ('org.acme.libs:libraryB') + annotationProcessor 
"io.quarkus:quarkus-extension-processor:${quarkusPlatformVersion}" + implementation 'io.quarkus:quarkus-core' + implementation 'io.quarkus:quarkus-arc' + + api ('org.acme.extensions:example-extension') +} + diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/src/main/java/org/acme/anotherExample/extension/runtime/QuarkusAnotherExampleExtensionConfig.java b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/src/main/java/org/acme/anotherExample/extension/runtime/QuarkusAnotherExampleExtensionConfig.java new file mode 100644 index 0000000000000..58ccaa8fe8560 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/src/main/java/org/acme/anotherExample/extension/runtime/QuarkusAnotherExampleExtensionConfig.java @@ -0,0 +1,16 @@ +package org.acme.anotherExample.extension.runtime; + +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +@ConfigRoot(phase = ConfigPhase.RUN_TIME, name = "anotherExample.extension") +public class QuarkusAnotherExampleExtensionConfig { + + /** + * A Simple example flag + */ + @ConfigItem(name = "enabled", defaultValue = "false") + public boolean enabled; + +} diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/src/test/resources/application.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/src/test/resources/application.properties new file mode 100644 index 0000000000000..d1b1b92a901b0 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/runtime/src/test/resources/application.properties @@ -0,0 +1 @@ +quarkus.log.level=INFO \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/settings.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/settings.gradle new file mode 100644 index 0000000000000..312c984bd69dd --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/another-example-extension/settings.gradle @@ -0,0 +1,23 @@ +pluginManagement { + repositories { + gradlePluginPortal() + mavenLocal() + } + plugins { + id 'io.quarkus.extension' version "${quarkusPluginVersion}" + } +} +dependencyResolutionManagement { + repositories { + mavenLocal() + mavenCentral() + } + +} +includeBuild('../../libraries') +includeBuild('../example-extension') +rootProject.name = 'another-example-extension-parent' +include(':deployment') +include(':runtime') +project(':deployment').name='another-example-extension-deployment' +project(':runtime').name='another-example-extension' diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/build.gradle new file mode 100644 index 0000000000000..464a421fce2a9 --- /dev/null +++ 
b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/build.gradle @@ -0,0 +1,34 @@ +plugins{ + id 'java-library' + id 'maven-publish' +} +subprojects {subProject-> + apply plugin: 'java-library' + apply plugin: 'maven-publish' + + group 'org.acme.extensions' + version '1.0-SNAPSHOT' + publishing { + publications { + maven(MavenPublication) { + groupId = 'org.acme.extensions' + artifactId = subProject.name + version = '1.0-SNAPSHOT' + from components.java + } + } + } +} + +publishing { + publications { + maven(MavenPublication) { + groupId = 'org.acme.extensions' + artifactId = rootProject.name + version = '1.0-SNAPSHOT' + from components.java + } + } +} +group 'org.acme.extensions' +version '1.0-SNAPSHOT' \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/build.gradle new file mode 100644 index 0000000000000..6afc9cc3b84d7 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/build.gradle @@ -0,0 +1,27 @@ +plugins { + id 'java' + id 'java-library' +} +repositories { + mavenLocal() + mavenCentral() +} + +dependencies { + implementation platform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + annotationProcessor "io.quarkus:quarkus-extension-processor:${quarkusPlatformVersion}" + + + api project(':example-extension') // why: https://quarkus.io/guides/building-my-first-extension + implementation 'io.quarkus:quarkus-core-deployment' + implementation 'io.quarkus:quarkus-arc-deployment' + implementation ('org.acme.libs:libraryA') + + testImplementation 'io.quarkus:quarkus-smallrye-health' +} + +java { + // withJavadocJar() + withSourcesJar() +} + diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/QuarkusExampleProcessor.java b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/QuarkusExampleProcessor.java new file mode 100644 index 0000000000000..f8556db20ded4 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/QuarkusExampleProcessor.java @@ -0,0 +1,34 @@ +package org.acme.example.extension.deployment; + +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.ExecutionTime; +import io.quarkus.deployment.builditem.FeatureBuildItem; +import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import org.acme.liba.LibA; +import org.jboss.jandex.DotName; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.arc.processor.DotNames; + + + + + +class QuarkusExampleProcessor { + + private static final String FEATURE = "example"; + + @BuildStep + FeatureBuildItem feature() { + return new FeatureBuildItem(FEATURE); + } + + @BuildStep + void addLibABean(BuildProducer additionalBeans) { + additionalBeans.produce(new AdditionalBeanBuildItem.Builder() + .addBeanClasses(LibA.class) + .setUnremovable() + 
.setDefaultScope(DotNames.APPLICATION_SCOPED) + .build()); + } + +} diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/src/test/resources/application.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/src/test/resources/application.properties new file mode 100644 index 0000000000000..d1b1b92a901b0 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/deployment/src/test/resources/application.properties @@ -0,0 +1 @@ +quarkus.log.level=INFO \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/gradle.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/gradle.properties new file mode 100644 index 0000000000000..ec2b6ef199c2c --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/gradle.properties @@ -0,0 +1,2 @@ +quarkusPlatformArtifactId=quarkus-bom +quarkusPlatformGroupId=io.quarkus diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/build.gradle new file mode 100644 index 0000000000000..8a71ff331c8f6 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/build.gradle @@ -0,0 +1,22 @@ + +plugins { + id 'io.quarkus.extension' +} + +quarkusExtension { + deploymentModule = 'example-extension-deployment' +} + +repositories { + mavenLocal() + mavenCentral() +} + +dependencies { + implementation platform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + implementation ('org.acme.libs:libraryA') + annotationProcessor "io.quarkus:quarkus-extension-processor:${quarkusPlatformVersion}" + implementation 'io.quarkus:quarkus-core' + implementation 'io.quarkus:quarkus-arc' +} + diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/java/org/acme/example/extension/runtime/QuarkusExampleExtensionConfig.java b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/java/org/acme/example/extension/runtime/QuarkusExampleExtensionConfig.java new file mode 100644 index 0000000000000..9b667a5b7e030 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/java/org/acme/example/extension/runtime/QuarkusExampleExtensionConfig.java @@ -0,0 +1,16 @@ +package org.acme.example.extension.runtime; + +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +@ConfigRoot(phase = ConfigPhase.RUN_TIME, name="example.extension") +public class QuarkusExampleExtensionConfig { + + /** + * A Simple example flag + */ + @ConfigItem(name = "enabled", defaultValue = "false") + public boolean enabled; + +} diff --git 
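(Illustrative aside: `QuarkusExampleExtensionConfig` above is a run-time `@ConfigRoot` named `example.extension`, so its `enabled` item is read from `quarkus.example.extension.enabled`, the key the sample application.properties sets to `false`. Application code consumes it by injecting the config class, as `HelloResource` does; the bean below is a minimal hypothetical consumer, not part of the project.)

```java
package org.acme.quarkus.sample;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

import org.acme.example.extension.runtime.QuarkusExampleExtensionConfig;

@ApplicationScoped
public class ExampleFeatureToggle {

    @Inject
    QuarkusExampleExtensionConfig config;

    /**
     * Reflects quarkus.example.extension.enabled from application.properties.
     * The config root uses ConfigPhase.RUN_TIME, so the value is resolved at
     * application start rather than baked in at build time.
     */
    public boolean isFeatureEnabled() {
        return config.enabled;
    }
}
```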
a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/resources/META-INF/quarkus-extension.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/resources/META-INF/quarkus-extension.properties new file mode 100644 index 0000000000000..2e1a6326847e1 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/resources/META-INF/quarkus-extension.properties @@ -0,0 +1 @@ +deployment-artifact=org.acme.extensions\:example-extension-deployment\:1.0 \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/resources/META-INF/quarkus-extension.yaml new file mode 100644 index 0000000000000..12a5c710c9e82 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -0,0 +1,12 @@ +--- +name: Quarkus Example Extension +artifact: ${project.groupId}:${project.artifactId}:${project.version} +metadata: + config: + - "quarkus.example.extension." + keywords: + - "logzio" + - "logging" + categories: + - "logging" +description: "Quarkus example extension" \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/test/resources/application.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/test/resources/application.properties new file mode 100644 index 0000000000000..d1b1b92a901b0 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/runtime/src/test/resources/application.properties @@ -0,0 +1 @@ +quarkus.log.level=INFO \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/settings.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/settings.gradle new file mode 100644 index 0000000000000..04f14c2ebcefe --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/extensions/example-extension/settings.gradle @@ -0,0 +1,22 @@ +pluginManagement { + repositories { + gradlePluginPortal() + mavenLocal() + } + plugins { + id 'io.quarkus.extension' version "${quarkusPluginVersion}" + } +} +dependencyResolutionManagement { + repositories { + mavenLocal() + mavenCentral() + } + +} +includeBuild('../../libraries') +rootProject.name = 'example-extension-parent' +include(':deployment') +include(':runtime') +project(':deployment').name='example-extension-deployment' +project(':runtime').name='example-extension' \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/gradle.properties b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/gradle.properties new file mode 
100644 index 0000000000000..8f063b7d88ba4 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/gradle.properties @@ -0,0 +1,2 @@ +quarkusPlatformArtifactId=quarkus-bom +quarkusPlatformGroupId=io.quarkus \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryA/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryA/build.gradle new file mode 100644 index 0000000000000..75702bcc346db --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryA/build.gradle @@ -0,0 +1,25 @@ +plugins{ + id "java-library" +} + + + +group 'org.acme.libs' +version '1.0-SNAPSHOT' + + +repositories { + mavenLocal() + mavenCentral() +} + +dependencies { + implementation platform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + implementation ("${quarkusPlatformGroupId}:quarkus-arc:${quarkusPlatformVersion}") + testImplementation 'org.junit.jupiter:junit-jupiter-api' + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine' +} + +test { + useJUnitPlatform() +} \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryA/src/main/java/org/acme/liba/LibA.java b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryA/src/main/java/org/acme/liba/LibA.java new file mode 100644 index 0000000000000..c53c4b62666a1 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryA/src/main/java/org/acme/liba/LibA.java @@ -0,0 +1,12 @@ +package org.acme.liba; + +import jakarta.enterprise.context.ApplicationScoped; + +@ApplicationScoped +public class LibA{ + + public String getName(){ + return "LibA"; + } + +} diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryA/src/main/resources/META-INF/beans.xml b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryA/src/main/resources/META-INF/beans.xml new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryB/build.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryB/build.gradle new file mode 100644 index 0000000000000..52e8e3a17fed5 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryB/build.gradle @@ -0,0 +1,26 @@ +plugins{ + id "java-library" +} + + + +group 'org.acme.libs' +version '1.0-SNAPSHOT' + + +repositories { + mavenLocal() + mavenCentral() +} + +dependencies { + implementation platform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + implementation ("${quarkusPlatformGroupId}:quarkus-arc:${quarkusPlatformVersion}") + testImplementation 'org.junit.jupiter:junit-jupiter-api' + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine' + implementation project(':libraryA') +} + +test { + useJUnitPlatform() +} \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryB/src/main/java/org/acme/libb/LibB.java 
b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryB/src/main/java/org/acme/libb/LibB.java new file mode 100644 index 0000000000000..b84b4a9ae4eb8 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryB/src/main/java/org/acme/libb/LibB.java @@ -0,0 +1,12 @@ +package org.acme.libb; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +@ApplicationScoped +public class LibB{ + + public String getName(){ + return "LibB"; + } +} diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryB/src/main/resources/META-INF/beans.xml b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/libraryB/src/main/resources/META-INF/beans.xml new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/settings.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/settings.gradle new file mode 100644 index 0000000000000..4516d648369e4 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/libraries/settings.gradle @@ -0,0 +1,20 @@ +pluginManagement { + repositories { + mavenLocal { + content { + includeGroupByRegex 'io.quarkus.*' + includeGroup 'org.hibernate.orm' + } + } + mavenCentral() + gradlePluginPortal() + } + //noinspection GroovyAssignabilityCheck + plugins { + id 'io.quarkus' version "${quarkusPluginVersion}" + } +} +rootProject.name='libraries' + +include('libraryA') +include('libraryB') diff --git a/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/settings.gradle b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/settings.gradle new file mode 100644 index 0000000000000..437f4c78b6645 --- /dev/null +++ b/integration-tests/gradle/src/main/resources/multi-composite-build-extensions-project/settings.gradle @@ -0,0 +1,21 @@ +pluginManagement { + repositories { + mavenLocal { + content { + includeGroupByRegex 'io.quarkus.*' + includeGroup 'org.hibernate.orm' + } + } + mavenCentral() + gradlePluginPortal() + } + //noinspection GroovyAssignabilityCheck + plugins { + id 'io.quarkus' version "${quarkusPluginVersion}" + } +} + +includeBuild('extensions/example-extension') +includeBuild('extensions/another-example-extension') +includeBuild('libraries') +includeBuild('application') diff --git a/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/deployment/settings.gradle b/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/deployment/settings.gradle deleted file mode 100644 index 6ffb501bf9d78..0000000000000 --- a/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/deployment/settings.gradle +++ /dev/null @@ -1 +0,0 @@ -rootProject.name='runtime-deployment' \ No newline at end of file diff --git a/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/runtime/settings.gradle b/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/runtime/settings.gradle deleted file mode 100644 index b8cd5218d74f2..0000000000000 --- a/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/runtime/settings.gradle +++ /dev/null @@ -1 +0,0 @@ -rootProject.name='runtime' \ No newline at 
end of file diff --git a/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/runtime/src/main/resources/META-INF/quarkus-extension.properties b/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/runtime/src/main/resources/META-INF/quarkus-extension.properties index 5a16b93d4d819..bb2fe5cca5b8a 100644 --- a/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/runtime/src/main/resources/META-INF/quarkus-extension.properties +++ b/integration-tests/gradle/src/main/resources/test-resources-in-build-steps/runtime/src/main/resources/META-INF/quarkus-extension.properties @@ -1,3 +1,3 @@ #Generated by extension-descriptor #Sat May 23 23:34:34 CEST 2020 -deployment-artifact=org.acme\:runtime-deployment\:1.0-SNAPSHOT +deployment-artifact=org.acme\:deployment\:1.0-SNAPSHOT diff --git a/integration-tests/gradle/src/test/java/io/quarkus/gradle/CompileOnlyDependencyFlagsTest.java b/integration-tests/gradle/src/test/java/io/quarkus/gradle/CompileOnlyDependencyFlagsTest.java new file mode 100644 index 0000000000000..13a55a7d19b1f --- /dev/null +++ b/integration-tests/gradle/src/test/java/io/quarkus/gradle/CompileOnlyDependencyFlagsTest.java @@ -0,0 +1,165 @@ +package io.quarkus.gradle; + +import java.io.File; +import java.util.*; +import java.util.concurrent.CompletableFuture; + +import org.gradle.tooling.BuildAction; +import org.gradle.tooling.BuildController; +import org.gradle.tooling.GradleConnectionException; +import org.gradle.tooling.GradleConnector; +import org.gradle.tooling.ProjectConnection; +import org.gradle.tooling.ResultHandler; +import org.gradle.wrapper.GradleUserHomeLookup; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import io.quarkus.bootstrap.model.ApplicationModel; +import io.quarkus.bootstrap.model.gradle.ModelParameter; +import io.quarkus.maven.dependency.ArtifactCoords; +import io.quarkus.maven.dependency.DependencyFlags; +import io.quarkus.runtime.LaunchMode; + +public class CompileOnlyDependencyFlagsTest { + + @Test + public void compileOnlyFlags() throws Exception { + var projectDir = QuarkusGradleTestBase.getProjectDir("compile-only-dependency-flags"); + + final String componly = ArtifactCoords.jar("org.acme", "componly", "1.0.0-SNAPSHOT").toCompactCoords(); + final String common = ArtifactCoords.jar("org.acme", "common", "1.0.0-SNAPSHOT").toCompactCoords(); + var expectedCompileOnly = Set.of(componly, common); + + final Map> compileOnlyDeps; + try (ProjectConnection connection = GradleConnector.newConnector() + .forProjectDirectory(new File(projectDir, "quarkus")) + .useGradleUserHomeDir(GradleUserHomeLookup.gradleUserHome()) + .connect()) { + final GradleActionOutcome>> outcome = GradleActionOutcome.of(); + connection.action((BuildAction>>) controller -> { + var result = new HashMap>(); + result.put(LaunchMode.DEVELOPMENT.name(), readCompileOnlyDeps(controller, LaunchMode.DEVELOPMENT.name())); + result.put(LaunchMode.TEST.name(), readCompileOnlyDeps(controller, LaunchMode.TEST.name())); + result.put(LaunchMode.NORMAL.name(), readCompileOnlyDeps(controller, LaunchMode.NORMAL.name())); + return result; + }).run(outcome); + compileOnlyDeps = outcome.getResult(); + } + + var compileOnly = compileOnlyDeps.get(LaunchMode.DEVELOPMENT.name()); + // the following line results in ClassNotFoundException: com.sun.jna.Library + // assertThat(compileOnly).containsOnlyKeys(expectedCompileOnly); + // so I am not using the assertj api here + assertEqual(compileOnly, expectedCompileOnly); + 
assertOnlyFlagsSet(common, compileOnly.get(common), + DependencyFlags.COMPILE_ONLY, + DependencyFlags.RUNTIME_CP, + DependencyFlags.DEPLOYMENT_CP, + DependencyFlags.RELOADABLE, + DependencyFlags.WORKSPACE_MODULE, + DependencyFlags.DIRECT); + assertOnlyFlagsSet(componly, compileOnly.get(componly), + DependencyFlags.COMPILE_ONLY, + DependencyFlags.RUNTIME_CP, + DependencyFlags.DEPLOYMENT_CP, + DependencyFlags.RELOADABLE, + DependencyFlags.WORKSPACE_MODULE, + DependencyFlags.DIRECT); + + compileOnly = compileOnlyDeps.get(LaunchMode.TEST.name()); + assertEqual(compileOnly, expectedCompileOnly); + assertOnlyFlagsSet(common, compileOnly.get(common), + DependencyFlags.COMPILE_ONLY, + DependencyFlags.RUNTIME_CP, + DependencyFlags.DEPLOYMENT_CP, + DependencyFlags.RELOADABLE, + DependencyFlags.WORKSPACE_MODULE, + DependencyFlags.DIRECT); + assertOnlyFlagsSet(componly, compileOnly.get(componly), + DependencyFlags.COMPILE_ONLY); + + compileOnly = compileOnlyDeps.get(LaunchMode.NORMAL.name()); + assertEqual(compileOnly, expectedCompileOnly); + assertOnlyFlagsSet(common, compileOnly.get(common), + DependencyFlags.COMPILE_ONLY, + DependencyFlags.RUNTIME_CP, + DependencyFlags.DEPLOYMENT_CP, + DependencyFlags.DIRECT); + assertOnlyFlagsSet(componly, compileOnly.get(componly), + DependencyFlags.COMPILE_ONLY); + } + + private static void assertOnlyFlagsSet(String coords, int flags, int... expectedFlags) { + int expected = 0; + for (var i : expectedFlags) { + expected |= i; + } + if (expected == flags) { + return; + } + StringBuilder sb = null; + for (var flag : expectedFlags) { + if ((flags & flag) != flag) { + if (sb == null) { + sb = new StringBuilder().append("Expected ").append(coords).append(" to have ").append(flag); + } else { + sb.append(", ").append(flag); + } + } + } + if (sb != null) { + Assertions.fail(sb.toString()); + } + Assertions.fail("Extra flags are set for " + coords + ": " + (flags - expected)); + } + + private static void assertEqual(Map compileOnly, Set expectedCompileOnly) { + if (!compileOnly.keySet().equals(expectedCompileOnly)) { + Assertions.fail("Expected " + expectedCompileOnly + " but got " + compileOnly.keySet()); + } + } + + private static Map readCompileOnlyDeps(BuildController controller, String modeName) { + var model = controller.getModel(ApplicationModel.class, ModelParameter.class, mode -> mode.setMode(modeName)); + var result = new HashMap(); + for (var d : model.getDependencies(DependencyFlags.COMPILE_ONLY)) { + result.put(ArtifactCoords.of( + d.getGroupId(), d.getArtifactId(), d.getClassifier(), d.getType(), d.getVersion()).toCompactCoords(), + d.getFlags()); + } + return result; + } + + public static class GradleActionOutcome implements ResultHandler { + + public static GradleActionOutcome of() { + return new GradleActionOutcome(); + } + + private CompletableFuture future = new CompletableFuture<>(); + private Exception error; + + public T getResult() { + try { + T result = future.get(); + if (error == null) { + return result; + } + } catch (Exception e) { + throw new RuntimeException("Failed to perform a Gradle action", e); + } + throw new RuntimeException("Failed to perform a Gradle action", error); + } + + @Override + public void onComplete(T result) { + future.complete(result); + } + + @Override + public void onFailure(GradleConnectionException failure) { + this.error = failure; + future.complete(null); + } + } +} diff --git a/integration-tests/gradle/src/test/java/io/quarkus/gradle/MultiCompositeBuildExtensionsQuarkusBuildTest.java 
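(Illustrative aside: `assertOnlyFlagsSet` above treats `io.quarkus.maven.dependency.DependencyFlags` as an int bitmask, OR-ing the expected flags into a single mask and checking individual flags with bitwise AND. The self-contained helper below restates that check; the class and method names are hypothetical.)

```java
import io.quarkus.maven.dependency.DependencyFlags;

public class DependencyFlagCheck {

    /**
     * DependencyFlags values are int bit flags, so one dependency can carry several
     * at once (e.g. COMPILE_ONLY | DIRECT | RUNTIME_CP). This mirrors the check done
     * by assertOnlyFlagsSet in the test above: every expected flag must be set and
     * no extra bits may remain.
     */
    public static boolean hasExactly(int actualFlags, int... expectedFlags) {
        int expected = 0;
        for (int flag : expectedFlags) {
            expected |= flag; // OR the expected flags into a single mask
        }
        return actualFlags == expected;
    }

    public static boolean isSet(int actualFlags, int flag) {
        return (actualFlags & flag) == flag;
    }

    public static void main(String[] args) {
        int flags = DependencyFlags.COMPILE_ONLY | DependencyFlags.DIRECT | DependencyFlags.RUNTIME_CP;
        System.out.println(isSet(flags, DependencyFlags.COMPILE_ONLY)); // true
        System.out.println(hasExactly(flags, DependencyFlags.COMPILE_ONLY, DependencyFlags.DIRECT)); // false: RUNTIME_CP is also set
    }
}
```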
b/integration-tests/gradle/src/test/java/io/quarkus/gradle/MultiCompositeBuildExtensionsQuarkusBuildTest.java new file mode 100644 index 0000000000000..90f2066b67b20 --- /dev/null +++ b/integration-tests/gradle/src/test/java/io/quarkus/gradle/MultiCompositeBuildExtensionsQuarkusBuildTest.java @@ -0,0 +1,69 @@ +package io.quarkus.gradle; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; + +import org.junit.jupiter.api.Test; + +public class MultiCompositeBuildExtensionsQuarkusBuildTest extends QuarkusGradleWrapperTestBase { + + @Test + public void testBasicMultiModuleBuild() throws Exception { + + final File projectDir = getProjectDir("multi-composite-build-extensions-project"); + + final File appProperties = new File(projectDir, "application/gradle.properties"); + final File libsProperties = new File(projectDir, "libraries/gradle.properties"); + final File extensionProperties = new File(projectDir, "extensions/example-extension/gradle.properties"); + final File anotherExtensionProperties = new File(projectDir, "extensions/another-example-extension/gradle.properties"); + + final Path projectProperties = projectDir.toPath().resolve("gradle.properties"); + + try { + Files.copy(projectProperties, appProperties.toPath(), StandardCopyOption.REPLACE_EXISTING); + Files.copy(projectProperties, libsProperties.toPath(), StandardCopyOption.REPLACE_EXISTING); + Files.copy(projectProperties, extensionProperties.toPath(), StandardCopyOption.REPLACE_EXISTING); + Files.copy(projectProperties, anotherExtensionProperties.toPath(), StandardCopyOption.REPLACE_EXISTING); + } catch (IOException e) { + throw new IllegalStateException("Unable to copy gradle.properties file", e); + } + + runGradleWrapper(projectDir, ":application:quarkusBuild"); + + final Path extension = projectDir.toPath().resolve("extensions").resolve("example-extension").resolve("runtime") + .resolve("build") + .resolve("libs"); + assertThat(extension).exists(); + assertThat(extension.resolve("example-extension-1.0-SNAPSHOT.jar")).exists(); + + final Path anotherExtension = projectDir.toPath().resolve("extensions").resolve("another-example-extension") + .resolve("runtime") + .resolve("build"); + + assertThat(anotherExtension).exists(); + assertThat(anotherExtension.resolve("resources/main/META-INF/quarkus-extension.yaml")).exists(); + + final Path libA = projectDir.toPath().resolve("libraries").resolve("libraryA").resolve("build").resolve("libs"); + assertThat(libA).exists(); + assertThat(libA.resolve("libraryA-1.0-SNAPSHOT.jar")).exists(); + + final Path libB = projectDir.toPath().resolve("libraries").resolve("libraryB").resolve("build").resolve("libs"); + assertThat(libB).exists(); + assertThat(libB.resolve("libraryB-1.0-SNAPSHOT.jar")).exists(); + + final Path applicationLib = projectDir.toPath().resolve("application").resolve("build").resolve("quarkus-app"); + assertThat(applicationLib.resolve("lib").resolve("main").resolve("org.acme.libs.libraryA-1.0-SNAPSHOT.jar")).exists(); + assertThat(applicationLib.resolve("lib").resolve("main").resolve("org.acme.libs.libraryB-1.0-SNAPSHOT.jar")).exists(); + assertThat(applicationLib.resolve("lib").resolve("main") + .resolve("org.acme.extensions.example-extension-1.0-SNAPSHOT.jar")).exists(); + assertThat(applicationLib.resolve("lib").resolve("main") + .resolve("org.acme.extensions.another-example-extension-1.0-SNAPSHOT.jar")).exists(); + + 
assertThat(applicationLib.resolve("app").resolve("application-1.0-SNAPSHOT.jar")).exists(); + } +} diff --git a/integration-tests/gradle/src/test/java/io/quarkus/gradle/QuarkusGradleWrapperTestBase.java b/integration-tests/gradle/src/test/java/io/quarkus/gradle/QuarkusGradleWrapperTestBase.java index 45d2a4d7d683a..6612582f07413 100644 --- a/integration-tests/gradle/src/test/java/io/quarkus/gradle/QuarkusGradleWrapperTestBase.java +++ b/integration-tests/gradle/src/test/java/io/quarkus/gradle/QuarkusGradleWrapperTestBase.java @@ -1,7 +1,11 @@ package io.quarkus.gradle; +import java.io.BufferedReader; +import java.io.BufferedWriter; import java.io.File; +import java.io.FileWriter; import java.io.IOException; +import java.lang.management.ManagementFactory; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; @@ -55,10 +59,17 @@ public BuildResult runGradleWrapper(boolean expectError, File projectDir, String public BuildResult runGradleWrapper(boolean expectError, File projectDir, boolean skipAnalytics, String... args) throws IOException, InterruptedException { + boolean isInCiPipeline = "true".equals(System.getenv("CI")); + setupTestCommand(); List command = new ArrayList<>(); command.add(getGradleWrapperCommand()); addSystemProperties(command); + + if (!isInCiPipeline && isDebuggerConnected()) { + command.add("-Dorg.gradle.debug=true"); + } + command.add("-Dorg.gradle.console=plain"); if (skipAnalytics) { command.add("-Dquarkus.analytics.disabled=true"); @@ -81,7 +92,6 @@ public BuildResult runGradleWrapper(boolean expectError, File projectDir, boolea .directory(projectDir) .command(command) .redirectInput(ProcessBuilder.Redirect.INHERIT) - .redirectOutput(logOutput) // Should prevent "fragmented" output (parts of stdout and stderr interleaved) .redirectErrorStream(true); if (System.getenv("GRADLE_JAVA_HOME") != null) { @@ -93,20 +103,39 @@ public BuildResult runGradleWrapper(boolean expectError, File projectDir, boolea } Process p = pb.start(); - //long timeout for native tests - //that may also need to download docker - boolean done = p.waitFor(10, TimeUnit.MINUTES); + Thread outputPuller = new Thread(new LogRedirectAndStopper(p, logOutput, !isInCiPipeline)); + outputPuller.setDaemon(true); + outputPuller.start(); + + boolean done; + + if (!isInCiPipeline && isDebuggerConnected()) { + p.waitFor(); + done = true; + } else { + //long timeout for native tests + //that may also need to download docker + done = p.waitFor(10, TimeUnit.MINUTES); + } + if (!done) { destroyProcess(p); } + + outputPuller.interrupt(); + outputPuller.join(); + final BuildResult commandResult = BuildResult.of(logOutput); int exitCode = p.exitValue(); // The test failed, if the Gradle build exits with != 0 and the tests expects no failure, or if the test // expects a failure and the exit code is 0. if (expectError == (exitCode == 0)) { - // Only print the output, if the test does not expect a failure. - printCommandOutput(projectDir, command, commandResult, exitCode); + if (isInCiPipeline) { + // Only print the output, if the test does not expect a failure. + printCommandOutput(projectDir, command, commandResult, exitCode); + } + // Fail hard, if the test does not expect a failure. 
Assertions.fail("Gradle build failed with exit code %d", exitCode); } @@ -175,4 +204,44 @@ private static void destroyProcess(Process wrapperProcess) { wrapperProcess.destroyForcibly(); } } + + private static boolean isDebuggerConnected() { + return ManagementFactory.getRuntimeMXBean().getInputArguments().toString().contains("jdwp"); + } + + private record LogRedirectAndStopper(Process process, File targetFile, Boolean forwardToStdOut) implements Runnable { + @Override + public void run() { + try (BufferedReader stdOutReader = process.inputReader(); + FileWriter fw = new FileWriter(targetFile); + BufferedWriter bw = new BufferedWriter(fw)) { + int errorCount = 0; + + while (!Thread.interrupted()) { + String line = stdOutReader.readLine(); + if (line == null) { + break; + } + + bw.write(line); + bw.newLine(); + + if (forwardToStdOut) { + System.out.println(line); + } + + if (line.contains("Build failure: Build failed due to errors")) { + errorCount++; + + if (errorCount >= 3) { + process.destroyForcibly(); + break; + } + } + } + } catch (IOException ignored) { + // ignored + } + } + } } diff --git a/integration-tests/gradle/src/test/java/io/quarkus/gradle/devmode/MultiCompositeBuildExtensionsDevModeTest.java b/integration-tests/gradle/src/test/java/io/quarkus/gradle/devmode/MultiCompositeBuildExtensionsDevModeTest.java new file mode 100644 index 0000000000000..e363802e6d9aa --- /dev/null +++ b/integration-tests/gradle/src/test/java/io/quarkus/gradle/devmode/MultiCompositeBuildExtensionsDevModeTest.java @@ -0,0 +1,63 @@ +package io.quarkus.gradle.devmode; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; + +import com.google.common.collect.ImmutableMap; + +public class MultiCompositeBuildExtensionsDevModeTest extends QuarkusDevGradleTestBase { + @Override + protected String projectDirectoryName() { + return "multi-composite-build-extensions-project"; + } + + @Override + protected String[] buildArguments() { + return new String[] { ":application:clean", ":application:quarkusDev" }; + } + + protected void testDevMode() throws Exception { + + assertThat(getHttpResponse()) + .contains("ready") + .contains("my-quarkus-project") + .contains("org.acme.quarkus.sample") + .contains("1.0-SNAPSHOT"); + + assertThat(getHttpResponse("/hello")).contains("hello from LibB and LibA extension enabled: false"); + + replace("libraries/libraryA/src/main/java/org/acme/liba/LibA.java", + ImmutableMap.of("return \"LibA\";", "return \"modifiedA\";")); + replace("libraries/libraryB/src/main/java/org/acme/libb/LibB.java", + ImmutableMap.of("return \"LibB\";", "return \"modifiedB\";")); + replace("application/src/main/resources/application.properties", + ImmutableMap.of("false", "true")); + + assertThat(getHttpResponse("/hello")).contains("hello from LibB and LibA extension enabled: true"); + } + + @Override + protected File getProjectDir() { + File projectDir = super.getProjectDir(); + final File appProperties = new File(projectDir, "application/gradle.properties"); + final File libsProperties = new File(projectDir, "libraries/gradle.properties"); + final File extensionProperties = new File(projectDir, "extensions/example-extension/gradle.properties"); + final File anotherExtensionProperties = new File(projectDir, "extensions/another-example-extension/gradle.properties"); + final Path projectProperties = projectDir.toPath().resolve("gradle.properties"); + + 
try { + Files.copy(projectProperties, appProperties.toPath(), StandardCopyOption.REPLACE_EXISTING); + Files.copy(projectProperties, libsProperties.toPath(), StandardCopyOption.REPLACE_EXISTING); + Files.copy(projectProperties, extensionProperties.toPath(), StandardCopyOption.REPLACE_EXISTING); + Files.copy(projectProperties, anotherExtensionProperties.toPath(), StandardCopyOption.REPLACE_EXISTING); + } catch (IOException e) { + throw new IllegalStateException("Unable to copy gradle.properties file", e); + } + return projectDir; + } +} diff --git a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/Book.java b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/Book.java new file mode 100644 index 0000000000000..9e6a156a01f83 --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/Book.java @@ -0,0 +1,92 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.multitenancy.book; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.SequenceGenerator; +import jakarta.persistence.Table; + +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; + +@Entity +@Table(name = "books") +@Indexed +public class Book { + + @Id + @SequenceGenerator(name = "booksSequence", sequenceName = "books_id_seq", allocationSize = 1, initialValue = 10) + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "booksSequence") + private Integer id; + + @FullTextField + @Column(length = 40, unique = true) + private String name; + + public Book() { + } + + public Book(String name) { + this.name = name; + } + + public Book(Integer id, String name) { + this.id = id; + this.name = name; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 0 : id.hashCode()); + result = prime * result + ((name == null) ? 
0 : name.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Book other = (Book) obj; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + if (name == null) { + if (other.name != null) + return false; + } else if (!name.equals(other.name)) + return false; + return true; + } + + @Override + public String toString() { + return "Book [id=" + id + ", name=" + name + "]"; + } + +} diff --git a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/BookResource.java b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/BookResource.java new file mode 100644 index 0000000000000..8844afa887c6a --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/BookResource.java @@ -0,0 +1,53 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.multitenancy.book; + +import java.util.List; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.persistence.EntityManager; +import jakarta.transaction.Transactional; +import jakarta.validation.constraints.NotNull; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Response; + +import org.hibernate.search.mapper.orm.session.SearchSession; + +import io.quarkus.hibernate.orm.PersistenceUnit; + +@ApplicationScoped +@Produces("application/json") +@Consumes("application/json") +@Path("/books") +public class BookResource { + + @Inject + @PersistenceUnit("books") + EntityManager entityManager; + @Inject + @PersistenceUnit("books") + SearchSession searchSession; + + @POST + @Path("/") + @Transactional + public Response create(@NotNull Book book) { + searchSession.indexingPlanFilter(context -> context.exclude(Book.class)); + entityManager.persist(book); + return Response.ok(book).status(Response.Status.CREATED).build(); + } + + @GET + @Path("/search") + @Transactional + public Response search(@NotNull @QueryParam("terms") String terms) { + List list = searchSession.search(Book.class) + .where(f -> f.simpleQueryString().field("name").matching(terms)) + .fetchAllHits(); + return Response.status(Response.Status.OK).entity(list).build(); + } +} diff --git a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/BookTenantResolver.java b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/BookTenantResolver.java new file mode 100644 index 0000000000000..04dda244ad824 --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/BookTenantResolver.java @@ -0,0 +1,26 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.multitenancy.book; + +import java.util.concurrent.atomic.AtomicReference; + +import jakarta.enterprise.context.RequestScoped; + +import io.quarkus.hibernate.orm.PersistenceUnitExtension; 
+import io.quarkus.hibernate.orm.runtime.tenant.TenantResolver; + +@PersistenceUnitExtension("books") +@RequestScoped +public class BookTenantResolver implements TenantResolver { + + public static final AtomicReference TENANT_ID = new AtomicReference<>("company3"); + + @Override + public String getDefaultTenantId() { + return "base"; + } + + @Override + public String resolveTenantId() { + return TENANT_ID.get(); + } + +} diff --git a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/resources/application.properties b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/resources/application.properties index 98944ea31cc0e..5e2e31cb3b8ef 100644 --- a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/resources/application.properties +++ b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/resources/application.properties @@ -5,9 +5,22 @@ quarkus.flyway.clean-at-start=true quarkus.hibernate-orm.database.generation=none quarkus.hibernate-orm.multitenant=schema +quarkus.hibernate-orm.packages=io.quarkus.it.hibernate.search.orm.elasticsearch.multitenancy.fruit quarkus.hibernate-search-orm.elasticsearch.version=8 quarkus.hibernate-search-orm.elasticsearch.hosts=${elasticsearch.hosts:localhost:9200} quarkus.hibernate-search-orm.elasticsearch.protocol=${elasticsearch.protocol:http} quarkus.hibernate-search-orm.schema-management.strategy=drop-and-create-and-drop quarkus.hibernate-search-orm.indexing.plan.synchronization.strategy=sync + + +quarkus.hibernate-orm."books".datasource= +quarkus.hibernate-orm."books".database.generation=none +quarkus.hibernate-orm."books".multitenant=schema +quarkus.hibernate-orm."books".packages=io.quarkus.it.hibernate.search.orm.elasticsearch.multitenancy.book + +quarkus.hibernate-search-orm."books".elasticsearch.version=8 +quarkus.hibernate-search-orm."books".elasticsearch.hosts=${elasticsearch.hosts:localhost:9200} +quarkus.hibernate-search-orm."books".elasticsearch.protocol=${elasticsearch.protocol:http} +quarkus.hibernate-search-orm."books".schema-management.strategy=drop-and-create-and-drop +quarkus.hibernate-search-orm."books".indexing.plan.synchronization.strategy=sync \ No newline at end of file diff --git a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/resources/db/migration/V1.0.0__init.sql b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/resources/db/migration/V1.0.0__init.sql index 1d597c6d5b4e1..48eb1ed39149c 100644 --- a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/resources/db/migration/V1.0.0__init.sql +++ b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/resources/db/migration/V1.0.0__init.sql @@ -1,3 +1,9 @@ +DROP SCHEMA IF EXISTS "base" CASCADE; +DROP SCHEMA IF EXISTS "company1" CASCADE; +DROP SCHEMA IF EXISTS "company2" CASCADE; +DROP SCHEMA IF EXISTS "company3" CASCADE; +DROP SCHEMA IF EXISTS "company4" CASCADE; + CREATE SCHEMA "base"; CREATE TABLE "base".known_fruits ( @@ -21,3 +27,28 @@ CREATE TABLE "company2".known_fruits name VARCHAR(40) ); CREATE SEQUENCE "company2".known_fruits_id_seq START WITH 1; + +-- Books tables: + +CREATE TABLE "base".books +( + id INT, + name VARCHAR(40) +); +CREATE SEQUENCE "base".books_id_seq START WITH 1; + +CREATE SCHEMA "company3"; +CREATE TABLE "company3".books +( + id INT, + name VARCHAR(40) +); +CREATE SEQUENCE "company3".books_id_seq START WITH 1; + +CREATE SCHEMA "company4"; +CREATE TABLE "company4".books +( + id INT, + name VARCHAR(40) +); 
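+-- Sequence backing the Book entity's books_id_seq generator in the "company4" tenant schema.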
+CREATE SEQUENCE "company4".books_id_seq START WITH 1; diff --git a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/HibernateSearchTenancyReindexFunctionalityTest.java b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/HibernateSearchTenancyReindexFunctionalityTest.java new file mode 100644 index 0000000000000..e58ce5f3b628f --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/HibernateSearchTenancyReindexFunctionalityTest.java @@ -0,0 +1,82 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.multitenancy.book; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; +import static org.hamcrest.Matchers.is; + +import java.util.List; +import java.util.Map; + +import jakarta.ws.rs.core.Response.Status; + +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Test; + +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.junit.QuarkusTestProfile; +import io.quarkus.test.junit.TestProfile; +import io.restassured.RestAssured; +import io.restassured.common.mapper.TypeRef; +import io.restassured.http.ContentType; +import io.restassured.response.Response; +import io.vertx.core.http.HttpHeaders; + +@QuarkusTest +@TestProfile(HibernateSearchTenancyReindexFunctionalityTest.Profile.class) +class HibernateSearchTenancyReindexFunctionalityTest { + public static final TypeRef> BOOK_LIST_TYPE_REF = new TypeRef<>() { + }; + + public static class Profile implements QuarkusTestProfile { + @Override + public Map getConfigOverrides() { + return Map.of("quarkus.management.enabled", "true", + "quarkus.hibernate-search-orm.management.enabled", "true"); + } + } + + @Test + void test() { + String tenant1Id = "company3"; + String tenant2Id = "company4"; + String bookName = "myBook"; + + Book book1 = new Book(bookName); + create(tenant1Id, book1); + assertThat(search(tenant1Id, bookName)).isEmpty(); + Book book2 = new Book(bookName); + create(tenant2Id, book2); + assertThat(search(tenant2Id, bookName)).isEmpty(); + + RestAssured.given() + .queryParam("wait_for", "finished") + .queryParam("persistence_unit", "books") + .header(HttpHeaders.CONTENT_TYPE.toString(), "application/json") + .body("{\"filter\": {\"tenants\": [\"" + tenant1Id + "\"], \"types\": [\"" + Book.class.getName() + "\"]}}") + .post("http://localhost:9001/q/hibernate-search/reindex") + .then().statusCode(200) + .body(Matchers.stringContainsInOrder("Reindexing started", "Reindexing succeeded")); + assertThat(search(tenant1Id, bookName)).hasSize(1); + assertThat(search(tenant2Id, bookName)).isEmpty(); + } + + private void create(String tenantId, Book book) { + BookTenantResolver.TENANT_ID.set(tenantId); + given().with().body(book).contentType(ContentType.JSON) + .when().post("/books") + .then() + .statusCode(is(Status.CREATED.getStatusCode())); + } + + private List search(String tenantId, String terms) { + BookTenantResolver.TENANT_ID.set(tenantId); + + Response response = given() + .when().get("/books/search?terms={terms}", terms); + if (response.getStatusCode() == Status.OK.getStatusCode()) { + return response.as(BOOK_LIST_TYPE_REF); + } + return List.of(); + } + +} diff --git 
a/integration-tests/hibernate-search-orm-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementTestResource.java b/integration-tests/hibernate-search-orm-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementTestResource.java new file mode 100644 index 0000000000000..131cd4f8ae09a --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementTestResource.java @@ -0,0 +1,45 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.management; + +import jakarta.inject.Inject; +import jakarta.transaction.Transactional; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; + +import org.hibernate.Session; +import org.hibernate.search.mapper.orm.session.SearchSession; + +@Path("/test/management") +public class HibernateSearchManagementTestResource { + + @Inject + Session session; + + @Inject + SearchSession searchSession; + + @PUT + @Path("/init-data") + @Transactional + public void initData() { + searchSession.indexingPlanFilter(context -> context.exclude(Object.class)); + session.persist(new ManagementTestEntity("name1")); + session.persist(new ManagementTestEntity("name2")); + session.persist(new ManagementTestEntity("name3")); + session.persist(new ManagementTestEntity("name4")); + session.persist(new ManagementTestEntity("name5")); + } + + @GET + @Path("/search-count") + @Produces(MediaType.TEXT_PLAIN) + @Transactional + public long testAnalysisConfigured() { + return searchSession.search(ManagementTestEntity.class) + .select(f -> f.id()) + .where(f -> f.matchAll()) + .fetchTotalHitCount(); + } +} diff --git a/integration-tests/hibernate-search-orm-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/ManagementTestEntity.java b/integration-tests/hibernate-search-orm-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/ManagementTestEntity.java new file mode 100644 index 0000000000000..9bc64a708e58e --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/ManagementTestEntity.java @@ -0,0 +1,44 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.management; + +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; + +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; + +@Entity +@Indexed +public class ManagementTestEntity { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "managementSeq") + private Long id; + + @FullTextField + private String name; + + public ManagementTestEntity() { + } + + public ManagementTestEntity(String name) { + this.name = name; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/integration-tests/hibernate-search-orm-elasticsearch/src/main/resources/application.properties b/integration-tests/hibernate-search-orm-elasticsearch/src/main/resources/application.properties 
index 17baefd55b1cc..300811857879b 100644 --- a/integration-tests/hibernate-search-orm-elasticsearch/src/main/resources/application.properties +++ b/integration-tests/hibernate-search-orm-elasticsearch/src/main/resources/application.properties @@ -21,4 +21,9 @@ quarkus.hibernate-search-orm.indexing.plan.synchronization.strategy=sync # See io.quarkus.it.hibernate.search.orm.elasticsearch.devservices.HibernateSearchElasticsearchDevServicesEnabledImplicitlyTest.testHibernateSearch %test.quarkus.hibernate-search-orm.schema-management.strategy=drop-and-create %test.quarkus.hibernate-search-orm.elasticsearch.hosts=${elasticsearch.hosts:localhost:9200} -%test.quarkus.hibernate-search-orm.elasticsearch.protocol=${elasticsearch.protocol:http} \ No newline at end of file +%test.quarkus.hibernate-search-orm.elasticsearch.protocol=${elasticsearch.protocol:http} + +# we want to enable management so that we can access Hibernate Search management endpoints: +quarkus.management.enabled=true +# now enable the Hibernate Search management itself: +quarkus.hibernate-search-orm.management.enabled=true diff --git a/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementCustomUrlTest.java b/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementCustomUrlTest.java new file mode 100644 index 0000000000000..3b3ab1efa557f --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementCustomUrlTest.java @@ -0,0 +1,34 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.management; + +import java.util.Map; + +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Test; + +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.junit.QuarkusTestProfile; +import io.quarkus.test.junit.TestProfile; +import io.restassured.RestAssured; +import io.vertx.core.http.HttpHeaders; + +@QuarkusTest +@TestProfile(HibernateSearchManagementCustomUrlTest.Profile.class) +class HibernateSearchManagementCustomUrlTest { + + public static class Profile implements QuarkusTestProfile { + @Override + public Map getConfigOverrides() { + return Map.of("quarkus.hibernate-search-orm.management.root-path", "custom-reindex"); + } + } + + @Test + void simple() { + RestAssured.given() + .queryParam("wait_for", "finished") + .header(HttpHeaders.CONTENT_TYPE.toString(), "application/json") + .post("http://localhost:9001/q/custom-reindex/reindex") + .then().statusCode(200) + .body(Matchers.stringContainsInOrder("Reindexing started", "Reindexing succeeded")); + } +} diff --git a/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementIT.java b/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementIT.java new file mode 100644 index 0000000000000..d8d1dfbff19a3 --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementIT.java @@ -0,0 +1,11 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.management; + +import io.quarkus.test.junit.QuarkusIntegrationTest; + +@QuarkusIntegrationTest +public class HibernateSearchManagementIT 
extends HibernateSearchManagementTest { + @Override + protected String getPrefix() { + return "http://localhost:9000"; // ITs run in prod mode. + } +} diff --git a/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementTest.java b/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementTest.java new file mode 100644 index 0000000000000..0f6206bdd57bb --- /dev/null +++ b/integration-tests/hibernate-search-orm-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/management/HibernateSearchManagementTest.java @@ -0,0 +1,51 @@ +package io.quarkus.it.hibernate.search.orm.elasticsearch.management; + +import static org.hamcrest.Matchers.is; + +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Test; + +import io.quarkus.test.junit.QuarkusTest; +import io.restassured.RestAssured; +import io.vertx.core.http.HttpHeaders; + +@QuarkusTest +class HibernateSearchManagementTest { + + protected String getPrefix() { + return "http://localhost:9001"; + } + + @Test + void simple() { + RestAssured.given() + .queryParam("wait_for", "finished") + .header(HttpHeaders.CONTENT_TYPE.toString(), "application/json") + .post(getPrefix() + "/q/hibernate-search/reindex") + .then().statusCode(200) + .body(Matchers.stringContainsInOrder("Reindexing started", "Reindexing succeeded")); + } + + @Test + void specificTypeOnly() { + RestAssured.when().put("/test/management/init-data").then() + .statusCode(204); + + RestAssured.get("/test/management/search-count") + .then().statusCode(200) + .body(is("0")); + + RestAssured.given() + .queryParam("wait_for", "finished") + .header(HttpHeaders.CONTENT_TYPE.toString(), "application/json") + .body("{\"filter\": {\"types\": [\"" + ManagementTestEntity.class.getName() + "\"]}}") + .post(getPrefix() + "/q/hibernate-search/reindex") + .then().statusCode(200) + .body(Matchers.stringContainsInOrder("Reindexing started", "Reindexing succeeded")); + + RestAssured.get("/test/management/search-count") + .then().statusCode(200) + .body(is("5")); + } + +} diff --git a/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/BookIBANField.java b/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/BookIBANField.java new file mode 100644 index 0000000000000..5fc7a28bdcfa9 --- /dev/null +++ b/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/BookIBANField.java @@ -0,0 +1,24 @@ +package io.quarkus.it.jaxb; + +import jakarta.xml.bind.annotation.XmlAccessType; +import jakarta.xml.bind.annotation.XmlAccessorType; +import jakarta.xml.bind.annotation.XmlElement; +import jakarta.xml.bind.annotation.XmlTransient; + +@XmlAccessorType(XmlAccessType.FIELD) +@XmlTransient +public abstract class BookIBANField { + @XmlElement + private String IBAN; + + public BookIBANField() { + } + + public void setIBAN(String IBAN) { + this.IBAN = IBAN; + } + + public String getIBAN() { + return IBAN; + } +} diff --git a/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/BookWithParent.java b/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/BookWithParent.java new file mode 100644 index 0000000000000..d4d81b481e267 --- /dev/null +++ b/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/BookWithParent.java @@ -0,0 +1,27 @@ +package io.quarkus.it.jaxb; + +import jakarta.xml.bind.annotation.XmlElement; +import jakarta.xml.bind.annotation.XmlRootElement; +import 
jakarta.xml.bind.annotation.XmlType; + +@XmlRootElement +@XmlType(propOrder = { "IBAN", "title" }) +public class BookWithParent extends BookIBANField { + @XmlElement + private String title; + + public BookWithParent() { + } + + public BookWithParent(String title) { + this.title = title; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } +} diff --git a/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/JaxbResource.java b/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/JaxbResource.java index b03507a82089e..d6a9b8e386574 100644 --- a/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/JaxbResource.java +++ b/integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/JaxbResource.java @@ -65,4 +65,19 @@ public io.quarkus.it.jaxb.Response seeAlso() { return response; } + //Test for Jaxb with parent class field + @Path("/bookwithparent") + @GET + @Produces(MediaType.TEXT_PLAIN) + public String getBookWithParent(@QueryParam("name") String name, @QueryParam("iban") String iban) throws JAXBException { + BookWithParent bookWithParent = new BookWithParent(); + bookWithParent.setTitle(name); + bookWithParent.setIBAN(iban); + JAXBContext context = JAXBContext.newInstance(bookWithParent.getClass()); + Marshaller marshaller = context.createMarshaller(); + StringWriter sw = new StringWriter(); + marshaller.marshal(bookWithParent, sw); + return sw.toString(); + } + } diff --git a/integration-tests/jaxb/src/test/java/io/quarkus/it/jaxb/JaxbIT.java b/integration-tests/jaxb/src/test/java/io/quarkus/it/jaxb/JaxbIT.java index ae862b4a11e41..06611d4a0c081 100644 --- a/integration-tests/jaxb/src/test/java/io/quarkus/it/jaxb/JaxbIT.java +++ b/integration-tests/jaxb/src/test/java/io/quarkus/it/jaxb/JaxbIT.java @@ -1,8 +1,24 @@ package io.quarkus.it.jaxb; +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.is; + +import org.junit.jupiter.api.Test; + import io.quarkus.test.junit.QuarkusIntegrationTest; @QuarkusIntegrationTest public class JaxbIT extends JaxbTest { - + //We have to test native executable of Jaxb + @Test + public void bookWithParent() { + given().when() + .param("name", "Foundation") + .param("iban", "4242") + .get("/jaxb/bookwithparent") + .then() + .statusCode(200) + .body(is( + "4242Foundation")); + } } diff --git a/integration-tests/kafka-json-schema-apicurio2/pom.xml b/integration-tests/kafka-json-schema-apicurio2/pom.xml index 7fa3b388a9ff8..3a1713af0cc62 100644 --- a/integration-tests/kafka-json-schema-apicurio2/pom.xml +++ b/integration-tests/kafka-json-schema-apicurio2/pom.xml @@ -23,7 +23,7 @@ org.jetbrains.kotlin kotlin-scripting-compiler-embeddable - 1.6.0 + 1.9.22 org.json diff --git a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KnativeTest.java b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KnativeTest.java index b5dcac88d083d..f05d77f88b932 100644 --- a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KnativeTest.java +++ b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KnativeTest.java @@ -43,6 +43,7 @@ public void assertGeneratedResources() throws IOException { assertThat(s.getSpec()).satisfies(spec -> { assertThat(s.getMetadata()).satisfies(m -> { assertThat(m.getNamespace()).isNull(); + assertThat(m.getAnnotations().get("app.quarkus.io/quarkus-version")).isNotBlank(); }); 
assertThat(spec.getTemplate()).satisfies(template -> { diff --git a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KubernetesWithIdempotentTest.java b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KubernetesWithIdempotentTest.java index bb9eb39b57916..aa54c084165a2 100644 --- a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KubernetesWithIdempotentTest.java +++ b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KubernetesWithIdempotentTest.java @@ -43,6 +43,7 @@ public void assertGeneratedResources() throws IOException { assertThat(kubernetesList).allSatisfy(resource -> { assertThat(resource.getMetadata()).satisfies(m -> { assertThat(m.getName()).isEqualTo(APP_NAME); + assertThat(m.getAnnotations().get("app.quarkus.io/quarkus-version")).isNotBlank(); assertThat(m.getAnnotations().get("app.quarkus.io/commit-id")).isNull(); assertThat(m.getAnnotations().get("app.quarkus.io/build-timestamp")).isNull(); }); diff --git a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV3Test.java b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV3Test.java index 6065c821706c5..a39ee319cbd8a 100644 --- a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV3Test.java +++ b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV3Test.java @@ -47,6 +47,7 @@ public void assertGeneratedResources() throws IOException { assertThat(m.getLabels().get("app.kubernetes.io/name")).isEqualTo("openshift-v3"); assertThat(m.getLabels().get("app")).isEqualTo("openshift-v3"); assertThat(m.getNamespace()).isNull(); + assertThat(m.getAnnotations().get("app.quarkus.io/quarkus-version")).isNotBlank(); }); AbstractObjectAssert specAssert = assertThat(h).extracting("spec"); specAssert.extracting("selector").isInstanceOfSatisfying(Map.class, selectorsMap -> { diff --git a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV4DeploymentConfigTest.java b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV4DeploymentConfigTest.java index ab75d935d80d2..dde239084e49a 100644 --- a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV4DeploymentConfigTest.java +++ b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV4DeploymentConfigTest.java @@ -47,6 +47,7 @@ public void assertGeneratedResources() throws IOException { assertThat(m.getLabels().get("app.kubernetes.io/name")).isEqualTo("openshift-v4-deploymentconfig"); assertThat(m.getLabels().get("app")).isNull(); assertThat(m.getNamespace()).isNull(); + assertThat(m.getAnnotations().get("app.quarkus.io/quarkus-version")).isNotBlank(); }); AbstractObjectAssert specAssert = assertThat(h).extracting("spec"); specAssert.extracting("selector").isInstanceOfSatisfying(Map.class, selectorsMap -> { diff --git a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV4Test.java b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV4Test.java index d7370c8afa58c..186155711d017 100644 --- 
a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV4Test.java +++ b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftV4Test.java @@ -47,6 +47,7 @@ public void assertGeneratedResources() throws IOException { assertThat(m.getLabels().get("app.kubernetes.io/name")).isEqualTo("openshift-v4"); assertThat(m.getLabels().get("app")).isNull(); assertThat(m.getNamespace()).isNull(); + assertThat(m.getAnnotations().get("app.quarkus.io/quarkus-version")).isNotBlank(); }); AbstractObjectAssert specAssert = assertThat(h).extracting("spec"); specAssert.extracting("selector.matchLabels").isInstanceOfSatisfying(Map.class, selectorsMap -> { diff --git a/integration-tests/logging-gelf/src/main/java/io/quarkus/logging/gelf/it/GelfLogHandlerResource.java b/integration-tests/logging-gelf/src/main/java/io/quarkus/logging/gelf/it/GelfLogHandlerResource.java index 6de56f46342f9..e634b3cd11664 100644 --- a/integration-tests/logging-gelf/src/main/java/io/quarkus/logging/gelf/it/GelfLogHandlerResource.java +++ b/integration-tests/logging-gelf/src/main/java/io/quarkus/logging/gelf/it/GelfLogHandlerResource.java @@ -21,6 +21,7 @@ import jakarta.ws.rs.Path; import org.jboss.logging.Logger; +import org.jboss.logging.MDC; /** * This endpoint allow to test central logging solution by generating a log event when @@ -33,6 +34,8 @@ public class GelfLogHandlerResource { @GET public void log() { + MDC.put("field3", 99); + MDC.put("field4", 98); LOG.info("Some useful log message"); } diff --git a/integration-tests/logging-gelf/src/main/resources/application.properties b/integration-tests/logging-gelf/src/main/resources/application.properties index 59759a7e82150..b186f323a47fb 100644 --- a/integration-tests/logging-gelf/src/main/resources/application.properties +++ b/integration-tests/logging-gelf/src/main/resources/application.properties @@ -6,4 +6,6 @@ quarkus.log.handler.gelf.facility=custom quarkus.log.handler.gelf.additional-field.field1.value=value quarkus.log.handler.gelf.additional-field.field1.type=String quarkus.log.handler.gelf.additional-field.field2.value=666 -quarkus.log.handler.gelf.additional-field.field2.type=long \ No newline at end of file +quarkus.log.handler.gelf.additional-field.field2.type=long +quarkus.log.handler.gelf.include-full-mdc=true +quarkus.log.handler.gelf.dynamic-mdc-field-types=field3=String \ No newline at end of file diff --git a/integration-tests/logging-gelf/src/test/java/io/quarkus/logging/gelf/it/GelfLogHandlerTest.java b/integration-tests/logging-gelf/src/test/java/io/quarkus/logging/gelf/it/GelfLogHandlerTest.java index efe13272e0fc8..350d63c6911b5 100644 --- a/integration-tests/logging-gelf/src/test/java/io/quarkus/logging/gelf/it/GelfLogHandlerTest.java +++ b/integration-tests/logging-gelf/src/test/java/io/quarkus/logging/gelf/it/GelfLogHandlerTest.java @@ -42,6 +42,8 @@ public void test() { assertEquals(200, response.statusCode()); assertNotNull(response.body().path("hits.hits[0]._source")); assertEquals("Some useful log message", response.body().path("hits.hits[0]._source.message")); + assertEquals(Integer.valueOf(98), response.body().path("hits.hits[0]._source.field4")); + assertEquals("99", response.body().path("hits.hits[0]._source.field3")); }); } } diff --git a/integration-tests/main/src/test/resources/image-metrics/23.1/image-metrics.properties b/integration-tests/main/src/test/resources/image-metrics/23.1/image-metrics.properties index 3e643eb8117a5..e08a94b6e5cb9 100644 --- 
a/integration-tests/main/src/test/resources/image-metrics/23.1/image-metrics.properties +++ b/integration-tests/main/src/test/resources/image-metrics/23.1/image-metrics.properties @@ -20,5 +20,5 @@ analysis_results.classes.jni=62 analysis_results.classes.jni.tolerance=1 analysis_results.methods.jni=55 analysis_results.methods.jni.tolerance=1 -analysis_results.fields.jni=61 -analysis_results.fields.jni.tolerance=1 +analysis_results.fields.jni=62 +analysis_results.fields.jni.tolerance=2 diff --git a/integration-tests/maven/src/test/resources/__snapshots__/CreateExtensionMojoIT/testCreateQuarkiverseExtension/quarkus-my-quarkiverse-ext_integration-tests_pom.xml b/integration-tests/maven/src/test/resources/__snapshots__/CreateExtensionMojoIT/testCreateQuarkiverseExtension/quarkus-my-quarkiverse-ext_integration-tests_pom.xml index efeae2164b76c..6351615f20ec4 100644 --- a/integration-tests/maven/src/test/resources/__snapshots__/CreateExtensionMojoIT/testCreateQuarkiverseExtension/quarkus-my-quarkiverse-ext_integration-tests_pom.xml +++ b/integration-tests/maven/src/test/resources/__snapshots__/CreateExtensionMojoIT/testCreateQuarkiverseExtension/quarkus-my-quarkiverse-ext_integration-tests_pom.xml @@ -15,7 +15,7 @@ io.quarkus - quarkus-resteasy + quarkus-resteasy-reactive io.quarkiverse.my-quarkiverse-ext diff --git a/integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/transaction/TransactionPersonResource.java b/integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/transaction/TransactionPersonResource.java index 27763f671c453..1b2b5e2a76377 100644 --- a/integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/transaction/TransactionPersonResource.java +++ b/integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/transaction/TransactionPersonResource.java @@ -1,5 +1,7 @@ package io.quarkus.it.mongodb.panache.transaction; +import static org.junit.jupiter.api.Assertions.assertNotNull; + import java.net.URI; import java.util.ArrayList; import java.util.List; @@ -14,6 +16,7 @@ import com.mongodb.client.MongoClient; +import io.quarkus.mongodb.panache.Panache; import io.quarkus.runtime.StartupEvent; @Path("/transaction") @@ -42,6 +45,7 @@ public List getPersons() { @Transactional public Response addPerson(TransactionPerson person) { person.persist(); + assertNotNull(Panache.getSession(TransactionPerson.class)); return Response.created(URI.create("/transaction/" + person.id.toString())).build(); } diff --git a/integration-tests/opentelemetry-quickstart/pom.xml b/integration-tests/opentelemetry-quickstart/pom.xml new file mode 100644 index 0000000000000..b824643f4ecd0 --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/pom.xml @@ -0,0 +1,188 @@ + + + 4.0.0 + + + io.quarkus + quarkus-integration-tests-parent + 999-SNAPSHOT + + + quarkus-integration-test-opentelemetry-quickstart + Quarkus - Integration Tests - OpenTelemetry quickstart + + + + io.quarkus + quarkus-resteasy-reactive-jackson + + + io.quarkus + quarkus-opentelemetry + + + io.quarkus + quarkus-arc + + + io.quarkus + quarkus-resteasy-reactive + + + + + + io.opentelemetry + opentelemetry-sdk-testing + + + + + io.quarkus + quarkus-junit5 + test + + + io.rest-assured + rest-assured + test + + + org.awaitility + awaitility + test + + + + + io.quarkus + quarkus-arc-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus 
+ quarkus-resteasy-reactive-jackson-deployment + ${project.version} + pom + test + + + * + * + + + + + + io.quarkus + quarkus-opentelemetry-deployment + ${project.version} + pom + test + + + * + * + + + + + + + + + io.quarkus + quarkus-maven-plugin + + + + build + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + false + + + + + + + + + native-image + + + native + + + + + native + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + ${native.surefire.skip} + + false + + + + + + maven-failsafe-plugin + + + + integration-test + verify + + + + false + + ${project.build.directory}/${project.build.finalName}-runner + + + + + + + + + + + diff --git a/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/ExporterResource.java b/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/ExporterResource.java new file mode 100644 index 0000000000000..a611fda7c2c7b --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/ExporterResource.java @@ -0,0 +1,46 @@ +package io.quarkus.it.opentelemetry; + +import java.util.List; +import java.util.stream.Collectors; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.enterprise.inject.Produces; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Response; + +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.data.SpanData; + +@Path("") +public class ExporterResource { + @Inject + InMemorySpanExporter inMemorySpanExporter; + + @GET + @Path("/reset") + public Response reset() { + inMemorySpanExporter.reset(); + return Response.ok().build(); + } + + @GET + @Path("/export") + public List export() { + return inMemorySpanExporter.getFinishedSpanItems() + .stream() + .filter(sd -> !sd.getName().contains("export") && !sd.getName().contains("reset")) + .collect(Collectors.toList()); + } + + @ApplicationScoped + static class InMemorySpanExporterProducer { + @Produces + @Singleton + InMemorySpanExporter inMemorySpanExporter() { + return InMemorySpanExporter.create(); + } + } +} diff --git a/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/GreetingResource.java b/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/GreetingResource.java new file mode 100644 index 0000000000000..3874e37f85302 --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/GreetingResource.java @@ -0,0 +1,16 @@ +package io.quarkus.it.opentelemetry; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; + +@Path("/hello") +public class GreetingResource { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String hello() { + return "Hello from RESTEasy Reactive"; + } +} diff --git a/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/output/SpanDataModuleSerializer.java b/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/output/SpanDataModuleSerializer.java new file mode 100644 index 0000000000000..83564dfe092bb --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/output/SpanDataModuleSerializer.java @@ -0,0 +1,19 @@ +package io.quarkus.it.opentelemetry.output; + +import jakarta.inject.Singleton; + 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; + +import io.opentelemetry.sdk.trace.data.SpanData; +import io.quarkus.jackson.ObjectMapperCustomizer; + +@Singleton +public class SpanDataModuleSerializer implements ObjectMapperCustomizer { + @Override + public void customize(ObjectMapper objectMapper) { + SimpleModule simpleModule = new SimpleModule(); + simpleModule.addSerializer(SpanData.class, new SpanDataSerializer()); + objectMapper.registerModule(simpleModule); + } +} diff --git a/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/output/SpanDataSerializer.java b/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/output/SpanDataSerializer.java new file mode 100644 index 0000000000000..c546ef284625e --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/main/java/io/quarkus/it/opentelemetry/output/SpanDataSerializer.java @@ -0,0 +1,55 @@ +package io.quarkus.it.opentelemetry.output; + +import java.io.IOException; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; + +import io.opentelemetry.sdk.trace.data.SpanData; + +public class SpanDataSerializer extends StdSerializer { + public SpanDataSerializer() { + this(null); + } + + public SpanDataSerializer(Class type) { + super(type); + } + + @Override + public void serialize(SpanData spanData, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) + throws IOException { + jsonGenerator.writeStartObject(); + + jsonGenerator.writeStringField("spanId", spanData.getSpanId()); + jsonGenerator.writeStringField("traceId", spanData.getTraceId()); + jsonGenerator.writeStringField("name", spanData.getName()); + jsonGenerator.writeStringField("kind", spanData.getKind().name()); + jsonGenerator.writeBooleanField("ended", spanData.hasEnded()); + + jsonGenerator.writeStringField("parentSpanId", spanData.getParentSpanContext().getSpanId()); + jsonGenerator.writeStringField("parent_spanId", spanData.getParentSpanContext().getSpanId()); + jsonGenerator.writeStringField("parent_traceId", spanData.getParentSpanContext().getTraceId()); + jsonGenerator.writeBooleanField("parent_remote", spanData.getParentSpanContext().isRemote()); + jsonGenerator.writeBooleanField("parent_valid", spanData.getParentSpanContext().isValid()); + + spanData.getAttributes().forEach((k, v) -> { + try { + jsonGenerator.writeStringField("attr_" + k.getKey(), v.toString()); + } catch (IOException e) { + e.printStackTrace(); + } + }); + + spanData.getResource().getAttributes().forEach((k, v) -> { + try { + jsonGenerator.writeStringField("resource_" + k.getKey(), v.toString()); + } catch (IOException e) { + e.printStackTrace(); + } + }); + + jsonGenerator.writeEndObject(); + } +} diff --git a/integration-tests/opentelemetry-quickstart/src/main/resources/META-INF/resources/test.html b/integration-tests/opentelemetry-quickstart/src/main/resources/META-INF/resources/test.html new file mode 100644 index 0000000000000..d3e7968fdf060 --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/main/resources/META-INF/resources/test.html @@ -0,0 +1 @@ +Test diff --git a/integration-tests/opentelemetry-quickstart/src/main/resources/application.properties b/integration-tests/opentelemetry-quickstart/src/main/resources/application.properties new file mode 100644 index 0000000000000..5a8972253198d --- /dev/null +++ 
b/integration-tests/opentelemetry-quickstart/src/main/resources/application.properties @@ -0,0 +1,3 @@ +# speed up build +quarkus.otel.bsp.schedule.delay=0 +quarkus.otel.bsp.export.timeout=5s diff --git a/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/BaseTest.java b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/BaseTest.java new file mode 100644 index 0000000000000..549d7a9b21304 --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/BaseTest.java @@ -0,0 +1,20 @@ +package io.quarkus.it.opentelemetry; + +import static io.restassured.RestAssured.get; + +import java.util.List; +import java.util.Map; + +import io.restassured.common.mapper.TypeRef; + +public class BaseTest { + + protected List> getSpans() { + return get("/export").body().as(new TypeRef<>() { + }); + } + + protected void buildGlobalTelemetryInstance() { + // Do nothing in JVM mode + } +} diff --git a/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryDisabledIT.java b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryDisabledIT.java new file mode 100644 index 0000000000000..205816851ed98 --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryDisabledIT.java @@ -0,0 +1,19 @@ +package io.quarkus.it.opentelemetry; + +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.OpenTelemetrySdkBuilder; +import io.quarkus.test.junit.QuarkusIntegrationTest; + +@QuarkusIntegrationTest +public class OpenTelemetryDisabledIT extends OpenTelemetryDisabledTest { + @Override + protected void buildGlobalTelemetryInstance() { + // When running native tests the test class is outside the Quarkus application, + // so we need to set the propagator on the GlobalOpenTelemetry instance + OpenTelemetrySdkBuilder builder = OpenTelemetrySdk.builder(); + builder.setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())); + builder.buildAndRegisterGlobal(); + } +} diff --git a/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryDisabledTest.java b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryDisabledTest.java new file mode 100644 index 0000000000000..413fd1f41fd60 --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryDisabledTest.java @@ -0,0 +1,39 @@ +package io.quarkus.it.opentelemetry; + +import static io.restassured.RestAssured.get; +import static io.restassured.RestAssured.given; +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static org.awaitility.Awaitility.await; +import static org.hamcrest.CoreMatchers.is; + +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.junit.QuarkusTestProfile; +import io.quarkus.test.junit.TestProfile; + +@QuarkusTest +@TestProfile(OpenTelemetryDisabledTest.MyProfile.class) +public class OpenTelemetryDisabledTest extends BaseTest { + + @Test + void buildTimeDisabled() { + given() + .when().get("/hello") + .then() + .statusCode(200) + .body(is("Hello from RESTEasy Reactive")); + // Service will start 
nevertheless. + await().atMost(200, MILLISECONDS).until(() -> getSpans().size() == 0); + } + + public static class MyProfile implements QuarkusTestProfile { + + @Override + public Map getConfigOverrides() { + return Map.of("quarkus.otel.enabled", "false"); + } + } +} diff --git a/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryIT.java b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryIT.java new file mode 100644 index 0000000000000..a7e516388cfd1 --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryIT.java @@ -0,0 +1,19 @@ +package io.quarkus.it.opentelemetry; + +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.OpenTelemetrySdkBuilder; +import io.quarkus.test.junit.QuarkusIntegrationTest; + +@QuarkusIntegrationTest +public class OpenTelemetryIT extends OpenTelemetryTest { + @Override + protected void buildGlobalTelemetryInstance() { + // When running native tests the test class is outside the Quarkus application, + // so we need to set the propagator on the GlobalOpenTelemetry instance + OpenTelemetrySdkBuilder builder = OpenTelemetrySdk.builder(); + builder.setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())); + builder.buildAndRegisterGlobal(); + } +} diff --git a/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryTest.java b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryTest.java new file mode 100644 index 0000000000000..51a58984e53c3 --- /dev/null +++ b/integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryTest.java @@ -0,0 +1,24 @@ +package io.quarkus.it.opentelemetry; + +import static io.restassured.RestAssured.get; +import static io.restassured.RestAssured.given; +import static java.util.concurrent.TimeUnit.SECONDS; +import static org.awaitility.Awaitility.await; +import static org.hamcrest.CoreMatchers.is; + +import org.junit.jupiter.api.Test; + +import io.quarkus.test.junit.QuarkusTest; + +@QuarkusTest +public class OpenTelemetryTest extends BaseTest { + @Test + void buildTimeEnabled() { + given() + .when().get("/hello") + .then() + .statusCode(200) + .body(is("Hello from RESTEasy Reactive")); + await().atMost(5, SECONDS).until(() -> getSpans().size() == 1); + } +} diff --git a/integration-tests/opentelemetry-scheduler/src/test/java/io/quarkus/it/opentelemetry/scheduler/OpenTelemetrySchedulerTest.java b/integration-tests/opentelemetry-scheduler/src/test/java/io/quarkus/it/opentelemetry/scheduler/OpenTelemetrySchedulerTest.java index 28652a8ddd4c5..ed3aba8edc23e 100644 --- a/integration-tests/opentelemetry-scheduler/src/test/java/io/quarkus/it/opentelemetry/scheduler/OpenTelemetrySchedulerTest.java +++ b/integration-tests/opentelemetry-scheduler/src/test/java/io/quarkus/it/opentelemetry/scheduler/OpenTelemetrySchedulerTest.java @@ -2,6 +2,7 @@ import static io.restassured.RestAssured.get; import static io.restassured.RestAssured.given; +import static java.util.concurrent.TimeUnit.SECONDS; import static org.awaitility.Awaitility.await; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -9,8 +10,10 @@ import static 
org.junit.jupiter.api.Assertions.assertTrue; import java.time.Duration; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; import org.junit.jupiter.api.Test; @@ -33,7 +36,7 @@ public void schedulerSpanTest() { assertCounter("/scheduler/count/job-definition", 1, Duration.ofSeconds(3)); // ------- SPAN ASSERTS ------- - List<Map<String, Object>> spans = getSpans(); + List<Map<String, Object>> spans = getSpans("myCounter", "myJobDefinition"); assertJobSpan(spans, "myCounter", DURATION_IN_NANOSECONDS); // identity assertJobSpan(spans, "myJobDefinition", DURATION_IN_NANOSECONDS); // identity @@ -62,9 +65,20 @@ private void assertCounter(String counterPath, int expectedCount, Duration timeo } - private List<Map<String, Object>> getSpans() { - return get("/export").body().as(new TypeRef<>() { + private List<Map<String, Object>> getSpans(String... expectedNames) { + AtomicReference<List<Map<String, Object>>> ret = new AtomicReference<>(Collections.emptyList()); + await().atMost(15, SECONDS).until(() -> { + List<Map<String, Object>> spans = get("/export").body().as(new TypeRef<>() { + }); + for (String name : expectedNames) { + if (spans.stream().filter(map -> map.get("name").equals(name)).findAny().isEmpty()) { + return false; + } + } + ret.set(spans); + return true; }); + return ret.get(); } private void assertJobSpan(List<Map<String, Object>> spans, String expectedName, long expectedDuration) { @@ -82,6 +96,7 @@ private void assertJobSpan(List<Map<String, Object>> spans, String expectedName, "' is not longer than 100ms, actual duration: " + delta + " (ns)"); } + @SuppressWarnings("unchecked") private void assertErrorJobSpan(List<Map<String, Object>> spans, String expectedName, long expectedDuration, String expectedErrorMessage) { assertJobSpan(spans, expectedName, expectedDuration); diff --git a/integration-tests/picocli/src/main/java/io/quarkus/it/picocli/EntryWithVersionCommand.java b/integration-tests/picocli/src/main/java/io/quarkus/it/picocli/EntryWithVersionCommand.java new file mode 100644 index 0000000000000..c2121fe991b46 --- /dev/null +++ b/integration-tests/picocli/src/main/java/io/quarkus/it/picocli/EntryWithVersionCommand.java @@ -0,0 +1,9 @@ +package io.quarkus.it.picocli; + +import io.quarkus.picocli.runtime.annotations.TopCommand; +import picocli.CommandLine; + +@TopCommand +@CommandLine.Command(mixinStandardHelpOptions = true, versionProvider = VersionProvider.class) +public class EntryWithVersionCommand { +} diff --git a/integration-tests/picocli/src/main/java/io/quarkus/it/picocli/VersionProvider.java b/integration-tests/picocli/src/main/java/io/quarkus/it/picocli/VersionProvider.java new file mode 100644 index 0000000000000..e06d7c92156f7 --- /dev/null +++ b/integration-tests/picocli/src/main/java/io/quarkus/it/picocli/VersionProvider.java @@ -0,0 +1,22 @@ +package io.quarkus.it.picocli; + +import jakarta.inject.Singleton; + +import org.eclipse.microprofile.config.inject.ConfigProperty; + +import picocli.CommandLine; + +@Singleton +public class VersionProvider implements CommandLine.IVersionProvider { + + private final String version; + + public VersionProvider(@ConfigProperty(name = "some.version", defaultValue = "0.0.1") String version) { + this.version = version; + } + + @Override + public String[] getVersion() throws Exception { + return new String[] { version }; + } +} diff --git a/integration-tests/picocli/src/test/java/io/quarkus/it/picocli/TestVersion.java b/integration-tests/picocli/src/test/java/io/quarkus/it/picocli/TestVersion.java new file mode 100644 index 0000000000000..1218095d3de49 --- /dev/null +++
b/integration-tests/picocli/src/test/java/io/quarkus/it/picocli/TestVersion.java @@ -0,0 +1,25 @@ +package io.quarkus.it.picocli; + +import static io.quarkus.it.picocli.TestUtils.createConfig; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusProdModeTest; + +public class TestVersion { + + @RegisterExtension + static final QuarkusProdModeTest config = createConfig("version-app", EntryWithVersionCommand.class, + VersionProvider.class) + .overrideConfigKey("some.version", "1.1") + .setCommandLineParameters("--version"); + + @Test + public void simpleTest() { + Assertions.assertThat(config.getStartupConsoleOutput()).containsOnlyOnce("1.1"); + Assertions.assertThat(config.getExitCode()).isZero(); + } + +} diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 7b9728847c268..c03b3703e342b 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -359,6 +359,7 @@ rest-client-reactive rest-client-reactive-http2 rest-client-reactive-kotlin-serialization + rest-client-reactive-kotlin-serialization-with-validator rest-client-reactive-multipart rest-client-reactive-stork packaging @@ -373,6 +374,7 @@ micrometer-mp-metrics micrometer-prometheus opentelemetry + opentelemetry-quickstart opentelemetry-spi opentelemetry-jdbc-instrumentation opentelemetry-quartz diff --git a/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/pom.xml b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/pom.xml new file mode 100644 index 0000000000000..dc2f7858ee4b8 --- /dev/null +++ b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/pom.xml @@ -0,0 +1,180 @@ + + + + quarkus-integration-tests-parent + io.quarkus + 999-SNAPSHOT + + 4.0.0 + quarkus-integration-test-rest-client-reactive-kotlin-serialization-with-validator + Quarkus - Integration Tests - REST Client Reactive Kotlin Serialization With Validator + + + + io.quarkus + quarkus-resteasy-reactive-kotlin-serialization + + + io.quarkus + quarkus-rest-client-reactive-kotlin-serialization + + + io.quarkus + quarkus-hibernate-validator + + + + + io.quarkus + quarkus-junit5 + test + + + org.assertj + assertj-core + test + + + io.rest-assured + rest-assured + test + + + org.awaitility + awaitility + test + + + + + io.quarkus + quarkus-resteasy-reactive-kotlin-serialization-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-reactive-kotlin-serialization-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-validator-deployment + ${project.version} + pom + test + + + * + * + + + + + + + src/main/kotlin + src/test/kotlin + + + src/main/resources + true + + + + + org.jetbrains.kotlin + kotlin-maven-plugin + + + compile + compile + + compile + + + + test-compile + test-compile + + test-compile + + + + + + org.jetbrains.kotlin + kotlin-maven-allopen + ${kotlin.version} + + + org.jetbrains.kotlin + kotlin-maven-serialization + ${kotlin.version} + + + + + all-open + kotlinx-serialization + + + + + + + + io.quarkus + quarkus-maven-plugin + + + + build + + + + + + + + + + native-image + + + native + + + + native + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + + + + + diff --git a/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/main/kotlin/io/quarkus/it/rest/ValidationResource.kt 
b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/main/kotlin/io/quarkus/it/rest/ValidationResource.kt new file mode 100644 index 0000000000000..16840cd138473 --- /dev/null +++ b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/main/kotlin/io/quarkus/it/rest/ValidationResource.kt @@ -0,0 +1,21 @@ +package io.quarkus.it.rest + +import jakarta.validation.constraints.Size +import jakarta.ws.rs.GET +import jakarta.ws.rs.Path +import jakarta.ws.rs.PathParam +import jakarta.ws.rs.Produces +import jakarta.ws.rs.core.MediaType + +@Path("/") +class ValidationResource { + + @GET + @Path("/validate/{id}") + @Produces(MediaType.APPLICATION_JSON) + fun validate( + @Size(min = 5, message = "string is too short") @PathParam("id") id: String? + ): String? { + return id + } +} diff --git a/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/main/resources/application.properties b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/main/resources/application.properties new file mode 100644 index 0000000000000..bea6812de9ec1 --- /dev/null +++ b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/main/resources/application.properties @@ -0,0 +1,3 @@ +quarkus.kotlin-serialization.json.encode-defaults=true +quarkus.kotlin-serialization.json.pretty-print=true +quarkus.kotlin-serialization.json.pretty-print-indent=\ \ diff --git a/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/test/kotlin/io/quarkus/it/rest/client/BasicTest.kt b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/test/kotlin/io/quarkus/it/rest/client/BasicTest.kt new file mode 100644 index 0000000000000..7f06646b5174e --- /dev/null +++ b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/test/kotlin/io/quarkus/it/rest/client/BasicTest.kt @@ -0,0 +1,25 @@ +package io.quarkus.it.rest.client + +import io.quarkus.test.junit.QuarkusTest +import io.restassured.RestAssured +import org.assertj.core.api.Assertions +import org.junit.jupiter.api.Test + +@QuarkusTest +open class BasicTest { + + @Test + fun valid() { + val response = RestAssured.with().get("/validate/{id}", "12345") + Assertions.assertThat(response.asString()).isEqualTo("12345") + } + + @Test + fun invalid() { + val response = RestAssured.with().get("/validate/{id}", "1234") + Assertions.assertThat(response.asString()) + .contains("Constraint Violation") + .contains("validate.id") + .contains("string is too short") + } +} diff --git a/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/test/kotlin/io/quarkus/it/rest/client/BasicTestIT.kt b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/test/kotlin/io/quarkus/it/rest/client/BasicTestIT.kt new file mode 100644 index 0000000000000..2f30203d85b88 --- /dev/null +++ b/integration-tests/rest-client-reactive-kotlin-serialization-with-validator/src/test/kotlin/io/quarkus/it/rest/client/BasicTestIT.kt @@ -0,0 +1,5 @@ +package io.quarkus.it.rest.client + +import io.quarkus.test.junit.QuarkusIntegrationTest + +@QuarkusIntegrationTest class BasicTestIT : BasicTest() diff --git a/integration-tests/rest-client-reactive-stork/src/main/java/io/quarkus/it/rest/client/reactive/stork/Client.java b/integration-tests/rest-client-reactive-stork/src/main/java/io/quarkus/it/rest/client/reactive/stork/Client.java index 3c3bcea3042bc..ebf44dc314680 100644 --- 
a/integration-tests/rest-client-reactive-stork/src/main/java/io/quarkus/it/rest/client/reactive/stork/Client.java +++ b/integration-tests/rest-client-reactive-stork/src/main/java/io/quarkus/it/rest/client/reactive/stork/Client.java @@ -2,6 +2,9 @@ import jakarta.ws.rs.Consumes; import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; @@ -11,4 +14,9 @@ public interface Client { @GET @Consumes(MediaType.TEXT_PLAIN) String echo(String name); + + @GET + @Path("/v2/{name}") + @Produces(MediaType.TEXT_PLAIN) + String invoke(@PathParam("name") String name); } diff --git a/integration-tests/rest-client-reactive-stork/src/main/java/io/quarkus/it/rest/client/reactive/stork/ClientCallingResource.java b/integration-tests/rest-client-reactive-stork/src/main/java/io/quarkus/it/rest/client/reactive/stork/ClientCallingResource.java index 2e3a307174dc8..782165a5c3895 100644 --- a/integration-tests/rest-client-reactive-stork/src/main/java/io/quarkus/it/rest/client/reactive/stork/ClientCallingResource.java +++ b/integration-tests/rest-client-reactive-stork/src/main/java/io/quarkus/it/rest/client/reactive/stork/ClientCallingResource.java @@ -3,6 +3,9 @@ import jakarta.enterprise.context.ApplicationScoped; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; import org.eclipse.microprofile.rest.client.inject.RestClient; @@ -17,4 +20,11 @@ public class ClientCallingResource { public String passThrough() { return client.echo("World!"); } + + @GET + @Path("/{name}") + @Produces(MediaType.TEXT_PLAIN) + public String invoke(@PathParam("name") String name) { + return client.invoke(name + "/" + name); + } } diff --git a/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/FastWiremockServer.java b/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/FastWiremockServer.java index ba55cdc0f30f0..a6b277681a970 100644 --- a/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/FastWiremockServer.java +++ b/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/FastWiremockServer.java @@ -1,6 +1,7 @@ package io.quarkus.it.rest.reactive.stork; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathTemplate; import java.util.Map; @@ -25,6 +26,8 @@ int httpsPort() { protected Map<String, String> initWireMock(WireMockServer server) { server.stubFor(WireMock.get("/hello") .willReturn(aResponse().withBody(FAST_RESPONSE).withStatus(200))); + server.stubFor(WireMock.get(urlPathTemplate("/hello/v2/{name}")) + .willReturn(aResponse().withBody(FAST_RESPONSE).withStatus(200))); return Map.of("fast-service", "localhost:8443"); } } diff --git a/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/RestClientReactiveStorkTest.java b/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/RestClientReactiveStorkTest.java index 5adb6924ee71b..884d5ffbbfc0f 100644 --- a/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/RestClientReactiveStorkTest.java +++ 
b/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/RestClientReactiveStorkTest.java @@ -54,4 +54,17 @@ void shouldUseFasterService() { // after hitting the slow endpoint, we should only use the fast one: assertThat(responses).containsOnly(FAST_RESPONSE, FAST_RESPONSE, FAST_RESPONSE); } + + @Test + void shouldUseV2Service() { + Set<String> responses = new HashSet<>(); + + for (int i = 0; i < 2; i++) { + Response response = when().get("/client/quarkus"); + response.then().statusCode(200); + } + + responses.clear(); + + } } diff --git a/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/SlowWiremockServer.java b/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/SlowWiremockServer.java index 7dbc7f74b9b9d..3b1a051f345a6 100644 --- a/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/SlowWiremockServer.java +++ b/integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/SlowWiremockServer.java @@ -1,6 +1,7 @@ package io.quarkus.it.rest.reactive.stork; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathTemplate; import java.util.Map; @@ -26,6 +27,8 @@ protected Map<String, String> initWireMock(WireMockServer server) { server.stubFor(WireMock.get("/hello") .willReturn(aResponse().withFixedDelay(1000) .withBody(SLOW_RESPONSE).withStatus(200))); + server.stubFor(WireMock.get(urlPathTemplate("/hello/v2/{name}")) + .willReturn(aResponse().withFixedDelay(1000).withBody(SLOW_RESPONSE).withStatus(200))); return Map.of("slow-service", "localhost:8444"); } } diff --git a/integration-tests/test-extension/extension/deployment/src/main/java/io/quarkus/extest/deployment/TestRecordProcessor.java b/integration-tests/test-extension/extension/deployment/src/main/java/io/quarkus/extest/deployment/TestRecordProcessor.java new file mode 100644 index 0000000000000..047690092decf --- /dev/null +++ b/integration-tests/test-extension/extension/deployment/src/main/java/io/quarkus/extest/deployment/TestRecordProcessor.java @@ -0,0 +1,16 @@ +package io.quarkus.extest.deployment; + +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.ExecutionTime; +import io.quarkus.deployment.annotations.Record; +import io.quarkus.extest.runtime.records.TestRecord; +import io.quarkus.extest.runtime.records.TestRecordRecorder; + +public class TestRecordProcessor { + + @BuildStep + @Record(ExecutionTime.RUNTIME_INIT) + public void record(TestRecordRecorder recorder) { + recorder.record(new TestRecord("foo", 100)); + } +} diff --git a/integration-tests/test-extension/extension/runtime/src/main/java/io/quarkus/extest/runtime/records/TestRecord.java b/integration-tests/test-extension/extension/runtime/src/main/java/io/quarkus/extest/runtime/records/TestRecord.java new file mode 100644 index 0000000000000..3c2a46ceb4725 --- /dev/null +++ b/integration-tests/test-extension/extension/runtime/src/main/java/io/quarkus/extest/runtime/records/TestRecord.java @@ -0,0 +1,4 @@ +package io.quarkus.extest.runtime.records; + +public record TestRecord(String name, int age) { +} diff --git a/integration-tests/test-extension/extension/runtime/src/main/java/io/quarkus/extest/runtime/records/TestRecordRecorder.java b/integration-tests/test-extension/extension/runtime/src/main/java/io/quarkus/extest/runtime/records/TestRecordRecorder.java new file mode 
100644 index 0000000000000..c8a848b5955e0 --- /dev/null +++ b/integration-tests/test-extension/extension/runtime/src/main/java/io/quarkus/extest/runtime/records/TestRecordRecorder.java @@ -0,0 +1,13 @@ +package io.quarkus.extest.runtime.records; + +import io.quarkus.runtime.annotations.Recorder; + +@Recorder +public class TestRecordRecorder { + + public static TestRecord testRecord; + + public void record(TestRecord testRecord) { + TestRecordRecorder.testRecord = testRecord; + } +} diff --git a/integration-tests/test-extension/tests/src/test/java/io/quarkus/it/extension/TestRecordRecorderTest.java b/integration-tests/test-extension/tests/src/test/java/io/quarkus/it/extension/TestRecordRecorderTest.java new file mode 100644 index 0000000000000..0d9992b922c16 --- /dev/null +++ b/integration-tests/test-extension/tests/src/test/java/io/quarkus/it/extension/TestRecordRecorderTest.java @@ -0,0 +1,18 @@ +package io.quarkus.it.extension; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +import io.quarkus.extest.runtime.records.TestRecordRecorder; +import io.quarkus.test.junit.QuarkusTest; + +@QuarkusTest +public class TestRecordRecorderTest { + + @Test + public void test() { + assertEquals("foo", TestRecordRecorder.testRecord.name()); + assertEquals(100, TestRecordRecorder.testRecord.age()); + } +} diff --git a/pom.xml b/pom.xml index 59bfbbf6876fb..0f6ae0805e873 100644 --- a/pom.xml +++ b/pom.xml @@ -55,7 +55,7 @@ jdbc:postgresql:hibernate_orm_test 4.5.1 - 0.0.101 + 0.0.102 false false @@ -165,7 +165,7 @@ io.quarkus.bot build-reporter-maven-extension - 3.2.2 + 3.3.3