diff --git a/.github/workflows/ci-actions-incremental.yml b/.github/workflows/ci-actions-incremental.yml index 93b3cbc29a2ad..0aa52a1bdb08d 100644 --- a/.github/workflows/ci-actions-incremental.yml +++ b/.github/workflows/ci-actions-incremental.yml @@ -174,7 +174,7 @@ jobs: echo 'EOF' >> $GITHUB_OUTPUT - name: Tar Maven Repo shell: bash - run: tar -I 'pigz -9' -cf maven-repo.tgz -C ~ .m2/repository + run: tar -czf maven-repo.tgz -C ~ .m2/repository - name: Persist Maven Repo uses: actions/upload-artifact@v3 with: diff --git a/bom/application/pom.xml b/bom/application/pom.xml index 37edf36bfee9c..8386a7eb51c00 100644 --- a/bom/application/pom.xml +++ b/bom/application/pom.xml @@ -26,7 +26,7 @@ 1.1.2 2.1.1.Final 3.0.2.Final - 6.2.4.Final + 6.2.5.Final 0.33.0 0.2.4 0.1.15 @@ -54,7 +54,7 @@ 3.3.3 4.0.4 4.0.0 - 3.5.0 + 3.5.1 2.3.0 3.0.3 6.2.6 @@ -105,11 +105,11 @@ 2.0.4.Final 8.0.1.Final - 6.2.0.Final + 6.2.1.Final 7.0.0.Final 2.1 8.0.0.Final - 8.8.2 + 8.9.1 2.2.21 2.2.5.Final 2.2.2.Final @@ -139,7 +139,7 @@ 5.3.0 5.9.3 1.5.0 - 14.0.13.Final + 14.0.14.Final 4.6.2.Final 3.1.5 4.1.94.Final diff --git a/build-parent/pom.xml b/build-parent/pom.xml index 168eb617d01dd..265f81d6e1445 100644 --- a/build-parent/pom.xml +++ b/build-parent/pom.xml @@ -19,7 +19,6 @@ - 3.11.0 1.9.0 1.8.20 @@ -28,7 +27,6 @@ ${scala-maven-plugin.version} - 3.2.1 3.1.2 3.0.0 diff --git a/core/deployment/src/main/java/io/quarkus/deployment/Capability.java b/core/deployment/src/main/java/io/quarkus/deployment/Capability.java index b56057ee024c3..d0830d76d35b8 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/Capability.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/Capability.java @@ -39,6 +39,8 @@ public interface Capability { String HAL = QUARKUS_PREFIX + ".hal"; + String REACTIVE_ROUTES = QUARKUS_PREFIX + ".reactive-routes"; + String REST = QUARKUS_PREFIX + ".rest"; String REST_CLIENT = REST + ".client"; String REST_CLIENT_REACTIVE = REST_CLIENT + ".reactive"; diff 
--git a/core/deployment/src/main/java/io/quarkus/deployment/CodeGenerator.java b/core/deployment/src/main/java/io/quarkus/deployment/CodeGenerator.java index b986340941039..645d64d08a2d8 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/CodeGenerator.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/CodeGenerator.java @@ -1,11 +1,13 @@ package io.quarkus.deployment; +import java.io.BufferedWriter; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -14,6 +16,7 @@ import java.util.ServiceLoader; import java.util.StringJoiner; import java.util.function.Consumer; +import java.util.function.Function; import org.eclipse.microprofile.config.Config; import org.jboss.logging.Logger; @@ -24,7 +27,9 @@ import io.quarkus.bootstrap.prebuild.CodeGenException; import io.quarkus.deployment.codegen.CodeGenData; import io.quarkus.deployment.configuration.BuildTimeConfigurationReader; +import io.quarkus.deployment.configuration.tracker.ConfigTrackingConfig; import io.quarkus.deployment.configuration.tracker.ConfigTrackingValueTransformer; +import io.quarkus.deployment.configuration.tracker.ConfigTrackingWriter; import io.quarkus.deployment.dev.DevModeContext; import io.quarkus.deployment.dev.DevModeContext.ModuleInfo; import io.quarkus.maven.dependency.ResolvedDependency; @@ -32,6 +37,7 @@ import io.quarkus.paths.PathCollection; import io.quarkus.runtime.LaunchMode; import io.quarkus.runtime.util.ClassPathUtils; +import io.smallrye.config.SmallRyeConfig; /** * A set of methods to initialize and execute {@link CodeGenProvider}s. 
@@ -187,47 +193,104 @@ public static boolean trigger(ClassLoader deploymentClassLoader, } /** - * Initializes an application build time configuration and returns current values of properties - * passed in as {@code originalProperties}. + * Initializes an application build time configuration and dumps current values of properties + * passed in as {@code previouslyRecordedProperties} to a file. * * @param appModel application model * @param launchMode launch mode * @param buildSystemProps build system (or project) properties * @param deploymentClassLoader build classloader - * @param originalProperties properties to read from the initialized configuration - * @return current values of the passed in original properties + * @param previouslyRecordedProperties properties to read from the initialized configuration + * @param outputFile output file */ - public static Properties readCurrentConfigValues(ApplicationModel appModel, String launchMode, - Properties buildSystemProps, - QuarkusClassLoader deploymentClassLoader, Properties originalProperties) { + public static void dumpCurrentConfigValues(ApplicationModel appModel, String launchMode, Properties buildSystemProps, + QuarkusClassLoader deploymentClassLoader, Properties previouslyRecordedProperties, + Path outputFile) { + final LaunchMode mode = LaunchMode.valueOf(launchMode); + if (previouslyRecordedProperties.isEmpty()) { + try { + readConfig(appModel, mode, buildSystemProps, deploymentClassLoader, configReader -> { + var config = configReader.initConfiguration(mode, buildSystemProps, appModel.getPlatformProperties()); + final Map allProps = new HashMap<>(); + for (String name : config.getPropertyNames()) { + allProps.put(name, ConfigTrackingValueTransformer.asString(config.getConfigValue(name))); + } + ConfigTrackingWriter.write(allProps, + config.unwrap(SmallRyeConfig.class).getConfigMapping(ConfigTrackingConfig.class), + configReader.readConfiguration(config), + outputFile); + return null; + }); + } catch 
(CodeGenException e) { + throw new RuntimeException("Failed to load application configuration", e); + } + return; + } Config config = null; try { - config = getConfig(appModel, LaunchMode.valueOf(launchMode), buildSystemProps, deploymentClassLoader); + config = getConfig(appModel, mode, buildSystemProps, deploymentClassLoader); } catch (CodeGenException e) { throw new RuntimeException("Failed to load application configuration", e); } var valueTransformer = ConfigTrackingValueTransformer.newInstance(config); - final Properties currentValues = new Properties(originalProperties.size()); - for (var originalProp : originalProperties.entrySet()) { - var name = originalProp.getKey().toString(); + final Properties currentValues = new Properties(previouslyRecordedProperties.size()); + for (var prevProp : previouslyRecordedProperties.entrySet()) { + var name = prevProp.getKey().toString(); var currentValue = config.getConfigValue(name); final String current = valueTransformer.transform(name, currentValue); - if (!originalProp.getValue().equals(current)) { - log.info("Option " + name + " has changed since the last build from " - + originalProp.getValue() + " to " + current); + var originalValue = prevProp.getValue(); + if (!originalValue.equals(current)) { + log.info("Option " + name + " has changed since the last build from " + originalValue + " to " + current); } if (current != null) { currentValues.put(name, current); } } - return currentValues; + + final List names = new ArrayList<>(currentValues.stringPropertyNames()); + Collections.sort(names); + + final Path outputDir = outputFile.getParent(); + if (outputDir != null && !Files.exists(outputDir)) { + try { + Files.createDirectories(outputDir); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + try (BufferedWriter writer = Files.newBufferedWriter(outputFile)) { + for (var name : names) { + ConfigTrackingWriter.write(writer, name, currentValues.getProperty(name)); + } + } catch (IOException e) { + 
throw new UncheckedIOException(e); + } } public static Config getConfig(ApplicationModel appModel, LaunchMode launchMode, Properties buildSystemProps, QuarkusClassLoader deploymentClassLoader) throws CodeGenException { + return readConfig(appModel, launchMode, buildSystemProps, deploymentClassLoader, + configReader -> configReader.initConfiguration(launchMode, buildSystemProps, appModel.getPlatformProperties())); + } + + public static T readConfig(ApplicationModel appModel, LaunchMode launchMode, Properties buildSystemProps, + QuarkusClassLoader deploymentClassLoader, Function function) + throws CodeGenException { final Map> unavailableConfigServices = getUnavailableConfigServices(appModel.getAppArtifact(), deploymentClassLoader); + final ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader(); if (!unavailableConfigServices.isEmpty()) { + var sb = new StringBuilder(); + sb.append( + "The following services are not (yet) available and will be disabled during configuration initialization at the current build phase:"); + for (Map.Entry> missingService : unavailableConfigServices.entrySet()) { + sb.append(System.lineSeparator()); + for (String s : missingService.getValue()) { + sb.append("- ").append(s); + } + } + log.warn(sb.toString()); + final Map> allConfigServices = new HashMap<>(unavailableConfigServices.size()); final Map allowedConfigServices = new HashMap<>(unavailableConfigServices.size()); final Map bannedConfigServices = new HashMap<>(unavailableConfigServices.size()); @@ -266,14 +329,15 @@ public static Config getConfig(ApplicationModel appModel, LaunchMode launchMode, configClBuilder.addBannedElement(new MemoryClassPathElement(bannedConfigServices, true)); } deploymentClassLoader = configClBuilder.build(); + Thread.currentThread().setContextClassLoader(deploymentClassLoader); } try { - return new BuildTimeConfigurationReader(deploymentClassLoader).initConfiguration(launchMode, buildSystemProps, - 
appModel.getPlatformProperties()); + return function.apply(new BuildTimeConfigurationReader(deploymentClassLoader)); } catch (Exception e) { throw new CodeGenException("Failed to initialize application configuration", e); } finally { if (!unavailableConfigServices.isEmpty()) { + Thread.currentThread().setContextClassLoader(originalClassLoader); deploymentClassLoader.close(); } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/tracker/ConfigTrackingValueTransformer.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/tracker/ConfigTrackingValueTransformer.java index 7c098a6b722cf..df8b7fd4d9ed1 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/tracker/ConfigTrackingValueTransformer.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/tracker/ConfigTrackingValueTransformer.java @@ -53,7 +53,11 @@ public static ConfigTrackingValueTransformer newInstance(ConfigTrackingConfig co * @return non-null string value for a given {@link org.eclipse.microprofile.config.ConfigValue} instance */ public static String asString(ConfigValue value) { - return value == null ? NOT_CONFIGURED : value.getValue(); + if (value == null) { + return NOT_CONFIGURED; + } + var strValue = value.getValue(); + return strValue == null ? 
NOT_CONFIGURED : strValue; } private final String userHomeDir; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/tracker/ConfigTrackingWriter.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/tracker/ConfigTrackingWriter.java index 20c51265c5032..f535eb9411946 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/tracker/ConfigTrackingWriter.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/tracker/ConfigTrackingWriter.java @@ -36,7 +36,7 @@ private static boolean matches(String name, List patterns) { /** * Configuration writer that will persist collected configuration options and their values - * to a file. + * to a file derived from the config. */ public static void write(Map readOptions, ConfigTrackingConfig config, BuildTimeConfigurationReader.ReadResult configReadResult, @@ -64,6 +64,15 @@ public static void write(Map readOptions, ConfigTrackingConfig c } } + write(readOptions, config, configReadResult, file); + } + + /** + * Configuration writer that will persist collected configuration options and their values + * to a file. 
+ */ + public static void write(Map readOptions, ConfigTrackingConfig config, + BuildTimeConfigurationReader.ReadResult configReadResult, Path file) { final List excludePatterns = config.getExcludePatterns(); final ConfigTrackingValueTransformer valueTransformer = ConfigTrackingValueTransformer.newInstance(config); @@ -99,12 +108,107 @@ public static void write(Map readOptions, ConfigTrackingConfig c */ public static void write(Writer writer, String name, String value) throws IOException { if (value != null) { - // escape the backslash before persisting - value = value.replace("\\", "\\\\"); + name = toWritableValue(name, true, true); + value = toWritableValue(value, false, true); writer.write(name); writer.write("="); writer.write(value); writer.write(System.lineSeparator()); } } + + /* + * Converts unicodes to encoded \uxxxx and escapes + * special characters with a preceding slash + */ + + /** + * Escapes characters that are expected to be escaped when {@link java.util.Properties} load + * files from disk. 
+ * + * @param str property name or value + * @param escapeSpace whether to escape a whitespace (should be true for property names) + * @param escapeUnicode whether to converts unicodes to encoded \uxxxx + * @return property name or value that can be written to a file + */ + private static String toWritableValue(String str, boolean escapeSpace, boolean escapeUnicode) { + int len = str.length(); + int bufLen = len * 2; + if (bufLen < 0) { + bufLen = Integer.MAX_VALUE; + } + StringBuilder outBuffer = new StringBuilder(bufLen); + + for (int x = 0; x < len; x++) { + char aChar = str.charAt(x); + // Handle common case first, selecting largest block that + // avoids the specials below + if ((aChar > 61) && (aChar < 127)) { + if (aChar == '\\') { + outBuffer.append('\\'); + outBuffer.append('\\'); + continue; + } + outBuffer.append(aChar); + continue; + } + switch (aChar) { + case ' ': + if (x == 0 || escapeSpace) { + outBuffer.append('\\'); + } + outBuffer.append(' '); + break; + case '\t': + outBuffer.append('\\'); + outBuffer.append('t'); + break; + case '\n': + outBuffer.append('\\'); + outBuffer.append('n'); + break; + case '\r': + outBuffer.append('\\'); + outBuffer.append('r'); + break; + case '\f': + outBuffer.append('\\'); + outBuffer.append('f'); + break; + case '=': // Fall through + case ':': // Fall through + case '#': // Fall through + case '!': + outBuffer.append('\\'); + outBuffer.append(aChar); + break; + default: + if (((aChar < 0x0020) || (aChar > 0x007e)) & escapeUnicode) { + outBuffer.append('\\'); + outBuffer.append('u'); + outBuffer.append(toHex((aChar >> 12) & 0xF)); + outBuffer.append(toHex((aChar >> 8) & 0xF)); + outBuffer.append(toHex((aChar >> 4) & 0xF)); + outBuffer.append(toHex(aChar & 0xF)); + } else { + outBuffer.append(aChar); + } + } + } + return outBuffer.toString(); + } + + /** + * Convert a nibble to a hex character + * + * @param nibble the nibble to convert. 
+ */ + private static char toHex(int nibble) { + return hexDigit[(nibble & 0xF)]; + } + + /** A table of hex digits */ + private static final char[] hexDigit = { + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' + }; } diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java index 01598a936e4ca..a8d7fbcd40be5 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java @@ -17,6 +17,7 @@ import org.gradle.api.file.FileCollection; import org.gradle.api.file.RegularFile; import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.provider.ListProperty; import org.gradle.api.provider.MapProperty; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; @@ -254,6 +255,10 @@ public MapProperty getQuarkusBuildProperties() { return quarkusBuildProperties; } + public ListProperty getCachingRelevantProperties() { + return cachingRelevantProperties; + } + public void set(String name, @Nullable String value) { quarkusBuildProperties.put(String.format("quarkus.%s", name), value); } diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/AbstractQuarkusExtension.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/AbstractQuarkusExtension.java index 2e97ca979576c..8d59abd2a90ef 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/AbstractQuarkusExtension.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/AbstractQuarkusExtension.java @@ -39,6 +39,7 @@ public abstract class AbstractQuarkusExtension { protected 
final Property finalName; private final MapProperty forcedPropertiesProperty; protected final MapProperty quarkusBuildProperties; + protected final ListProperty cachingRelevantProperties; private final ListProperty ignoredEntries; private final FileCollection classpath; private final Property baseConfig; @@ -52,6 +53,7 @@ protected AbstractQuarkusExtension(Project project) { this.finalName.convention(project.provider(() -> String.format("%s-%s", project.getName(), project.getVersion()))); this.forcedPropertiesProperty = project.getObjects().mapProperty(String.class, String.class); this.quarkusBuildProperties = project.getObjects().mapProperty(String.class, String.class); + this.cachingRelevantProperties = project.getObjects().listProperty(String.class).value(List.of("quarkus[.].*")); this.ignoredEntries = project.getObjects().listProperty(String.class); this.ignoredEntries.convention( project.provider(() -> baseConfig().packageConfig().userConfiguredIgnoredEntries.orElse(emptyList()))); diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/BaseConfig.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/BaseConfig.java index 6c57a8590af4d..90c229c043b9b 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/BaseConfig.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/BaseConfig.java @@ -1,6 +1,9 @@ package io.quarkus.gradle.tasks; +import java.util.List; import java.util.Map; +import java.util.function.Predicate; +import java.util.regex.Pattern; import java.util.stream.Collectors; import io.quarkus.deployment.pkg.PackageConfig; @@ -18,7 +21,7 @@ final class BaseConfig { private final Manifest manifest; private final PackageConfig packageConfig; - private final Map quarkusProperties; + private final Map configMap; // Note: EffectiveConfig has all the code to load the configurations from all the sources. 
BaseConfig(EffectiveConfig config) { @@ -31,8 +34,7 @@ final class BaseConfig { manifest.attributes(packageConfig.manifest.attributes); packageConfig.manifest.manifestSections.forEach((section, attribs) -> manifest.attributes(attribs, section)); - this.quarkusProperties = config.configMap().entrySet().stream().filter(e -> e.getKey().startsWith("quarkus.")) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + configMap = config.configMap(); } PackageConfig packageConfig() { @@ -47,7 +49,12 @@ Manifest manifest() { return manifest; } - Map quarkusProperties() { - return quarkusProperties; + Map cachingRelevantProperties(List propertyPatterns) { + List patterns = propertyPatterns.stream().map(s -> "^(" + s + ")$").map(Pattern::compile) + .collect(Collectors.toList()); + Predicate> keyPredicate = e -> patterns.stream().anyMatch(p -> p.matcher(e.getKey()).matches()); + return configMap.entrySet().stream() + .filter(keyPredicate) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } } diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/EffectiveConfig.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/EffectiveConfig.java index db3c8c3215723..3e03f70678474 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/EffectiveConfig.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/EffectiveConfig.java @@ -117,6 +117,9 @@ static Map generateFullConfigMap(SmallRyeConfig config) { static SmallRyeConfig buildConfig(String profile, List configSources) { return ConfigUtils.emptyConfigBuilder() .setAddDiscoveredSecretKeysHandlers(false) + // We add our own sources for environment, system-properties and microprofile-config.properties, + // no need to include those twice. 
+ .setAddDefaultSources(false) .withSources(configSources) .withProfile(profile) .build(); diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildTask.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildTask.java index b4c664b88e2e1..106bca4941130 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildTask.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildTask.java @@ -15,6 +15,7 @@ import org.gradle.api.file.FileCopyDetails; import org.gradle.api.file.FileSystemOperations; import org.gradle.api.logging.LogLevel; +import org.gradle.api.provider.ListProperty; import org.gradle.api.tasks.Classpath; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.StopExecutionException; @@ -57,7 +58,8 @@ public FileCollection getClasspath() { @Input public Map getCachingRelevantInput() { - return extension().baseConfig().quarkusProperties(); + ListProperty vars = extension().getCachingRelevantProperties(); + return extension().baseConfig().cachingRelevantProperties(vars.get()); } PackageConfig.BuiltInType packageType() { diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java index 83ccc825f07d2..3e9c6c02cf304 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java @@ -13,6 +13,7 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.DirectoryProperty; import org.gradle.api.file.FileCollection; +import org.gradle.api.provider.ListProperty; import org.gradle.api.tasks.CacheableTask; import 
org.gradle.api.tasks.CompileClasspath; import org.gradle.api.tasks.Input; @@ -65,7 +66,17 @@ public void setCompileClasspath(Configuration compileClasspath) { @Input public Map getCachingRelevantInput() { - return extension().baseConfig().quarkusProperties(); + ListProperty vars = extension().getCachingRelevantProperties(); + return extension().baseConfig().cachingRelevantProperties(vars.get()); + } + + @Input + Map getInternalTaskConfig() { + // Necessary to distinguish the different `quarkusGenerateCode*` tasks, because the task path is _not_ + // an input to the cache key. We need to declare these properties as inputs, because those influence the + // execution. + // Documented here: https://docs.gradle.org/current/userguide/build_cache.html#sec:task_output_caching_inputs + return Map.of("launchMode", launchMode.name(), "inputSourceSetName", inputSourceSetName); } @InputFiles diff --git a/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/CachingTest.java b/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/CachingTest.java index 219f60d456a23..d1d27359e8215 100644 --- a/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/CachingTest.java +++ b/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/CachingTest.java @@ -1,18 +1,22 @@ package io.quarkus.gradle.tasks; +import static org.assertj.core.api.Assumptions.assumeThat; import static org.junit.jupiter.params.provider.Arguments.arguments; import java.io.File; import java.io.IOException; +import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.UUID; import java.util.function.Predicate; import java.util.stream.Collectors; import 
java.util.stream.Stream; @@ -25,6 +29,7 @@ import org.gradle.testkit.runner.BuildTask; import org.gradle.testkit.runner.GradleRunner; import org.gradle.testkit.runner.TaskOutcome; +import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.io.TempDir; import org.junit.jupiter.params.ParameterizedTest; @@ -33,13 +38,122 @@ @ExtendWith(SoftAssertionsExtension.class) public class CachingTest { + private static final Map ALL_SUCCESS = Map.of( + ":quarkusGenerateCode", TaskOutcome.SUCCESS, + ":quarkusGenerateCodeDev", TaskOutcome.SUCCESS, + ":quarkusGenerateCodeTests", TaskOutcome.SUCCESS, + ":quarkusAppPartsBuild", TaskOutcome.SUCCESS, + ":quarkusDependenciesBuild", TaskOutcome.SUCCESS, + ":quarkusBuild", TaskOutcome.SUCCESS, + ":build", TaskOutcome.SUCCESS); + private static final Map ALL_UP_TO_DATE = Map.of( + ":quarkusGenerateCode", TaskOutcome.UP_TO_DATE, + // intentionally omit ":quarkusGenerateCodeDev", it can be UP_TO_DATE or SUCCESS + ":quarkusGenerateCodeTests", TaskOutcome.UP_TO_DATE, + ":quarkusAppPartsBuild", TaskOutcome.UP_TO_DATE, + ":quarkusDependenciesBuild", TaskOutcome.UP_TO_DATE, + ":quarkusBuild", TaskOutcome.UP_TO_DATE, + ":build", TaskOutcome.UP_TO_DATE); + public static final Map FROM_CACHE = Map.of( + ":quarkusGenerateCode", TaskOutcome.FROM_CACHE, + ":quarkusGenerateCodeDev", TaskOutcome.SUCCESS, + ":quarkusGenerateCodeTests", TaskOutcome.FROM_CACHE, + ":quarkusAppPartsBuild", TaskOutcome.FROM_CACHE, + ":quarkusDependenciesBuild", TaskOutcome.SUCCESS, + ":quarkusBuild", TaskOutcome.SUCCESS, + ":build", TaskOutcome.SUCCESS); + @InjectSoftAssertions SoftAssertions soft; @TempDir Path testProjectDir; - @TempDir - Path saveDir; + + @Test + void envChangeInvalidatesBuild() throws Exception { + // Declare the environment variables FOO_ENV_VAR and FROM_DOT_ENV_FILE as relevant for the build. 
+ prepareGradleBuildProject(String.join("\n", + "cachingRelevantProperties.add(\"FOO_ENV_VAR\")", + "cachingRelevantProperties.add(\"FROM_DOT_ENV_FILE\")")); + + String[] arguments = List.of("build", "--info", "--stacktrace", "--build-cache", "--configuration-cache", + "-Dquarkus.package.type=fast-jar", + "-Dquarkus.randomized.value=" + UUID.randomUUID()) + .toArray(new String[0]); + + Map env = Map.of(); + + assertBuildResult("initial", gradleBuild(rerunTasks(arguments), env), ALL_SUCCESS); + assertBuildResult("initial rebuild", gradleBuild(arguments, env), ALL_UP_TO_DATE); + + // Change the relevant environment, must rebuild + env = Map.of("FOO_ENV_VAR", "some-value"); + assertBuildResult("set FOO_ENV_VAR", gradleBuild(arguments, env), ALL_SUCCESS); + assertBuildResult("set FOO_ENV_VAR rebuild", gradleBuild(arguments, env), ALL_UP_TO_DATE); + + // Change the environment file again, must rebuild + env = Map.of("FOO_ENV_VAR", "some-other-value"); + assertBuildResult("change FOO_ENV_VAR", gradleBuild(arguments, env), ALL_SUCCESS); + assertBuildResult("change FOO_ENV_VAR rebuild", gradleBuild(arguments, env), ALL_UP_TO_DATE); + + // Change an unrelated environment variable, all up-to-date + env = Map.of("SOME_UNRELATED", "meep"); + assertBuildResult("SOME_UNRELATED", gradleBuild(arguments, env), FROM_CACHE); + } + + @Test + void dotEnvChangeInvalidatesBuild() throws Exception { + var dotEnvFile = Paths.get(System.getProperty("user.dir"), ".env"); + // If the local environment has a ~/.env file, then skip this test - do not mess up a user's environment. + assumeThat(dotEnvFile) + .describedAs("Gradle plugin CachingTest.dotEnvChangeInvalidatesBuild requires missing ~/.env file"); + + try { + // Declare the environment variables FOO_ENV_VAR and FROM_DOT_ENV_FILE as relevant for the build. 
+ prepareGradleBuildProject(String.join("\n", + "cachingRelevantProperties.add(\"FOO_ENV_VAR\")", + "cachingRelevantProperties.add(\"FROM_DOT_ENV_FILE\")")); + + String[] arguments = List.of("build", "--info", "--stacktrace", "--build-cache", "--configuration-cache", + "-Dquarkus.package.type=fast-jar", + "-Dquarkus.randomized.value=" + UUID.randomUUID()) + .toArray(new String[0]); + + Map env = Map.of(); + + assertBuildResult("initial", gradleBuild(rerunTasks(arguments), env), ALL_SUCCESS); + assertBuildResult("initial rebuild", gradleBuild(arguments, env), ALL_UP_TO_DATE); + + // Change the .env file, must rebuild + + Files.write(dotEnvFile, List.of("FROM_DOT_ENV_FILE=env file value")); + assertBuildResult("set FROM_DOT_ENV_FILE", gradleBuild(arguments, env), ALL_SUCCESS); + assertBuildResult("set FROM_DOT_ENV_FILE rebuild", gradleBuild(arguments, env), ALL_UP_TO_DATE); + + // Change the .env file again, must rebuild + + Files.write(dotEnvFile, List.of("FROM_DOT_ENV_FILE=new value")); + assertBuildResult("change FROM_DOT_ENV_FILE", gradleBuild(arguments, env), ALL_SUCCESS); + assertBuildResult("change FROM_DOT_ENV_FILE rebuild", gradleBuild(arguments, env), ALL_UP_TO_DATE); + + // OTHER_ENV_VAR is not declared as relevant for the build, skipping its check + Files.write(dotEnvFile, List.of("FROM_DOT_ENV_FILE=new value", "OTHER_ENV_VAR=hello")); + assertBuildResult("OTHER_ENV_VAR", gradleBuild(arguments, env), ALL_UP_TO_DATE); + + // remove relevant var from .env file + Files.write(dotEnvFile, List.of("OTHER_ENV_VAR=hello")); + assertBuildResult("remove FROM_DOT_ENV_FILE", gradleBuild(arguments, env), FROM_CACHE); + + // Delete the .env file, must rebuild + + Files.deleteIfExists(dotEnvFile); + + BuildResult result = gradleBuild(arguments, env); + assertBuildResult("delete .env file", result, ALL_UP_TO_DATE); + } finally { + Files.deleteIfExists(dotEnvFile); + } + } static Stream gradleCaching() { return Stream.of("fast-jar", "uber-jar", "mutable-jar", 
"legacy-jar", "native-sources") @@ -50,12 +164,8 @@ static Stream gradleCaching() { @ParameterizedTest @MethodSource - void gradleCaching(String packageType, boolean simulateCI, String outputDir) throws Exception { - URL url = getClass().getClassLoader().getResource("io/quarkus/gradle/tasks/caching/main"); - - FileUtils.copyDirectory(new File(url.toURI()), testProjectDir.toFile()); - - FileUtils.copyFile(new File("../gradle.properties"), testProjectDir.resolve("gradle.properties").toFile()); + void gradleCaching(String packageType, boolean simulateCI, String outputDir, @TempDir Path saveDir) throws Exception { + prepareGradleBuildProject(""); Map env = simulateCI ? Map.of("CI", "yes") : Map.of(); @@ -66,46 +176,9 @@ void gradleCaching(String packageType, boolean simulateCI, String outputDir) thr args.add("-Dquarkus.package.outputDirectory=" + outputDir); } String[] arguments = args.toArray(new String[0]); - args.add("--rerun-tasks"); - String[] initialArguments = args.toArray(new String[0]); - BuildResult result = GradleRunner.create() - .withPluginClasspath() - .withProjectDir(testProjectDir.toFile()) - .withArguments(initialArguments) - .withEnvironment(env) - .build(); - Map taskResults = taskResults(result); - - soft.assertThat(taskResults) - .describedAs("output: %s", result.getOutput()) - .containsEntry(":quarkusGenerateCode", TaskOutcome.SUCCESS) - .containsEntry(":quarkusGenerateCodeDev", TaskOutcome.SUCCESS) - .containsEntry(":quarkusGenerateCodeTests", TaskOutcome.SUCCESS) - .containsEntry(":quarkusAppPartsBuild", TaskOutcome.SUCCESS) - .containsEntry(":quarkusDependenciesBuild", TaskOutcome.SUCCESS) - .containsEntry(":quarkusBuild", TaskOutcome.SUCCESS) - .containsEntry(":build", TaskOutcome.SUCCESS); - - // A follow-up 'build' does nothing, everything's up-to-date - - result = GradleRunner.create() - .withPluginClasspath() - .withProjectDir(testProjectDir.toFile()) - .withArguments(arguments) - .withEnvironment(env) - .build(); - taskResults = 
taskResults(result); - - soft.assertThat(taskResults) - .describedAs("output: %s", result.getOutput()) - .containsEntry(":quarkusGenerateCode", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusGenerateCodeDev", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusGenerateCodeTests", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusAppPartsBuild", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusDependenciesBuild", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusBuild", TaskOutcome.UP_TO_DATE) - .containsEntry(":build", TaskOutcome.UP_TO_DATE); + assertBuildResult("initial", gradleBuild(rerunTasks(arguments), env), ALL_SUCCESS); + assertBuildResult("initial rebuild", gradleBuild(arguments, env), ALL_UP_TO_DATE); // Purge the whole build/ directory @@ -118,13 +191,8 @@ void gradleCaching(String packageType, boolean simulateCI, String outputDir) thr // A follow-up 'build', without a build/ directory should fetch everything from the cache / pull the dependencies - result = GradleRunner.create() - .withPluginClasspath() - .withProjectDir(testProjectDir.toFile()) - .withArguments(arguments) - .withEnvironment(env) - .build(); - taskResults = taskResults(result); + BuildResult result = gradleBuild(arguments, env); + Map taskResults = taskResults(result); Path quarkusBuildGen = Paths.get("quarkus-build", "gen"); boolean isFastJar = "fast-jar".equals(packageType); @@ -145,23 +213,44 @@ void gradleCaching(String packageType, boolean simulateCI, String outputDir) thr // A follow-up 'build' does nothing, everything's up-to-date - result = GradleRunner.create() + result = gradleBuild(arguments, env); + assertBuildResult("follow-up", result, ALL_UP_TO_DATE); + } + + private static String[] rerunTasks(String[] arguments) { + String[] args = Arrays.copyOf(arguments, arguments.length + 1); + args[arguments.length] = "--rerun-tasks"; + return args; + } + + private BuildResult gradleBuild(String[] arguments, Map env) { + return GradleRunner.create() .withPluginClasspath() 
.withProjectDir(testProjectDir.toFile()) .withArguments(arguments) .withEnvironment(env) .build(); - taskResults = taskResults(result); + } + private void assertBuildResult(String step, BuildResult result, + Map expected) { + Map taskResults = taskResults(result); soft.assertThat(taskResults) - .describedAs("output: %s", result.getOutput()) - .containsEntry(":quarkusGenerateCode", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusGenerateCodeDev", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusGenerateCodeTests", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusAppPartsBuild", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusDependenciesBuild", TaskOutcome.UP_TO_DATE) - .containsEntry(":quarkusBuild", TaskOutcome.UP_TO_DATE) - .containsEntry(":build", TaskOutcome.UP_TO_DATE); + .describedAs("output: %s\n\nSTEP: %s", result.getOutput(), step) + .containsAllEntriesOf(expected); + } + + private void prepareGradleBuildProject(String additionalQuarkusConfig) throws IOException, URISyntaxException { + URL url = getClass().getClassLoader().getResource("io/quarkus/gradle/tasks/caching/main"); + + FileUtils.copyDirectory(new File(url.toURI()), testProjectDir.toFile()); + + // Randomize the build script + String buildScript = Files.readString(testProjectDir.resolve("build.gradle.kts")); + buildScript = buildScript.replace("// ADDITIONAL_CONFIG", additionalQuarkusConfig); + Files.writeString(testProjectDir.resolve("build.gradle.kts"), buildScript); + + FileUtils.copyFile(new File("../gradle.properties"), testProjectDir.resolve("gradle.properties").toFile()); } static Map taskResults(BuildResult result) { diff --git a/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/caching/main/build.gradle.kts b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/caching/main/build.gradle.kts index 8618797d16203..8fb484c22a651 100644 --- 
a/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/caching/main/build.gradle.kts +++ b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/caching/main/build.gradle.kts @@ -25,4 +25,7 @@ quarkus { manifest { attributes(mapOf("Manifest-Attribute" to "some-value")) } + + // The following line is replaced by the tests in `CachingTest` + // ADDITIONAL_CONFIG } diff --git a/devtools/gradle/gradle/libs.versions.toml b/devtools/gradle/gradle/libs.versions.toml index 524d9ee6dec99..20615026e8be0 100644 --- a/devtools/gradle/gradle/libs.versions.toml +++ b/devtools/gradle/gradle/libs.versions.toml @@ -1,5 +1,5 @@ [versions] -plugin-publish = "1.2.0" +plugin-publish = "1.2.1" # updating Kotlin here makes QuarkusPluginTest > shouldNotFailOnProjectDependenciesWithoutMain(Path) fail kotlin = "1.8.10" diff --git a/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java index 2ae61b88271c5..63f8e4aac717a 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java @@ -185,7 +185,9 @@ private MavenArtifactResolver artifactResolver(QuarkusBootstrapMojo mojo, Launch .setUserSettings(mojo.mavenSession().getRequest().getUserSettingsFile()) .setCurrentProject(mojo.mavenProject().getFile().toString()) .setPreferPomsFromWorkspace(true) - .setProjectModelProvider(getProjectMap(mojo.mavenSession())::get)); + .setProjectModelProvider(getProjectMap(mojo.mavenSession())::get) + // pass the repositories since Maven extensions could manipulate repository configs + .setRemoteRepositories(mojo.remoteRepositories())); } // PROD packaging mode with workspace discovery disabled return MavenArtifactResolver.builder() diff --git a/devtools/maven/src/main/java/io/quarkus/maven/TrackConfigChangesMojo.java 
b/devtools/maven/src/main/java/io/quarkus/maven/TrackConfigChangesMojo.java index b8b43e94db09b..13f4379079981 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/TrackConfigChangesMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/TrackConfigChangesMojo.java @@ -1,16 +1,11 @@ package io.quarkus.maven; import java.io.BufferedReader; -import java.io.BufferedWriter; import java.io.File; import java.io.IOException; -import java.io.UncheckedIOException; import java.lang.reflect.Method; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; import java.util.Properties; import org.apache.maven.plugin.MojoExecutionException; @@ -23,7 +18,6 @@ import io.quarkus.bootstrap.app.CuratedApplication; import io.quarkus.bootstrap.classloading.QuarkusClassLoader; import io.quarkus.bootstrap.model.ApplicationModel; -import io.quarkus.deployment.configuration.tracker.ConfigTrackingWriter; import io.quarkus.runtime.LaunchMode; /** @@ -58,6 +52,12 @@ public class TrackConfigChangesMojo extends QuarkusBootstrapMojo { @Parameter(property = "quarkus.recorded-build-config.file", required = false) File recordedBuildConfigFile; + /** + * Whether to dump the current build configuration in case the configuration from the previous build isn't found + */ + @Parameter(defaultValue = "false", property = "quarkus.track-config-changes.dump-current-when-recorded-unavailable") + boolean dumpCurrentWhenRecordedUnavailable; + @Override protected boolean beforeExecute() throws MojoExecutionException, MojoFailureException { if (skip) { @@ -102,16 +102,17 @@ protected void doExecute() throws MojoExecutionException, MojoFailureException { compareFile = recordedBuildConfigDirectory.toPath().resolve(this.recordedBuildConfigFile.toPath()); } - if (!Files.exists(compareFile)) { + final Properties compareProps = new Properties(); + if (Files.exists(compareFile)) { + try (BufferedReader reader = 
Files.newBufferedReader(compareFile)) { + compareProps.load(reader); + } catch (IOException e) { + throw new RuntimeException("Failed to read " + compareFile, e); + } + } else if (!dumpCurrentWhenRecordedUnavailable) { getLog().info(compareFile + " not found"); return; } - final Properties compareProps = new Properties(); - try (BufferedReader reader = Files.newBufferedReader(compareFile)) { - compareProps.load(reader); - } catch (IOException e) { - throw new RuntimeException("Failed to read " + compareFile, e); - } CuratedApplication curatedApplication = null; QuarkusClassLoader deploymentClassLoader = null; @@ -124,11 +125,11 @@ protected void doExecute() throws MojoExecutionException, MojoFailureException { Thread.currentThread().setContextClassLoader(deploymentClassLoader); final Class codeGenerator = deploymentClassLoader.loadClass("io.quarkus.deployment.CodeGenerator"); - final Method dumpConfig = codeGenerator.getMethod("readCurrentConfigValues", ApplicationModel.class, String.class, - Properties.class, QuarkusClassLoader.class, Properties.class); - actualProps = (Properties) dumpConfig.invoke(null, curatedApplication.getApplicationModel(), + final Method dumpConfig = codeGenerator.getMethod("dumpCurrentConfigValues", ApplicationModel.class, String.class, + Properties.class, QuarkusClassLoader.class, Properties.class, Path.class); + dumpConfig.invoke(null, curatedApplication.getApplicationModel(), launchMode.name(), getBuildSystemProperties(true), - deploymentClassLoader, compareProps); + deploymentClassLoader, compareProps, targetFile); } catch (Exception any) { throw new MojoExecutionException("Failed to bootstrap Quarkus application", any); } finally { @@ -140,24 +141,5 @@ protected void doExecute() throws MojoExecutionException, MojoFailureException { deploymentClassLoader.close(); } } - - final List names = new ArrayList<>(actualProps.stringPropertyNames()); - Collections.sort(names); - - final Path outputDir = targetFile.getParent(); - if (outputDir != 
null && !Files.exists(outputDir)) { - try { - Files.createDirectories(outputDir); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - try (BufferedWriter writer = Files.newBufferedWriter(targetFile)) { - for (var name : names) { - ConfigTrackingWriter.write(writer, name, actualProps.getProperty(name)); - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } } } diff --git a/devtools/maven/src/main/java/io/quarkus/maven/UpdateMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/UpdateMojo.java index 113ca35670cdb..4bc790ec24f7f 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/UpdateMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/UpdateMojo.java @@ -16,6 +16,7 @@ import io.quarkus.devtools.commands.data.QuarkusCommandOutcome; import io.quarkus.devtools.project.QuarkusProject; import io.quarkus.devtools.project.QuarkusProjectHelper; +import io.quarkus.devtools.project.update.rewrite.QuarkusUpdateExitErrorException; import io.quarkus.maven.dependency.ArtifactCoords; import io.quarkus.registry.RegistryResolutionException; import io.quarkus.registry.catalog.ExtensionCatalog; @@ -129,10 +130,12 @@ protected void processProjectState(QuarkusProject quarkusProject) throws MojoExe final QuarkusCommandOutcome result = invoker.execute(); if (!result.isSuccess()) { throw new MojoExecutionException( - "The command did not succeed."); + "Failed to apply the updates."); } + } catch (QuarkusUpdateExitErrorException e) { + throw new MojoExecutionException(e.getMessage()); } catch (QuarkusCommandException e) { - throw new MojoExecutionException("Failed to resolve the available updates", e); + throw new MojoExecutionException("Failed to apply the updates", e); } } diff --git a/docs/downstreamdoc.yaml b/docs/downstreamdoc.yaml index 41c96ef42024d..d0c1f3370ea3c 100644 --- a/docs/downstreamdoc.yaml +++ b/docs/downstreamdoc.yaml @@ -1,6 +1,5 @@ guides: - src/main/asciidoc/datasource.adoc - - 
src/main/asciidoc/deploying-to-openshift.adoc - src/main/asciidoc/logging.adoc - src/main/asciidoc/security-architecture.adoc - src/main/asciidoc/security-authentication-mechanisms.adoc diff --git a/docs/pom.xml b/docs/pom.xml index 46b84ec333121..7177ab511802c 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -3054,6 +3054,25 @@ + + assemble-downstream-doc + prepare-package + + + exec + + + java + + -classpath + + io.quarkus.docs.generation.AssembleDownstreamDocumentation + + ${project.basedir} + + diff --git a/docs/src/main/asciidoc/amqp-reference.adoc b/docs/src/main/asciidoc/amqp-reference.adoc index 7da796b9f759c..0bc1cef14800f 100644 --- a/docs/src/main/asciidoc/amqp-reference.adoc +++ b/docs/src/main/asciidoc/amqp-reference.adoc @@ -817,3 +817,18 @@ Type: _string_ | false | Type: _string_ | false | |=== + +=== Conditionally configure channels + +You can configure the channels using a specific profile. +Thus, the channels are only configured (and added to the application) when the specified profile is enabled. + +To achieve this, you need: + +1. Prefix the `mp.messaging.[incoming|outgoing].$channel` entries with `%my-profile` such as `%my-profile.mp.messaging.[incoming|outgoing].$channel.key=value` +2. Use the `@IfBuildProfile("my-profile")` on the CDI beans containing `@Incoming(channel)` and `@Outgoing(channel)` annotations that need only to be enabled when the profile is enabled. + +Note that reactive messaging verifies that the graph is complete. +So, when using such a conditional configuration, ensure the application works with and without the profile enabled. + +Note that this approach can also be used to change the channel configuration based on a profile. 
diff --git a/docs/src/main/asciidoc/aws-lambda-http.adoc b/docs/src/main/asciidoc/aws-lambda-http.adoc index b57bd431576bd..f94a0927f1e29 100644 --- a/docs/src/main/asciidoc/aws-lambda-http.adoc +++ b/docs/src/main/asciidoc/aws-lambda-http.adoc @@ -170,6 +170,13 @@ quarkus.lambda.mock-event-server.test-port=8083 A port value of zero will result in a randomly assigned port. +To turn off the mock event server: +[source, subs=attributes+] +---- +quarkus.lambda.mock-event-server.enabled=false +---- + + == Simulate AWS Lambda Deployment with SAM CLI The AWS SAM CLI allows you to run your lambda's locally on your laptop in a simulated Lambda environment. This requires Docker to be installed. diff --git a/docs/src/main/asciidoc/aws-lambda.adoc b/docs/src/main/asciidoc/aws-lambda.adoc index 4ea510bac9245..05ebc142f0bc6 100644 --- a/docs/src/main/asciidoc/aws-lambda.adoc +++ b/docs/src/main/asciidoc/aws-lambda.adoc @@ -362,6 +362,13 @@ quarkus.lambda.mock-event-server.test-port=8083 A port value of zero will result in a randomly assigned port. +To turn off the mock event server: +[source, subs=attributes+] +---- +quarkus.lambda.mock-event-server.enabled=false +---- + + == Testing with the SAM CLI If you do not want to use the mock event server, you can test your lambdas with SAM CLI. diff --git a/docs/src/main/asciidoc/building-native-image.adoc b/docs/src/main/asciidoc/building-native-image.adoc index 37c83b2ef2dfd..89065b22499fb 100644 --- a/docs/src/main/asciidoc/building-native-image.adoc +++ b/docs/src/main/asciidoc/building-native-image.adoc @@ -204,9 +204,9 @@ include::{includes}/devtools/build-native.adoc[] [NOTE] .Issues with packaging on Windows ==== -The Microsoft Native Tools for Visual Studio must first be initialized before packaging. You can do this by starting -the `x64 Native Tools Command Prompt` that was installed with the Visual Studio Build Tools. 
At -`x64 Native Tools Command Prompt` you can navigate to your project folder and run `./mvnw package -Dnative`. +The Microsoft Native Tools for Visual Studio must first be initialized before packaging. +You can do this by starting the `x64 Native Tools Command Prompt` that was installed with the Visual Studio Build Tools. +At the `x64 Native Tools Command Prompt`, you can navigate to your project folder and run `./mvnw package -Dnative`. Another solution is to write a script to do this for you: @@ -233,7 +233,7 @@ You can do so by prepending the flag with `-J` and passing it as additional nati IMPORTANT: Fully static native executables support is experimental. On Linux it's possible to package a native executable that doesn't depend on any system shared library. -There are https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/guides/build-static-executables/#prerequisites-and-preparation[some system requirements] to be fulfilled and additional build arguments to be used along with the `native-image` invocation, a minimum is `-Dquarkus.native.additional-build-args="--static","--libc=musl"`. +There are link:https://www.graalvm.org/latest/reference-manual/native-image/guides/build-static-executables/#prerequisites-and-preparation[some system requirements] to be fulfilled and additional build arguments to be used along with the `native-image` invocation, a minimum is `-Dquarkus.native.additional-build-args="--static","--libc=musl"`. Compiling fully static binaries is done by statically linking https://musl.libc.org/[musl] instead of `glibc` and should not be used in production without rigorous testing. 
diff --git a/docs/src/main/asciidoc/cdi-reference.adoc b/docs/src/main/asciidoc/cdi-reference.adoc index f91f1f401797c..45165171358b9 100644 --- a/docs/src/main/asciidoc/cdi-reference.adoc +++ b/docs/src/main/asciidoc/cdi-reference.adoc @@ -160,7 +160,7 @@ quarkus.arc.exclude-dependency.acme.artifact-id=acme-services <2> == Native Executables and Private Members Quarkus is using GraalVM to build a native executable. -One of the limitations of GraalVM is the usage of https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/Reflection/[Reflection, window="_blank"]. +One of the limitations of GraalVM is the usage of link:https://www.graalvm.org/latest/reference-manual/native-image/Reflection/[Reflection, window="_blank"]. Reflective operations are supported but all relevant members must be registered for reflection explicitly. Those registrations result in a bigger native executable. diff --git a/docs/src/main/asciidoc/config-reference.adoc b/docs/src/main/asciidoc/config-reference.adoc index 3e7308792d2c8..811be77c45b3b 100644 --- a/docs/src/main/asciidoc/config-reference.adoc +++ b/docs/src/main/asciidoc/config-reference.adoc @@ -692,6 +692,18 @@ Maven projects could add the following goal to their `quarkus-maven-plugin` conf The `track-config-changes` goal looks for `${project.basedir}/.quarkus/quarkus-prod-config-dump` (file name and directory are configurable) and, if the file already exists, checks whether the values stored in the config dump have changed. It will log the changed options and save the current values of each of the options present in `${project.basedir}/.quarkus/quarkus-prod-config-dump` in `${project.basedir}/target/quarkus-prod-config.check` (the target file name and location can be configured). If the build time configuration has not changed since the last build both `${project.basedir}/.quarkus/quarkus-prod-config-dump` and `${project.basedir}/.quarkus/quarkus-prod-config-dump` will be identical. 
+==== Dump current build configuration when the recorded configuration isn't found + +By default, `track-config-changes` looks for the configuration recorded during the previous build and does nothing if it's not found. Enabling the `dumpCurrentWhenRecordedUnavailable` parameter will make it dump the current build configuration +options taking into account `quarkus.config-tracking.*` configuration. + +[IMPORTANT] +==== +Unlike the build configuration options recorded during the `quarkus:build` goal, configuration options saved by `quarkus:track-config-changes` with `dumpCurrentWhenRecordedUnavailable` enabled will include all the build configuration +options exposed by an `org.eclipse.microprofile.config.Config` instance. This means the report may include some build configuration options that will not be used by the Quarkus application build process but may also be missing some +build configuration options, since the MicroProfile Config specification allows configuration sources not to expose all the property names they provide to users.
+==== + [[additional-information]] == Additional Information diff --git a/docs/src/main/asciidoc/deploying-to-google-cloud.adoc b/docs/src/main/asciidoc/deploying-to-google-cloud.adoc index 7a2cc124955f7..9bc36ffa2f090 100644 --- a/docs/src/main/asciidoc/deploying-to-google-cloud.adoc +++ b/docs/src/main/asciidoc/deploying-to-google-cloud.adoc @@ -248,23 +248,19 @@ Finally, you need to configure your datasource specifically to use the socket fa [source, properties] ---- -quarkus.datasource.db-kind=other <1> -quarkus.datasource.jdbc.url=jdbc:postgresql:///mydatabase <2> +quarkus.datasource.db-kind=postgresql +quarkus.datasource.jdbc.url=jdbc:postgresql:///mydatabase <1> quarkus.datasource.jdbc.driver=org.postgresql.Driver quarkus.datasource.username=quarkus quarkus.datasource.password=quarkus -quarkus.datasource.jdbc.additional-jdbc-properties.cloudSqlInstance=project-id:gcp-region:instance <3> -quarkus.datasource.jdbc.additional-jdbc-properties.socketFactory=com.google.cloud.sql.postgres.SocketFactory <4> +quarkus.datasource.jdbc.additional-jdbc-properties.cloudSqlInstance=project-id:gcp-region:instance <2> +quarkus.datasource.jdbc.additional-jdbc-properties.socketFactory=com.google.cloud.sql.postgres.SocketFactory <3> ---- -<1> Database kind must be 'other' as we need to skip Quarkus auto-configuration. -<2> The JDBC URL should not include the hostname / IP of the database. -<3> We add the `cloudSqlInstance` additional JDBC property to configure the instance id. -<4> We add the `socketFactory` additional JDBC property to configure the socket factory used to connect to Cloud SQL, +<1> The JDBC URL should not include the hostname / IP of the database. +<2> We add the `cloudSqlInstance` additional JDBC property to configure the instance id. +<3> We add the `socketFactory` additional JDBC property to configure the socket factory used to connect to Cloud SQL, this one is coming from the `postgres-socket-factory` dependency. 
-NOTE: If you use Hibernate ORM, you also need to configure `quarkus.hibernate-orm.dialect=org.hibernate.dialect.PostgreSQL10Dialect` -as Hibernate ORM would not be able to automatically detect the dialect of your database. - WARNING: Using a PostgreSQL socket factory is not possible in dev mode at the moment due to issue link:https://github.com/quarkusio/quarkus/issues/15782[#15782]. diff --git a/docs/src/main/asciidoc/gradle-tooling.adoc b/docs/src/main/asciidoc/gradle-tooling.adoc index 1a01ce58980bd..8d58477cb0ef5 100644 --- a/docs/src/main/asciidoc/gradle-tooling.adoc +++ b/docs/src/main/asciidoc/gradle-tooling.adoc @@ -624,7 +624,7 @@ The Quarkus build uses the `prod` configuration profile: 3. Configuration via the `quarkus` extensions's `quarkusBuildProperties` For example: quarkus { - properties { + quarkusBuildProperties { set("package.type", "uber-jar") } } @@ -650,6 +650,31 @@ specify the `--save-config-properties` command line option, the configuration pr `build/.quarkus-build.properties`. +=== Gradle caching / task inputs + +By default, system properties starting with `quarkus.` and environment variables, including those from `~/.env`, +starting with `QUARKUS_`, are considered as inputs for the Gradle tasks. This means that only changes to those system +properties or environment variables will cause Gradle's up-to-date to trigger a rebuild. Changes to other system +properties or environment variables do not change Quarkus' Gradle task inputs and do not trigger an unnecessary rebuild. + +Configuration properties specified via `quarkus.quarkusBuildProperties` or via the Quarkus `application.*` +configuration files are all considered as Gradle task inputs, in other words: every change in these files causes +a rebuild. + +If your Quarkus build references system properties that do not start with `quarkus.` (or environment variables that +do not start with `QUARKUS_`), you must reference those via the Quarkus build extension. 
For example, if your +`application.properties` file references an environment variable like this: + + greeting.message=${FOO_MESSAGE:Hello!} + +it must be explicitly declared as "caching relevant": + + quarkus { + cachingRelevantProperties.add("FOO_MESSAGE") + // Note: `cachingRelevantProperties` accepts regular expressions + } + + == Build workers Quarkus application builds are ran in isolated processes using Gradle's worker API. This includes the Quarkus diff --git a/docs/src/main/asciidoc/images/keycloak-add-saml-provider.png b/docs/src/main/asciidoc/images/keycloak-add-saml-provider.png new file mode 100644 index 0000000000000..ccf9214a4e24e Binary files /dev/null and b/docs/src/main/asciidoc/images/keycloak-add-saml-provider.png differ diff --git a/docs/src/main/asciidoc/images/keycloak-default-saml-provider.png b/docs/src/main/asciidoc/images/keycloak-default-saml-provider.png new file mode 100644 index 0000000000000..f9b95b5269ef4 Binary files /dev/null and b/docs/src/main/asciidoc/images/keycloak-default-saml-provider.png differ diff --git a/docs/src/main/asciidoc/images/okta-create-saml-integration.png b/docs/src/main/asciidoc/images/okta-create-saml-integration.png new file mode 100644 index 0000000000000..3bbee52f66b62 Binary files /dev/null and b/docs/src/main/asciidoc/images/okta-create-saml-integration.png differ diff --git a/docs/src/main/asciidoc/images/okta-saml-configuration.png b/docs/src/main/asciidoc/images/okta-saml-configuration.png new file mode 100644 index 0000000000000..0124726b6235b Binary files /dev/null and b/docs/src/main/asciidoc/images/okta-saml-configuration.png differ diff --git a/docs/src/main/asciidoc/images/okta-saml-general-settings.png b/docs/src/main/asciidoc/images/okta-saml-general-settings.png new file mode 100644 index 0000000000000..73ffea7fc644e Binary files /dev/null and b/docs/src/main/asciidoc/images/okta-saml-general-settings.png differ diff --git a/docs/src/main/asciidoc/images/okta-saml-metadata.png 
b/docs/src/main/asciidoc/images/okta-saml-metadata.png new file mode 100644 index 0000000000000..be561ae3abf7a Binary files /dev/null and b/docs/src/main/asciidoc/images/okta-saml-metadata.png differ diff --git a/docs/src/main/asciidoc/images/pulsar-qs-app-screenshot.png b/docs/src/main/asciidoc/images/pulsar-qs-app-screenshot.png new file mode 100644 index 0000000000000..c6e62cbe68318 Binary files /dev/null and b/docs/src/main/asciidoc/images/pulsar-qs-app-screenshot.png differ diff --git a/docs/src/main/asciidoc/images/pulsar-qs-architecture.png b/docs/src/main/asciidoc/images/pulsar-qs-architecture.png new file mode 100644 index 0000000000000..a46da328ac79a Binary files /dev/null and b/docs/src/main/asciidoc/images/pulsar-qs-architecture.png differ diff --git a/docs/src/main/asciidoc/kafka.adoc b/docs/src/main/asciidoc/kafka.adoc index 4e8572bcda44a..d4b7cb33ceb1c 100644 --- a/docs/src/main/asciidoc/kafka.adoc +++ b/docs/src/main/asciidoc/kafka.adoc @@ -2320,6 +2320,21 @@ Attribute values are resolved as follows: 2. if not set, the connector looks for a `Map` with the channel name or the configured `kafka-configuration` (if set) and the value is retrieved from that `Map` 3. If the resolved `Map` does not contain the value the default `Map` is used (exposed with the `default-kafka-broker` name) +=== Conditionally configure channels + +You can configure the channels using a specific profile. +Thus, the channels are only configured (and added to the application) when the specified profile is enabled. + +To achieve this, you need: + +1. Prefix the `mp.messaging.[incoming|outgoing].$channel` entries with `%my-profile` such as `%my-profile.mp.messaging.[incoming|outgoing].$channel.key=value` +2. Use the `@IfBuildProfile("my-profile")` on the CDI beans containing `@Incoming(channel)` and `@Outgoing(channel)` annotations that need only to be enabled when the profile is enabled. + +Note that reactive messaging verifies that the graph is complete. 
+So, when using such a conditional configuration, ensure the application works with and without the profile enabled. + +Note that this approach can also be used to change the channel configuration based on a profile. + == Integrating with Kafka - Common patterns === Writing to Kafka from an HTTP endpoint diff --git a/docs/src/main/asciidoc/kubernetes-client.adoc b/docs/src/main/asciidoc/kubernetes-client.adoc index 20e6f2df149b9..ca75c11fc8b91 100644 --- a/docs/src/main/asciidoc/kubernetes-client.adoc +++ b/docs/src/main/asciidoc/kubernetes-client.adoc @@ -163,6 +163,8 @@ public class KubernetesClientTest { @KubernetesTestServer KubernetesServer mockServer; + @Inject + KubernetesClient client; @BeforeEach public void before() { @@ -170,8 +172,8 @@ public class KubernetesClientTest { final Pod pod2 = new PodBuilder().withNewMetadata().withName("pod2").withNamespace("test").and().build(); // Set up Kubernetes so that our "pretend" pods are created - mockServer.getClient().pods().create(pod1); - mockServer.getClient().pods().create(pod2); + client.pods().resource(pod1).create(); + client.pods().resource(pod2).create(); } @Test diff --git a/docs/src/main/asciidoc/native-and-ssl.adoc b/docs/src/main/asciidoc/native-and-ssl.adoc index fe7a020991ebc..2a0f744fef173 100644 --- a/docs/src/main/asciidoc/native-and-ssl.adoc +++ b/docs/src/main/asciidoc/native-and-ssl.adoc @@ -253,7 +253,8 @@ The file containing the custom TrustStore does *not* (and probably should not) h === Run time configuration -Using the runtime certificate configuration, supported by GraalVM since 21.3 does not require any special or additional configuration compared to regular java programs or Quarkus in jvm mode. See the https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/dynamic-features/CertificateManagement/#runtime-options[GraalVM documentation] for more information. 
+Using the runtime certificate configuration, supported by GraalVM since 21.3 does not require any special or additional configuration compared to regular java programs or Quarkus in jvm mode. +For more information, see the link:https://www.graalvm.org/latest/reference-manual/native-image/dynamic-features/CertificateManagement/#runtime-options[Runtime Options] section of the "GraalVM Certificate Management in Native Image" guide. [#working-with-containers] === Working with containers diff --git a/docs/src/main/asciidoc/native-reference.adoc b/docs/src/main/asciidoc/native-reference.adoc index d5558350a09e5..43021f2c9b709 100644 --- a/docs/src/main/asciidoc/native-reference.adoc +++ b/docs/src/main/asciidoc/native-reference.adoc @@ -24,8 +24,9 @@ These are the high level sections to be found in this guide: [[native-memory-management]] == Native Memory Management Memory management for Quarkus native executables is enabled by GraalVM’s SubstrateVM runtime system. -The memory management component in GraalVM is explained in detail -link:https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/optimizations-and-performance/MemoryManagement[here]. + +For detailed explanations about the memory management component in GraalVM, see the link:https://www.graalvm.org/latest/reference-manual/native-image/optimizations-and-performance/MemoryManagement[GraalVM Memory Management] guide. + This guide complements the information available in the GraalVM website with further observations particularly relevant to Quarkus applications. === Garbage Collectors @@ -95,8 +96,7 @@ To build Quarkus native with epsilon GC, pass the following argument at build ti ---- === Memory Management Options -Options to control maximum heap size, young space and other typical use cases found in the JVM can be found in -https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/optimizations-and-performance/MemoryManagement[the GraalVM memory management guide]. 
+For information about options to control maximum heap size, young space, and other typical use cases found in the JVM, see the link:https://www.graalvm.org/latest/reference-manual/native-image/optimizations-and-performance/MemoryManagement[GraalVM Memory Management] guide. Setting the maximum heap size, either as a percentage or an explicit value, is generally recommended. [[gc-logging]] @@ -234,10 +234,7 @@ $ ps -o pid,rss,command -p $(pidof code-with-quarkus-1.0.0-SNAPSHOT-runner) How come this Quarkus native executable consumes ~35MB RSS on startup? To get an understanding of this number, this section will use `perf` to trace calls to `syscalls:sys_enter_mmap`. -Assuming the default -https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/MemoryManagement/#serial-garbage-collector[Serial Garbage Collector] -is in use, -this system call is particularly interesting for native executables generated by GraalVM's `native-image` because of how it allocates heap. +Assuming the default link:https://www.graalvm.org/latest/reference-manual/native-image/optimizations-and-performance/MemoryManagement/#serial-garbage-collector[GraalVM Serial Garbage Collector] is in use, this system call is particularly interesting for native executables generated by GraalVM's `native-image` because of how it allocates heap. In native executables generated by GraalVM's `native-image`, the heap is allocated using either aligned or unaligned heap chunks. All non-array objects get allocated in thread local aligned chunks. Each of these are 1MB in size by default. @@ -1865,8 +1862,8 @@ Source directories searched: /data/target/debugging-native-1.0.0-SNAPSHOT-native We can now examine line `169` and get a first hint of what might be wrong (in this case we see that it fails at the first read from src which contains the address `0x0000`), or walk up the stack using `gdb`’s `up` command to see what part of our code led to this situation. 
-To learn more about using gdb to debug native executables see -https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/debugging-and-diagnostics/DebugInfo/[here]. +For more information about using `gdb` to debug native executables, see the +link:https://www.graalvm.org/latest/reference-manual/native-image/debugging-and-diagnostics/DebugInfo/[GraalVM Debug Info Feature] guide. [[native-faq]] == Frequently Asked Questions @@ -1919,7 +1916,7 @@ In the unfortunate case where the reference causing the issue is made by a 3rd p 2. Add the optional dependency as a non-optional dependency of your project. Note that although option (1) is the best choice performance wise, as it minimizes the applications footprint,it might not be trivial to implement. -To make matters worse, it's also not easy to maintain as it is tightly coupled to the 3rd party library implementation. +To make matters worse, it's also not easy to maintain as it is tightly coupled to the 3rd party library implementation. Option (2) is a straight forward alternative to work around the issue, but comes at the cost of including possibly never invoked code in the resulting native executable. === I get an `OutOfMemoryError` (OOME) building native executables, what can I do? @@ -2188,7 +2185,7 @@ Once the image is compiled, enable and start JFR via runtime flags: `-XX:+Flight -XX:StartFlightRecording="filename=recording.jfr" ---- -For more details on using JFR, see https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/debugging-and-diagnostics/JFR/[here]. +For more information about using JFR, see the link:https://www.graalvm.org/latest/reference-manual/native-image/debugging-and-diagnostics/JFR/[GraalVM JDK Flight Recorder (JFR) with Native Image] guide. === How can we troubleshoot performance problems only reproducible in production? 
@@ -2260,4 +2257,4 @@ This setting forces the native compilation to use an older instruction set, incr To explicitly define the target architecture run `native-image -march=list` to get the supported configurations and then set `-march` to one of them, e.g., `quarkus.native.additional-build-args=-march=x86-64-v4`. If you are targeting an AMD64 host, `-march=x86-64-v2` would work in most cases. -NOTE: The `march` parameter is only available on GraalVM 23+. \ No newline at end of file +NOTE: The `march` parameter is only available on GraalVM 23+. diff --git a/docs/src/main/asciidoc/opentelemetry.adoc b/docs/src/main/asciidoc/opentelemetry.adoc index 94fe8e1dc8bd6..e2ebd5c910c25 100644 --- a/docs/src/main/asciidoc/opentelemetry.adoc +++ b/docs/src/main/asciidoc/opentelemetry.adoc @@ -301,6 +301,28 @@ extension to be added as a dependency to your project. implementation("io.opentelemetry.contrib:opentelemetry-aws-xray-propagator") ---- +==== Customise Propagator + +To customise the propagation header you can implement the `TextMapPropagatorCustomizer` interface. This can be used, as an example, to restrict propagation of OpenTelemetry trace headers and prevent potentially sensitive data to be sent to third party systems. + +```java +/** + * /** + * Meant to be implemented by a CDI bean that provides arbitrary customization for the TextMapPropagator + * that are to be registered with OpenTelemetry + */ +public interface TextMapPropagatorCustomizer { + + TextMapPropagator customize(Context context); + + interface Context { + TextMapPropagator propagator(); + + ConfigProperties otelConfigProperties(); + } +} +``` + === Resource A https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/overview.md#resources[resource] is a representation of the entity that is producing telemetry, it adds attributes to the exported trace to characterize who is producing the trace. 
@@ -331,6 +353,10 @@ public class CustomConfiguration { } ---- +==== User data + +By setting `quarkus.otel.traces.eusp.enabled=true` you can add information about the user related to each span. The user's ID and roles will be added to the span attributes, if available. + === Sampler A https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/sdk.md#sampling[sampler] decides whether a trace should be sampled and exported, controlling noise and overhead by reducing the number of sample of traces collected and sent @@ -528,10 +554,42 @@ which retrieves the OpenTelemetry `Context` to extract the current span for prop == Exporters -Quarkus OpenTelemetry defaults to the standard OTLP exporter defined in OpenTelemetry. +=== Default + +The Quarkus OpenTelemetry extension uses its own exporter built on top of Vert.x for optimal performance and maintainability. + +The exporter is automatically wired with CDI, that's why the `quarkus.otel.traces.exporter` property defaults to `cdi`. +The `quarkus.otel.exporter.otlp.traces.protocol` default to `grpc` and `http/protobuf` can also be used. + +=== On Quarkiverse Additional exporters will be available in the Quarkiverse https://github.com/quarkiverse/quarkus-opentelemetry-exporter/blob/main/README.md[quarkus-opentelemetry-exporter] project. +=== OpenTelemetry exporter +The default OpenTelemetry exporter can be used, but it's not recommended because of the additional dependency on the OkHttp library. 
+It can be activated by setting `quarkus.otel.traces.exporter=otlp` and including the following dependencies in your project:
@@ -314,7 +312,6 @@ It will also generate a deserializer for the `Quote` class. In this example we used JSON Schema with Pulsar messages. For more options on Pulsar Schemas, see xref:pulsar.adoc#pulsar-schema[Pulsar Reference Guide - Schema]. -// TODO write about schema ==== == The HTML page @@ -398,7 +395,6 @@ Open `http://localhost:8080/quotes.html` in your browser and request some quotes == Running in JVM or Native mode When not running in dev or test mode, you will need to start your Pulsar broker. -// TODO You can follow the instructions from the https://pulsar.apache.org/docs/3.0.x/getting-started-docker/[Run a standalone Pulsar cluster in Docker] or create a `docker-compose.yaml` file with the following content: [source, yaml] diff --git a/docs/src/main/asciidoc/security-authentication-mechanisms.adoc b/docs/src/main/asciidoc/security-authentication-mechanisms.adoc index f03870b451659..5f10459eb9091 100644 --- a/docs/src/main/asciidoc/security-authentication-mechanisms.adoc +++ b/docs/src/main/asciidoc/security-authentication-mechanisms.adoc @@ -197,6 +197,7 @@ For more information about OIDC authentication and authorization methods that yo |OIDC topic |Quarkus information resource |Bearer token authentication mechanism|xref:security-oidc-bearer-token-authentication.adoc[OIDC Bearer token authentication] |Authorization Code Flow authentication mechanism|xref:security-oidc-code-flow-authentication.adoc[OpenID Connect (OIDC) Authorization Code Flow mechanism] +|OIDC and SAML Identity broker|xref:security-oidc-code-flow-authentication.adoc#oidc-saml-broker[OpenID Connect (OIDC) Authorization Code Flow and SAML Identity broker] |Multiple tenants that can support the Bearer token authentication or Authorization Code Flow mechanisms|xref:security-openid-connect-multitenancy.adoc[Using OpenID Connect (OIDC) multi-tenancy] |Securing Quarkus with commonly-used OpenID Connect providers|xref:security-openid-connect-providers.adoc[Configuring well-known OpenID Connect 
providers] |Using Keycloak to centralize authorization |xref:security-keycloak-authorization.adoc[Using OpenID Connect (OIDC) and Keycloak to centralize authorization] diff --git a/docs/src/main/asciidoc/security-basic-authentication-tutorial.adoc b/docs/src/main/asciidoc/security-basic-authentication-tutorial.adoc index 06e11d4620c6b..1b62aecee6d50 100644 --- a/docs/src/main/asciidoc/security-basic-authentication-tutorial.adoc +++ b/docs/src/main/asciidoc/security-basic-authentication-tutorial.adoc @@ -46,22 +46,9 @@ You can find the solution in the `security-jpa-quickstart` link:{quickstarts-tre :sectnums: :sectnumlevels: 3 -== Create a Maven project +== Create and verify the Maven project For Quarkus Security to be able to map your security source to Jakarta Persistence entities, ensure that the Maven project that is used in this tutorial includes the `security-jpa` or the `security-jpa-reactive` extension. -You can either create a new Maven project with the Security Jakarta Persistence extension or you can add the extension to an existing Maven project. - -* To create the Maven project with Hibernate ORM, use the following command: - -:create-app-artifact-id: security-jpa-quickstart -:create-app-extensions: security-jpa,jdbc-postgresql,resteasy-reactive,hibernate-orm-panache -include::{includes}/devtools/create-app.adoc[] - -* To create the Maven project with Hibernate Reactive, use the following command: - -:create-app-artifact-id: security-jpa-reactive-quickstart -:create-app-extensions: security-jpa-reactive,reactive-pg-client,resteasy-reactive,hibernate-reactive-panache -include::{includes}/devtools/create-app.adoc[] [NOTE] ==== @@ -72,20 +59,49 @@ You must also add your preferred database connector library. The instructions in this example tutorial use a PostgreSQL database for the identity store. 
==== -* To add the Security Jakarta Persistence extension to an existing Maven project with Hibernate ORM, run the following command from your project base directory: -:add-extension-extensions: security-jpa -include::{includes}/devtools/extension-add.adoc[] +=== Create the Maven project + +You can either create a new Maven project with the Security Jakarta Persistence extension or you can add the extension to an existing Maven project. You can use either Hibernate ORM or Hibernate Reactive. + +* To create a new Maven project with the Jakarta Persistence extension, complete one of the following steps: +** To create the Maven project with Hibernate ORM, use the following command: ++ +==== +:create-app-artifact-id: security-jpa-quickstart +:create-app-extensions: security-jpa,jdbc-postgresql,resteasy-reactive,hibernate-orm-panache +include::{includes}/devtools/create-app.adoc[] +==== +** To create the Maven project with Hibernate Reactive, use the following command: ++ +==== +:create-app-artifact-id: security-jpa-reactive-quickstart +:create-app-extensions: security-jpa-reactive,reactive-pg-client,resteasy-reactive,hibernate-reactive-panache +include::{includes}/devtools/create-app.adoc[] +==== -* To add the Security Jakarta Persistence extension to an existing Maven project with Hibernate Reactive, run the following command from your project base directory: +* To add the Jakarta Persistence extension to an existing Maven project, complete one of the following steps: +** To add the Security Jakarta Persistence extension to an existing Maven project with Hibernate ORM, run the following command from your project base directory: ++ +==== +:add-extension-extensions: security-jpa +include::{includes}/devtools/extension-add.adoc[] +==== +** To add the Security Jakarta Persistence extension to an existing Maven project with Hibernate Reactive, run the following command from your project base directory: ++ +==== :add-extension-extensions: security-jpa-reactive 
include::{includes}/devtools/extension-add.adoc[] +==== === Verify the quarkus-security-jpa dependency -After you run either of the preceding commands, verify that the `security-jpa` dependency was added to your project build XML file,as follows: +After you have run either of the preceding commands to create the Maven project, verify that the `security-jpa` dependency was added to your project build XML file. +* To verify the `security-jpa` extension, check for the following configuration: ++ +==== [source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] .pom.xml ---- @@ -100,9 +116,10 @@ After you run either of the preceding commands, verify that the `security-jpa` d ---- implementation("io.quarkus:quarkus-security-jpa") ---- - -Similarly, if you added the `security-jpa-reactive` extension, verify that the correct dependency was added to your project build XML file, as follows: - +==== +* To verify the `security-jpa-reactive` extension, check for the following configuration: ++ +==== [source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] .pom.xml ---- @@ -117,12 +134,16 @@ Similarly, if you added the `security-jpa-reactive` extension, verify that the ---- implementation("io.quarkus:quarkus-security-jpa-reactive") ---- +==== == Write the application -* Let's start by implementing the `/api/public` endpoint to allow all users access to access the application. -Add a regular Jakarta REST resource to your Java source code, as outlined in the following code snippet: +* Secure the API endpoint to determine who can access the application by using one of the following approaches: + +** Implement the `/api/public` endpoint to allow all users access to the application. 
+Add a regular Jakarta REST resource to your Java source code, as shown in the following code snippet: + +==== [source,java] ---- package org.acme.security.jpa; @@ -144,9 +165,12 @@ public class PublicResource { } } ---- -* The source code for the `/api/admin` endpoint is similar, but instead, you use a `@RolesAllowed` annotation to ensure that only users granted the `admin` role can access the endpoint. +==== +** Implement the `/api/public` endpoint to allow all users to access the application. +The source code for the `/api/admin` endpoint is similar, but instead, you use a `@RolesAllowed` annotation to ensure that only users granted the `admin` role can access the endpoint. Add a Jakarta REST resource with the following `@RolesAllowed` annotation: + +==== [source,java] ---- package org.acme.security.jpa; @@ -168,9 +192,11 @@ public class AdminResource { } } ---- -* Finally, implement an `/api/users/me` endpoint that can only accessed by users who have the `user` role. +==== +** Implement an `/api/users/me` endpoint that can only be accessed by users who have the `user` role. Use `SecurityContext` to get access to the currently authenticated `Principal` user and to return their username, all of which is retrieved from the database. + +==== [source,java] ---- package org.acme.security.jpa; @@ -193,10 +219,11 @@ public class UserResource { } } ---- +==== === Define the user entity -You can now describe how you want security information to be stored in the model by adding annotations to the `user` entity, as outlined in the following code snippet: +* You can now describe how you want security information to be stored in the model by adding annotations to the `user` entity, as outlined in the following code snippet: [source,java] ---- @@ -268,6 +295,7 @@ Therefore, in this tutorial, you do not need to set the property `quarkus.http.a . Configure at least one data source in the `application.properties` file so the `security-jpa` extension can access your database. 
For example: + +==== [source,properties] ---- quarkus.http.auth.basic=true @@ -279,23 +307,21 @@ quarkus.datasource.jdbc.url=jdbc:postgresql:security_jpa quarkus.hibernate-orm.database.generation=drop-and-create ---- +==== + . To initialize the database with users and roles, implement the `Startup` class, as outlined in the following code snippet: [NOTE] ==== -The URLs of Reactive datasources that are used by the `security-jpa-reactive` extension are set with the `quarkus.datasource.reactive.url` +* The URLs of Reactive datasources that are used by the `security-jpa-reactive` extension are set with the `quarkus.datasource.reactive.url` configuration property and not the `quarkus.datasource.jdbc.url` configuration property that is typically used by JDBC datasources. - ++ [source,properties] ---- %prod.quarkus.datasource.reactive.url=vertx-reactive:postgresql://localhost:5431/security_jpa ---- -==== - -[NOTE] -==== -In this tutorial, a PostgreSQL database is used for the identity store. ++ +* In this tutorial, a PostgreSQL database is used for the identity store. link:https://hibernate.org/orm/[Hibernate ORM] automatically creates the database schema on startup. This approach is suitable for development but is not recommended for production. Therefore adjustments are needed in a production environment. @@ -336,12 +362,12 @@ As a result, the `security-jpa` defaults to using bcrypt-hashed passwords. Complete the integration testing of your application in JVM and native modes by using xref:dev-services.adoc#databases[Dev Services for PostgreSQL] before you run your application in production mode. -To run your application in dev mode: +* To run your application in dev mode: include::{includes}/devtools/dev.adoc[] -The following properties configuration demonstrates how you can enable PostgreSQL testing to run in production (`prod`) mode only. 
+* The following properties configuration demonstrates how you can enable PostgreSQL testing to run in production (`prod`) mode only. In this scenario, `Dev Services for PostgreSQL` launches and configures a `PostgreSQL` test container. [source,properties] @@ -354,9 +380,9 @@ In this scenario, `Dev Services for PostgreSQL` launches and configures a `Postg quarkus.hibernate-orm.database.generation=drop-and-create ---- -If you add the `%prod.` profile prefix, data source properties are not visible to `Dev Services for PostgreSQL` and are only observed by an application running in production mode. +* If you add the `%prod.` profile prefix, data source properties are not visible to `Dev Services for PostgreSQL` and are only observed by an application running in production mode. -To write the integration test, use the following code sample: +* To write the integration test, use the following code sample: [source,java] ---- @@ -433,9 +459,9 @@ While developing your application, you can add tests one by one and run them usi `Dev Services for PostgreSQL` supports testing while you develop by providing a separate `PostgreSQL` test container that does not conflict with the `devmode` container. ==== -=== Use `curl` or a browser to test your application +=== Use Curl or a browser to test your application -Use the following example to start the PostgreSQL server: +* Use the following example to start the PostgreSQL server: [source,bash] ---- docker run --rm=true --name security-getting-started -e POSTGRES_USER=quarkus \ @@ -444,37 +470,45 @@ docker run --rm=true --name security-getting-started -e POSTGRES_USER=quarkus \ ---- === Compile and run the application -Compile and run your Quarkus application by using one of the following methods: - -==== JVM mode - -Compile the application: +* Compile and run your Quarkus application by using one of the following methods: +** JVM mode +. 
Compile the application: ++ +==== include::{includes}/devtools/build.adoc[] -Run the application: - +==== +. Run the application: ++ +==== [source,bash] ---- java -jar target/quarkus-app/quarkus-run.jar ---- +==== -==== Native mode - -Compile the application: +** Native mode +. Compile the application: ++ +==== include::{includes}/devtools/build-native.adoc[] - -Run the application: +==== +. Run the application: ++ +==== [source,bash] ---- ./target/security-jpa-quickstart-runner ---- +==== === Access and test the application security -When your application is running, you can access its endpoints by using one of the following `curl` commands. -You can also access the same endpoint URLs by using a browser. +When your application is running, you can access its endpoints by using one of the following Curl commands. * Connect to a protected endpoint anonymously: ++ +==== [source,shell] ---- $ curl -i -X GET http://localhost:8080/api/public @@ -484,8 +518,10 @@ Content-Type: text/plain;charset=UTF-8 public ---- - +==== * Connect to a protected endpoint anonymously: ++ +==== [source,shell] ---- $ curl -i -X GET http://localhost:8080/api/admin @@ -496,14 +532,10 @@ WWW-Authenticate: Basic Not authorized ---- - -[NOTE] ==== -If you use a browser to anonymously connect to a protected resource, a Basic authentication form displays, prompting you to enter credentials. -==== - * Connect to a protected endpoint as an authorized user: - ++ +==== [source,shell] ---- $ curl -i -X GET -u admin:admin http://localhost:8080/api/admin @@ -513,6 +545,15 @@ Content-Type: text/plain;charset=UTF-8 admin ---- +==== + +You can also access the same endpoint URLs by using a browser. + +[NOTE] +==== +If you use a browser to anonymously connect to a protected resource, a Basic authentication form displays, prompting you to enter credentials. 
+==== + === Results @@ -531,7 +572,7 @@ Content-Type: text/html;charset=UTF-8 Forbidden ---- -Finally, the user name `user` is authorized and the security context contains the principal details, for example, the user name. +Finally, the user named `user` is authorized and the security context contains the principal details, for example, the username. [source,shell] ---- diff --git a/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc b/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc index ff81993528580..1f4eff83f391e 100644 --- a/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc +++ b/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc @@ -1184,6 +1184,49 @@ In such cases an issuer verification failure may be reported by the endpoint and In such cases, if you work with Keycloak then please start it with a `KEYCLOAK_FRONTEND_URL` system property set to the externally accessible base URL. If you work with other Openid Connect providers then please check your provider's documentation. +[[oidc-saml-broker]] +== OIDC SAML Identity broker + +If your identity provider does not implement OpenId Connect but only the legacy XML-based SAML2.0 SSO protocol, then Quarkus can not be used as a SAML 2.0 adapter, similarly to how `quarkus-oidc` is used as an OIDC adapter. + +However, many OIDC providers such as Keycloak, Okta, Auth0, Microsoft ADFS can offer OIDC to SAML 2.0 bridges. You can create an identity broker connection to SAML 2.0 provider in your OIDC provider and use `quarkus-oidc` to authenticate your users to this SAML 2.0 provider with the OIDC provider coordinating OIDC and SAML 2.0 communications. As far as Quarkus endpoints are concerned, they can continue using the same Quarkus Security and OIDC API and annotations such as `@Authenticated`, `SecurityIdentity`, etc. + +For example, lets assume `Okta` is your SAML 2.0 provider and `Keycloak` is your OIDC provider. 
Here is a typical sequence explaining how to configure `Keycloak` to broker with the `Okta` SAML 2.0 provider. + +First, create a new `SAML2` integration in your `Okta` `Dashboard/Applications`: + +image::okta-create-saml-integration.png[alt=Okta Create SAML Integration,role="center"] + +For example, name it as `OktaSaml`: + +image::okta-saml-general-settings.png[alt=Okta SAML General Settings,role="center"] + +Next, configure it to point to a Keycloak SAML broker endpoint. At this point you need to know the name of the Keycloak realm, for example, `quarkus`, and, assuming that the Keycloak SAML broker alias is `saml`, enter the endpoint address as `http:localhost:8081/realms/quarkus/broker/saml/endpoint` and Service provider (SP) entity id as `http:localhost:8081/realms/quarkus`, where `http://localhost:8081` is a Keycloak base address and `saml` is a broker alias: + +image::okta-saml-configuration.png[alt=Okta SAML Configuration,role="center"] + +Next, save this SAML integration and note its Metadata URL: + +image::okta-saml-metadata.png[alt=Okta SAML Metadata,role="center"] + +Next, add SAML Provider to Keycloak: + +First, as usual, create a new realm or import the existing realm to `Keycloak`, in this case, the realm name has to be `quarkus`. + +Now, in the `quarkus` Realm properties, navigate to `Identity Providers` and add a new SAML provider: + +image::keycloak-add-saml-provider.png[alt=Keycloak Add SAML Provider,role="center"] + +Note the alias is set to `saml`, `Redirect URI` is `http:localhost:8081/realms/quarkus/broker/saml/endpoint` and `Service provider entity id` is `http:localhost:8081/realms/quarkus` - these are the same values you have entered when creating the Okta SAML integration in the previous step. + +Finally, set `Service entity descriptor` to point to the Okta SAML Intregration Metadata URL you noted at the end of the previous step. 
+ +Next, if you would like, you can register this Keycloak SAML Provider as a Default Provider by navigating to `Authentication/browser/Identity Provider Redirector config` and setting both `Alias` and `Default Identity Provider` properties to `saml`. If you do not configure it as a Default Provider then, at the authentication time, Keycloak will offer 2 options - authenticate with the SAML provider, and authenticate directly to Keycloak with the name and password. + +Now configure the Quarkus OIDC `web-app` application to point to the Keycloak `quarkus` realm, `quarkus.oidc.auth-server-url=http://localhost:8180/realms/quarkus` and you are ready to start authenticating your Quarkus users to the Okta SAML 2.0 provider using an OIDC to SAML bridge provided by Keycloak OIDC and Okta SAML 2.0 providers. + +You can configure other OIDC providers to provide a SAML bridge similarly to how it can be done for Keycloak. + [[integration-testing]] == Testing diff --git a/docs/src/main/asciidoc/spring-di.adoc b/docs/src/main/asciidoc/spring-di.adoc index dec134fce3dd2..9c1ca4357005c 100644 --- a/docs/src/main/asciidoc/spring-di.adoc +++ b/docs/src/main/asciidoc/spring-di.adoc @@ -334,6 +334,10 @@ The following table shows how Spring DI annotations can be converted to CDI and |Doesn't have a one-to-one mapping to a CDI annotation. |=== +== Spring DI Configuration Reference + +include::{generated-dir}/config/quarkus-spring-di.adoc[leveloffset=+1, opts=optional] + == More Spring guides Quarkus has more Spring compatibility features. See the following guides for more details: diff --git a/docs/src/main/asciidoc/writing-extensions.adoc b/docs/src/main/asciidoc/writing-extensions.adoc index f185a27b431d3..fd91dd2dbf3b0 100644 --- a/docs/src/main/asciidoc/writing-extensions.adoc +++ b/docs/src/main/asciidoc/writing-extensions.adoc @@ -1045,7 +1045,7 @@ and add them to the reflective hierarchy for `BUILD_TIME` analysis. 
==== Visualizing build step dependencies -It can occasionally be useful to see a visual representation of the interactions between the various build steps. For such cases, adding `-Djboss.builder.graph-output=build.dot` when building an application +It can occasionally be useful to see a visual representation of the interactions between the various build steps. For such cases, adding `-Dquarkus.builder.graph-output=build.dot` when building an application will result in the creation of the `build.dot` file in the project's root directory. See link:https://graphviz.org/resources/[this] for a list of software that can open the file and show the actual visual representation. [[configuration]] @@ -2527,7 +2527,7 @@ You can use the `io.quarkus.arc.runtime.BeanContainer` interface to interact wit TestBuildAndRunTimeConfig buildTimeConfig, TestRunTimeConfig runTimeConfig) { log.info("Begin BeanContainerListener callback\n"); - IConfigConsumer instance = beanContainer.instance(beanClass); <4> + IConfigConsumer instance = beanContainer.beanInstance(beanClass); <4> instance.loadConfig(buildTimeConfig, runTimeConfig); <5> log.infof("configureBeans, instance=%s\n", instance); } diff --git a/docs/src/main/asciidoc/writing-native-applications-tips.adoc b/docs/src/main/asciidoc/writing-native-applications-tips.adoc index ffc534ecc3315..82149c66e3da1 100644 --- a/docs/src/main/asciidoc/writing-native-applications-tips.adoc +++ b/docs/src/main/asciidoc/writing-native-applications-tips.adoc @@ -76,7 +76,7 @@ Here we include all the XML files and JSON files into the native executable. [NOTE] ==== -You can find more information about this topic in https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/dynamic-features/Resources/[the GraalVM documentation]. +For more information about this topic, see the link:https://www.graalvm.org/latest/reference-manual/native-image/dynamic-features/Resources/[GraalVM Accessing Resources in Native Image] guide. 
==== The final order of business is to make the configuration file known to the `native-image` executable by adding the proper configuration to `application.properties`: @@ -245,7 +245,7 @@ As an example, in order to register all methods of class `com.acme.MyClass` for [NOTE] ==== -For more details on the format of this file, please refer to https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/dynamic-features/Reflection/[the GraalVM documentation]. +For more information about the format of this file, see the link:https://www.graalvm.org/latest/reference-manual/native-image/dynamic-features/Reflection/[GraalVM Reflection in Native Image] guide. ==== The final order of business is to make the configuration file known to the `native-image` executable by adding the proper configuration to `application.properties`: @@ -327,7 +327,7 @@ It should be added to the `native-image` configuration using the `quarkus.native [NOTE] ==== -You can find more information about all this in https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/optimizations-and-performance/ClassInitialization/[the GraalVM documentation]. +For more information, see the link:https://www.graalvm.org/latest/reference-manual/native-image/optimizations-and-performance/ClassInitialization/[GraalVM Class Initialization in Native Image] guide. ==== [NOTE] @@ -360,7 +360,8 @@ com.oracle.svm.core.jdk.UnsupportedFeatureError: Proxy class defined by interfac ---- Solving this issue requires adding the `-H:DynamicProxyConfigurationResources=` option and to provide a dynamic proxy configuration file. -You can find all the information about the format of this file in https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/guides/configure-dynamic-proxies/[the GraalVM documentation]. 
+ +For more information about the format of this file, see the link:https://www.graalvm.org/latest/reference-manual/native-image/guides/configure-dynamic-proxies/[GraalVM Configure Dynamic Proxies Manually] guide. [[modularity-benefits]] === Modularity Benefits @@ -612,7 +613,7 @@ public class SaxParserProcessor { [NOTE] ==== -More information about reflection in GraalVM can be found https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/dynamic-features/Reflection/[here]. +For more information about reflection in GraalVM, see the link:https://www.graalvm.org/latest/reference-manual/native-image/dynamic-features/Reflection/[GraalVM Reflection in Native Image] guide. ==== === Including resources @@ -633,7 +634,7 @@ public class ResourcesProcessor { [NOTE] ==== -For more information about GraalVM resource handling in native executables please refer to https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/dynamic-features/Resources/[the GraalVM documentation]. +For more information about GraalVM resource handling in native executables, see the link:https://www.graalvm.org/latest/reference-manual/native-image/dynamic-features/Resources/[GraalVM Accessing Resources in Native Image] guide. ==== @@ -657,7 +658,7 @@ Using such a construct means that a `--initialize-at-run-time` option will autom [NOTE] ==== -For more information about `--initialize-at-run-time`, please read https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/optimizations-and-performance/ClassInitialization/[the GraalVM documentation]. +For more information about the `--initialize-at-run-time` option, see the link:https://www.graalvm.org/latest/reference-manual/native-image/optimizations-and-performance/ClassInitialization/[GraalVM Class Initialization in Native Image] guide. 
==== === Managing Proxy Classes @@ -681,7 +682,7 @@ Using such a construct means that a `-H:DynamicProxyConfigurationResources` opti [NOTE] ==== -For more information about Proxy Classes you can read https://www.graalvm.org/{graalvm-version}/reference-manual/native-image/guides/configure-dynamic-proxies/[the GraalVM documentation]. +For more information about Proxy Classes, see the link:https://www.graalvm.org/latest/reference-manual/native-image/guides/configure-dynamic-proxies/[GraalVM Configure Dynamic Proxies Manually] guide. ==== === Logging with Native Image diff --git a/docs/downstreamdoc.java b/docs/src/main/java/io/quarkus/docs/generation/AssembleDownstreamDocumentation.java similarity index 90% rename from docs/downstreamdoc.java rename to docs/src/main/java/io/quarkus/docs/generation/AssembleDownstreamDocumentation.java index 058fc840c4e19..1a9754b8b0c3c 100755 --- a/docs/downstreamdoc.java +++ b/docs/src/main/java/io/quarkus/docs/generation/AssembleDownstreamDocumentation.java @@ -1,23 +1,11 @@ -//usr/bin/env jbang "$0" "$@" ; exit $? 
- -//DEPS io.quarkus.platform:quarkus-bom:3.2.2.Final@pom -//DEPS io.quarkus:quarkus-picocli -//DEPS io.quarkus:quarkus-jackson -//DEPS com.fasterxml.jackson.dataformat:jackson-dataformat-yaml - -//JAVAC_OPTIONS -parameters -//JAVA_OPTIONS -Djava.util.logging.manager=org.jboss.logmanager.LogManager - -//Q:CONFIG quarkus.log.level=SEVERE -//Q:CONFIG quarkus.log.category."downstreamdoc".level=INFO -//Q:CONFIG quarkus.banner.enabled=false +package io.quarkus.docs.generation; import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; -import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -34,12 +22,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import picocli.CommandLine.Command; - -@Command(name = "downstreamdoc", mixinStandardHelpOptions = true) -public class downstreamdoc implements Runnable { +public class AssembleDownstreamDocumentation { - private static final Logger LOG = Logger.getLogger(downstreamdoc.class); + private static final Logger LOG = Logger.getLogger(AssembleDownstreamDocumentation.class); private static final Path SOURCE_DOC_PATH = Path.of("src", "main", "asciidoc"); private static final Path DOC_PATH = Path.of("target", "asciidoc", "sources"); @@ -52,8 +37,7 @@ public class downstreamdoc implements Runnable { private static final Path TARGET_GENERATED_DIRECTORY = TARGET_ROOT_DIRECTORY.resolve("_generated"); private static final Path TARGET_LISTING = Path.of("target", "downstream-files.txt"); private static final Set EXCLUDED_FILES = Set.of( - DOC_PATH.resolve("_attributes-local.adoc") - ); + DOC_PATH.resolve("_attributes-local.adoc")); private static final String ADOC_SUFFIX = ".adoc"; private static final Pattern XREF_PATTERN = Pattern.compile("xref:([^\\.#\\[ ]+)\\" + ADOC_SUFFIX); @@ -88,13 +72,13 @@ 
public class downstreamdoc implements Runnable { Pattern.CASE_INSENSITIVE | Pattern.MULTILINE), "* Using Maven:\n+\n--\n[source, bash, subs=attributes+]\n----\n$1----\n$4--"); - @Override - public void run() { + public static void main(String[] args) throws Exception { if (!Files.isDirectory(DOC_PATH)) { - LOG.error("Transformed AsciiDoc sources directory does not exist. Have you built the documentation?"); + throw new IllegalStateException( + "Transformed AsciiDoc sources directory does not exist. Have you built the documentation?"); } if (!Files.isDirectory(GENERATED_FILES_PATH)) { - LOG.error("Generated files directory does not exist. Have you built the documentation?"); + throw new IllegalStateException("Generated files directory does not exist. Have you built the documentation?"); } try { @@ -196,13 +180,13 @@ public void run() { Files.copy(sourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING); } - Files.writeString(TARGET_LISTING, allResolvedPaths.stream().map(p -> p.toString()).collect(Collectors.joining("\n"))); + Files.writeString(TARGET_LISTING, + allResolvedPaths.stream().map(p -> p.toString()).collect(Collectors.joining("\n"))); LOG.info("Downstream documentation tree is available in: " + TARGET_ROOT_DIRECTORY); LOG.info("Downstream documentation listing is available in: " + TARGET_LISTING); } catch (IOException e) { - LOG.error("An error occurred while generating the downstream tree", e); - System.exit(1); + throw new UncheckedIOException("An error occurred while generating the downstream tree", e); } } @@ -278,7 +262,7 @@ private static void deleteDirectory(Path directory) throws IOException { .forEach(File::delete); } - private void copyAsciidoc(Path sourceFile, Path targetFile, Set downstreamGuides) throws IOException { + private static void copyAsciidoc(Path sourceFile, Path targetFile, Set downstreamGuides) throws IOException { List guideLines = Files.readAllLines(sourceFile); StringBuilder rewrittenGuide = new StringBuilder(); @@ -336,13 
+320,14 @@ private void copyAsciidoc(Path sourceFile, Path targetFile, Set downstre String rewrittenGuideWithoutTabs = rewrittenGuide.toString().trim(); for (Entry tabReplacement : TABS_REPLACEMENTS.entrySet()) { - rewrittenGuideWithoutTabs = tabReplacement.getKey().matcher(rewrittenGuideWithoutTabs).replaceAll(tabReplacement.getValue()); + rewrittenGuideWithoutTabs = tabReplacement.getKey().matcher(rewrittenGuideWithoutTabs) + .replaceAll(tabReplacement.getValue()); } Files.writeString(targetFile, rewrittenGuideWithoutTabs.trim()); } - private String rewriteLinks(String content, Set downstreamGuides) { + private static String rewriteLinks(String content, Set downstreamGuides) { content = XREF_PATTERN.matcher(content).replaceAll(mr -> { if (downstreamGuides.contains(mr.group(1) + ADOC_SUFFIX)) { return mr.group(0); diff --git a/extensions/amazon-lambda-http/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/amazon-lambda-http/runtime/src/main/resources/META-INF/quarkus-extension.yaml index 295b2b80f286c..a92cbe2bd01f2 100644 --- a/extensions/amazon-lambda-http/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/amazon-lambda-http/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -10,5 +10,5 @@ metadata: - "rest" categories: - "cloud" - guide: "https://quarkus.io/guides/amazon-lambda-http" + guide: "https://quarkus.io/guides/aws-lambda-http" status: "stable" diff --git a/extensions/amazon-lambda-rest/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/amazon-lambda-rest/runtime/src/main/resources/META-INF/quarkus-extension.yaml index 992fa650deacb..ed6ce602509f3 100644 --- a/extensions/amazon-lambda-rest/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/amazon-lambda-rest/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -10,7 +10,7 @@ metadata: - "rest" categories: - "cloud" - guide: "https://quarkus.io/guides/amazon-lambda-http" + guide: 
"https://quarkus.io/guides/aws-lambda-http" status: "stable" config: - "quarkus.lambda-http." diff --git a/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/DevServicesLambdaProcessor.java b/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/DevServicesLambdaProcessor.java index 17661e6611b34..c30f5e707a21a 100644 --- a/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/DevServicesLambdaProcessor.java +++ b/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/DevServicesLambdaProcessor.java @@ -22,7 +22,6 @@ import io.quarkus.deployment.builditem.DevServicesResultBuildItem; import io.quarkus.deployment.builditem.LaunchModeBuildItem; import io.quarkus.deployment.builditem.ServiceStartBuildItem; -import io.quarkus.deployment.dev.devservices.GlobalDevServicesConfig; import io.quarkus.runtime.LaunchMode; public class DevServicesLambdaProcessor { @@ -53,7 +52,7 @@ private boolean legacyTestingEnabled() { } @Produce(ServiceStartBuildItem.class) - @BuildStep(onlyIfNot = IsNormal.class, onlyIf = GlobalDevServicesConfig.Enabled.class) + @BuildStep(onlyIfNot = IsNormal.class) // This is required for testing so run it even if devservices.enabled=false public void startEventServer(LaunchModeBuildItem launchMode, LambdaConfig config, Optional override, @@ -64,6 +63,9 @@ public void startEventServer(LaunchModeBuildItem launchMode, return; if (legacyTestingEnabled()) return; + if (!config.mockEventServer.enabled) { + return; + } if (server != null) { return; } diff --git a/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/MockEventServerConfig.java b/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/MockEventServerConfig.java index 8722ba40f47f6..85cde4c70a1fb 100644 --- 
a/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/MockEventServerConfig.java +++ b/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/MockEventServerConfig.java @@ -9,6 +9,12 @@ */ @ConfigGroup public class MockEventServerConfig { + /** + * Setting to true will start event server even if quarkus.devservices.enabled=false + */ + @ConfigItem(defaultValue = "true") + public boolean enabled; + /** * Port to access mock event server in dev mode */ diff --git a/extensions/amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml index 495288d1d6b53..d51d369398b9e 100644 --- a/extensions/amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -7,7 +7,7 @@ metadata: - "amazon" categories: - "cloud" - guide: "https://quarkus.io/guides/amazon-lambda" + guide: "https://quarkus.io/guides/aws-lambda" status: "stable" codestart: name: "amazon-lambda" diff --git a/extensions/funqy/funqy-amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/funqy/funqy-amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml index e393d3e3e142c..66e8e0e9be783 100644 --- a/extensions/funqy/funqy-amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/funqy/funqy-amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -7,7 +7,7 @@ metadata: - "lambda" - "aws" - "amazon" - guide: "https://quarkus.io/guides/funqy-amazon-lambda" + guide: "https://quarkus.io/guides/funqy-aws-lambda" categories: - "cloud" status: "experimental" diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java 
index 6c9e3654828e9..53e3fb840c251 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java @@ -426,11 +426,6 @@ private void devModeReload(GrpcContainer grpcContainer, Vertx vertx, GrpcServerC definitions.add(service.definition); } - ServerServiceDefinition reflectionService = new ReflectionServiceV1(definitions).bindService(); - - for (ServerMethodDefinition method : reflectionService.getMethods()) { - methods.put(method.getMethodDescriptor().getFullMethodName(), method); - } List servicesWithInterceptors = new ArrayList<>(); CompressionInterceptor compressionInterceptor = prepareCompressionInterceptor(configuration); for (GrpcServiceDefinition service : services) { @@ -439,6 +434,10 @@ private void devModeReload(GrpcContainer grpcContainer, Vertx vertx, GrpcServerC compressionInterceptor, service, true)); } + // add after actual services, so we don't inspect them for interceptors, etc + servicesWithInterceptors.add(new ReflectionServiceV1(definitions).bindService()); + servicesWithInterceptors.add(new ReflectionServiceV1alpha(definitions).bindService()); + for (ServerServiceDefinition serviceWithInterceptors : servicesWithInterceptors) { for (ServerMethodDefinition method : serviceWithInterceptors.getMethods()) { methods.put(method.getMethodDescriptor().getFullMethodName(), method); diff --git a/extensions/hibernate-search-orm-coordination-outbox-polling/deployment/pom.xml b/extensions/hibernate-search-orm-coordination-outbox-polling/deployment/pom.xml index bf5a1e9fb07c4..a276fa8659a55 100644 --- a/extensions/hibernate-search-orm-coordination-outbox-polling/deployment/pom.xml +++ b/extensions/hibernate-search-orm-coordination-outbox-polling/deployment/pom.xml @@ -128,6 +128,16 @@ See https://www.elastic.co/guide/en/elasticsearch/reference/8.8/modules-cluster.html#disk-based-shard-allocation --> false + + false + false + false + 
false + false + false + false + false + false 9200:9200 diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/pom.xml b/extensions/hibernate-search-orm-elasticsearch/deployment/pom.xml index 6f2645e3e2bf0..9f9331be27bbf 100644 --- a/extensions/hibernate-search-orm-elasticsearch/deployment/pom.xml +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/pom.xml @@ -133,6 +133,16 @@ See https://www.elastic.co/guide/en/elasticsearch/reference/8.8/modules-cluster.html#disk-based-shard-allocation --> false + + false + false + false + false + false + false + false + false + false 9200:9200 diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/MyEntity1.java b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/MyEntity1.java new file mode 100644 index 0000000000000..e3ecbac58118e --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/MyEntity1.java @@ -0,0 +1,44 @@ +package io.quarkus.hibernate.search.orm.elasticsearch.test.search.shard_failure; + +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; + +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.KeywordField; + +@Entity +@Indexed +public class MyEntity1 { + + @Id + @GeneratedValue + private Long id; + + @KeywordField + private String text; + + public MyEntity1() { + } + + public MyEntity1(String text) { + this.text = text; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getText() { + return text; + } + + public void setText(String text) { + this.text = text; + } + +} diff 
--git a/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/MyEntity2.java b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/MyEntity2.java new file mode 100644 index 0000000000000..b2550b8e475eb --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/MyEntity2.java @@ -0,0 +1,44 @@ +package io.quarkus.hibernate.search.orm.elasticsearch.test.search.shard_failure; + +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; + +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.KeywordField; + +@Entity +@Indexed +public class MyEntity2 { + + @Id + @GeneratedValue + private Long id; + + @KeywordField + private String text; + + public MyEntity2() { + } + + public MyEntity2(String text) { + this.text = text; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getText() { + return text; + } + + public void setText(String text) { + this.text = text; + } + +} diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/ShardFailureIgnoreDefaultTest.java b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/ShardFailureIgnoreDefaultTest.java new file mode 100644 index 0000000000000..ad3623a7ebae5 --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/ShardFailureIgnoreDefaultTest.java 
@@ -0,0 +1,50 @@ +package io.quarkus.hibernate.search.orm.elasticsearch.test.search.shard_failure; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.List; + +import jakarta.inject.Inject; + +import org.hibernate.search.mapper.orm.session.SearchSession; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.hibernate.search.orm.elasticsearch.test.util.TransactionUtils; +import io.quarkus.narayana.jta.QuarkusTransaction; +import io.quarkus.test.QuarkusUnitTest; + +public class ShardFailureIgnoreDefaultTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClass(TransactionUtils.class) + .addClass(MyEntity1.class) + .addClass(MyEntity2.class) + .addAsResource("hsearch-4915/index2.json")) + .withConfigurationResource("application.properties") + // Override the type of the keyword field to integer, to create an error in one shard only. 
+ .overrideConfigKey( + "quarkus.hibernate-search-orm.elasticsearch.indexes.\"MyEntity2\".schema-management.mapping-file", + "hsearch-4915/index2.json"); + + @Inject + SearchSession session; + + @Test + public void testShardFailureIgnored() { + QuarkusTransaction.joiningExisting().run(() -> { + session.toEntityManager().persist(new MyEntity1("42")); + session.toEntityManager().persist(new MyEntity2("42")); + }); + QuarkusTransaction.joiningExisting().run(() -> { + assertThat(session.search(List.of(MyEntity1.class, MyEntity2.class)) + .where(f -> f.wildcard().field("text").matching("4*")) + .fetchHits(20)) + // MyEntity2 fails because "text" is an integer field there + // We expect that index (shard) to be ignored + .hasSize(1); + }); + } +} diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/ShardFailureIgnoreFalseTest.java b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/ShardFailureIgnoreFalseTest.java new file mode 100644 index 0000000000000..dca03bec0ad4e --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/search/shard_failure/ShardFailureIgnoreFalseTest.java @@ -0,0 +1,53 @@ +package io.quarkus.hibernate.search.orm.elasticsearch.test.search.shard_failure; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.List; + +import jakarta.inject.Inject; + +import org.hibernate.search.mapper.orm.session.SearchSession; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.hibernate.search.orm.elasticsearch.test.util.TransactionUtils; +import io.quarkus.narayana.jta.QuarkusTransaction; +import io.quarkus.test.QuarkusUnitTest; + +public class ShardFailureIgnoreFalseTest { + + 
@RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClass(TransactionUtils.class) + .addClass(MyEntity1.class) + .addClass(MyEntity2.class) + .addAsResource("hsearch-4915/index2.json")) + .withConfigurationResource("application.properties") + // Request that shard failures cause an exception instead of being ignored + .overrideConfigKey("quarkus.hibernate-search-orm.elasticsearch.query.shard-failure.ignore", "false") + // Override the type of the keyword field to integer, to create an error in one shard only. + .overrideConfigKey( + "quarkus.hibernate-search-orm.elasticsearch.indexes.\"MyEntity2\".schema-management.mapping-file", + "hsearch-4915/index2.json"); + + @Inject + SearchSession session; + + @Test + public void testShardFailureIgnored() { + QuarkusTransaction.joiningExisting().run(() -> { + session.toEntityManager().persist(new MyEntity1("42")); + session.toEntityManager().persist(new MyEntity2("42")); + }); + QuarkusTransaction.joiningExisting().run(() -> { + assertThatThrownBy(() -> session.search(List.of(MyEntity1.class, MyEntity2.class)) + .where(f -> f.wildcard().field("text").matching("4*")) + .fetchHits(20)) + // MyEntity2 fails because "text" is an integer field there + // We expect an exception + .hasMessageContaining("Elasticsearch request failed", + "\"type\": \"query_shard_exception\""); + }); + } +} diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/resources/hsearch-4915/index2.json b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/resources/hsearch-4915/index2.json new file mode 100644 index 0000000000000..561654100718e --- /dev/null +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/resources/hsearch-4915/index2.json @@ -0,0 +1,7 @@ +{ + "properties": { + "text": { + "type": "integer" + } + } +} \ No newline at end of file diff --git 
a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java index 7f72c8a5a8b82..586a3942338d4 100644 --- a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java +++ b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java @@ -456,6 +456,8 @@ private void contributeBackendRuntimeProperties(BiConsumer prope elasticsearchBackendConfig.threadPool().size()); addBackendConfig(propertyCollector, backendName, ElasticsearchBackendSettings.VERSION_CHECK_ENABLED, elasticsearchBackendConfig.versionCheck()); + addBackendConfig(propertyCollector, backendName, ElasticsearchBackendSettings.QUERY_SHARD_FAILURE_IGNORE, + elasticsearchBackendConfig.query().shardFailure().ignore()); addBackendConfig(propertyCollector, backendName, ElasticsearchBackendSettings.DISCOVERY_ENABLED, elasticsearchBackendConfig.discovery().enabled()); diff --git a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java index 8a9fa181fb631..7eef01aa32e97 100644 --- a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java +++ 
b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java @@ -148,6 +148,11 @@ interface ElasticsearchBackendRuntimeConfig { */ ThreadPoolConfig threadPool(); + /** + * Configuration for search queries to this backend. + */ + ElasticsearchQueryConfig query(); + /** * Whether Hibernate Search should check the version of the Elasticsearch cluster on startup. * @@ -505,6 +510,26 @@ interface ThreadPoolConfig { OptionalInt size(); } + @ConfigGroup + interface ElasticsearchQueryConfig { + /** + * Configuration for the behavior on shard failure. + */ + ElasticsearchQueryShardFailureConfig shardFailure(); + } + + @ConfigGroup + interface ElasticsearchQueryShardFailureConfig { + /** + * Whether partial shard failures are ignored (`true`) + * or lead to Hibernate Search throwing an exception (`false`). + *

+ * Will default to `false` in Hibernate Search 7. + */ + @WithDefault("true") + boolean ignore(); + } + // We can't set actual default values in this section, // otherwise "quarkus.hibernate-search-orm.elasticsearch.index-defaults" will be ignored. @ConfigGroup diff --git a/extensions/jdbc/jdbc-oracle/deployment/src/main/java/io/quarkus/jdbc/oracle/deployment/OracleMetadataOverrides.java b/extensions/jdbc/jdbc-oracle/deployment/src/main/java/io/quarkus/jdbc/oracle/deployment/OracleMetadataOverrides.java index e614e0b9ccfe8..317dbd961716c 100644 --- a/extensions/jdbc/jdbc-oracle/deployment/src/main/java/io/quarkus/jdbc/oracle/deployment/OracleMetadataOverrides.java +++ b/extensions/jdbc/jdbc-oracle/deployment/src/main/java/io/quarkus/jdbc/oracle/deployment/OracleMetadataOverrides.java @@ -4,6 +4,7 @@ import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.BuildSteps; import io.quarkus.deployment.builditem.RemovedResourceBuildItem; import io.quarkus.deployment.builditem.nativeimage.ExcludeConfigBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageAllowIncompleteClasspathBuildItem; @@ -11,6 +12,7 @@ import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem; +import io.quarkus.deployment.pkg.steps.NativeOrNativeSourcesBuild; import io.quarkus.maven.dependency.ArtifactKey; /** @@ -34,6 +36,7 @@ * require it, so this would facilitate the option to revert to the older version in * case of problems. 
*/ +@BuildSteps(onlyIf = NativeOrNativeSourcesBuild.class) public final class OracleMetadataOverrides { static final String DRIVER_JAR_MATCH_REGEX = "com\\.oracle\\.database\\.jdbc"; @@ -147,12 +150,6 @@ NativeImageAllowIncompleteClasspathBuildItem naughtyDriver() { return new NativeImageAllowIncompleteClasspathBuildItem("quarkus-jdbc-oracle"); } - @BuildStep - RemovedResourceBuildItem overrideSubstitutions() { - return new RemovedResourceBuildItem(ArtifactKey.fromString("com.oracle.database.jdbc:ojdbc11"), - Collections.singleton("oracle/nativeimage/Target_java_io_ObjectStreamClass.class")); - } - @BuildStep RemovedResourceBuildItem enhancedCharsetSubstitutions() { return new RemovedResourceBuildItem(ArtifactKey.fromString("com.oracle.database.jdbc:ojdbc11"), diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerRecorder.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerRecorder.java index 7284df162cafc..ecf9b2afc6aa9 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerRecorder.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerRecorder.java @@ -94,7 +94,8 @@ public void configureRegistries(MicrometerConfig config, Class registryClass = registry.getClass(); applyMeterFilters(registry, classMeterFilters.get(registryClass)); - var classSpecificCustomizers = classMeterRegistryCustomizers.get(registryClass); + var classSpecificCustomizers = classMeterRegistryCustomizers.getOrDefault(registryClass, + Collections.emptyList()); var newList = new ArrayList( globalMeterRegistryCustomizers.size() + classSpecificCustomizers.size()); newList.addAll(globalMeterRegistryCustomizers); diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxPoolMetrics.java 
b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxPoolMetrics.java index 32c62b00691da..9a9da31c36258 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxPoolMetrics.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxPoolMetrics.java @@ -25,6 +25,7 @@ public class VertxPoolMetrics implements PoolMetrics { private final LongAdder current = new LongAdder(); private final LongAdder queue = new LongAdder(); private final Counter completed; + private final Counter rejected; private final Timer queueDelay; VertxPoolMetrics(MeterRegistry registry, String poolType, String poolName, int maxPoolSize) { @@ -89,6 +90,11 @@ public Number get() { .tags(tags) .register(registry); + rejected = Counter.builder(name("rejected")) + .description("Number of times submissions to the pool have been rejected") + .tags(tags) + .register(registry); + } private String name(String suffix) { @@ -104,6 +110,7 @@ public EventTiming submitted() { @Override public void rejected(EventTiming submitted) { queue.decrement(); + rejected.increment(); submitted.end(); } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CodeAuthenticationMechanism.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CodeAuthenticationMechanism.java index f9e74b1bfe31c..94e4679728aa4 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CodeAuthenticationMechanism.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CodeAuthenticationMechanism.java @@ -304,6 +304,15 @@ private Uni reAuthenticate(Cookie sessionCookie, context.put(TenantConfigContext.class.getName(), configContext); return resolver.getTokenStateManager().getTokens(context, configContext.oidcConfig, sessionCookie.getValue(), getTokenStateRequestContext) + .onFailure(AuthenticationCompletionException.class) + .recoverWithUni( + new 
Function>() { + @Override + public Uni apply(Throwable t) { + return removeSessionCookie(context, configContext.oidcConfig) + .replaceWith(Uni.createFrom().failure(t)); + } + }) .chain(new Function>() { @Override public Uni apply(AuthorizationCodeTokens session) { @@ -845,7 +854,7 @@ private static boolean verifyNonce(OidcTenantConfig oidcConfig, CodeAuthenticati } } - private static Object errorMessage(Throwable t) { + private static String errorMessage(Throwable t) { return t.getCause() != null ? t.getCause().getMessage() : t.getMessage(); } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTokenStateManager.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTokenStateManager.java index bc66480aa59fb..fcc50bfe52a5f 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTokenStateManager.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTokenStateManager.java @@ -6,6 +6,7 @@ import io.quarkus.oidc.OidcRequestContext; import io.quarkus.oidc.OidcTenantConfig; import io.quarkus.oidc.TokenStateManager; +import io.quarkus.security.AuthenticationCompletionException; import io.quarkus.security.AuthenticationFailedException; import io.smallrye.mutiny.Uni; import io.vertx.core.http.Cookie; @@ -77,33 +78,38 @@ public Uni getTokens(RoutingContext routingContext, Oid tokenState = decryptAll ? decryptToken(tokenState, routingContext, oidcConfig) : tokenState; String[] tokens = CodeAuthenticationMechanism.COOKIE_PATTERN.split(tokenState); + String idToken = decryptAll ? tokens[0] : decryptToken(tokens[0], routingContext, oidcConfig); String accessToken = null; String refreshToken = null; - if (oidcConfig.tokenStateManager.strategy == OidcTenantConfig.TokenStateManager.Strategy.KEEP_ALL_TOKENS) { - if (!oidcConfig.tokenStateManager.splitTokens) { - accessToken = decryptAll ? 
tokens[1] : decryptToken(tokens[1], routingContext, oidcConfig); - refreshToken = decryptAll ? tokens[2] : decryptToken(tokens[2], routingContext, oidcConfig); - } else { - Cookie atCookie = getAccessTokenCookie(routingContext, oidcConfig); - if (atCookie != null) { - accessToken = decryptToken(atCookie.getValue(), routingContext, oidcConfig); - } - Cookie rtCookie = getRefreshTokenCookie(routingContext, oidcConfig); - if (rtCookie != null) { - refreshToken = decryptToken(rtCookie.getValue(), routingContext, oidcConfig); + try { + if (oidcConfig.tokenStateManager.strategy == OidcTenantConfig.TokenStateManager.Strategy.KEEP_ALL_TOKENS) { + if (!oidcConfig.tokenStateManager.splitTokens) { + accessToken = decryptAll ? tokens[1] : decryptToken(tokens[1], routingContext, oidcConfig); + refreshToken = decryptAll ? tokens[2] : decryptToken(tokens[2], routingContext, oidcConfig); + } else { + Cookie atCookie = getAccessTokenCookie(routingContext, oidcConfig); + if (atCookie != null) { + accessToken = decryptToken(atCookie.getValue(), routingContext, oidcConfig); + } + Cookie rtCookie = getRefreshTokenCookie(routingContext, oidcConfig); + if (rtCookie != null) { + refreshToken = decryptToken(rtCookie.getValue(), routingContext, oidcConfig); + } } - } - } else if (oidcConfig.tokenStateManager.strategy == OidcTenantConfig.TokenStateManager.Strategy.ID_REFRESH_TOKENS) { - if (!oidcConfig.tokenStateManager.splitTokens) { - refreshToken = decryptAll ? tokens[2] : decryptToken(tokens[2], routingContext, oidcConfig); - } else { - Cookie rtCookie = getRefreshTokenCookie(routingContext, oidcConfig); - if (rtCookie != null) { - refreshToken = decryptToken(rtCookie.getValue(), routingContext, oidcConfig); + } else if (oidcConfig.tokenStateManager.strategy == OidcTenantConfig.TokenStateManager.Strategy.ID_REFRESH_TOKENS) { + if (!oidcConfig.tokenStateManager.splitTokens) { + refreshToken = decryptAll ? 
tokens[2] : decryptToken(tokens[2], routingContext, oidcConfig); + } else { + Cookie rtCookie = getRefreshTokenCookie(routingContext, oidcConfig); + if (rtCookie != null) { + refreshToken = decryptToken(rtCookie.getValue(), routingContext, oidcConfig); + } } } + } catch (ArrayIndexOutOfBoundsException ex) { + return Uni.createFrom().failure(new AuthenticationCompletionException("Session cookie is malformed")); } return Uni.createFrom().item(new AuthorizationCodeTokens(idToken, accessToken, refreshToken)); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProvider.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProvider.java index 9df7e34a2c4a3..af7752db76223 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProvider.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProvider.java @@ -286,9 +286,9 @@ public TokenIntrospection apply(TokenIntrospection introspectionResult, Throwabl throw new AuthenticationFailedException(t); } if (!introspectionResult.isActive()) { - LOG.debugf("Token issued to client %s is not active", oidcConfig.clientId.get()); verifyTokenExpiry(introspectionResult.getLong(OidcConstants.INTROSPECTION_TOKEN_EXP)); - throw new AuthenticationFailedException(); + throw new AuthenticationFailedException( + String.format("Token issued to client %s is not active", oidcConfig.clientId.get())); } verifyTokenExpiry(introspectionResult.getLong(OidcConstants.INTROSPECTION_TOKEN_EXP)); try { diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxHttpExporter.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxHttpExporter.java index 176ca444d8b93..0ed0ef9764984 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxHttpExporter.java +++ 
b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxHttpExporter.java @@ -56,6 +56,7 @@ public CompletableResultCode shutdown() { static final class VertxHttpSender implements HttpSender { private static final String TRACES_PATH = "/v1/traces"; + private final String basePath; private final boolean compressionEnabled; private final Map headers; private final String contentType; @@ -69,6 +70,7 @@ static final class VertxHttpSender implements HttpSender { String contentType, Consumer clientOptionsCustomizer, Vertx vertx) { + this.basePath = determineBasePath(baseUri); this.compressionEnabled = compressionEnabled; this.headers = headersMap; this.contentType = contentType; @@ -81,13 +83,27 @@ static final class VertxHttpSender implements HttpSender { this.client = vertx.createHttpClient(httpClientOptions); } + private static String determineBasePath(URI baseUri) { + String path = baseUri.getPath(); + if (path.isEmpty() || path.equals("/")) { + return ""; + } + if (path.endsWith("/")) { // strip ending slash + path = path.substring(0, path.length() - 1); + } + if (!path.startsWith("/")) { // prepend leading slash + path = "/" + path; + } + return path; + } + @Override public void send(Consumer marshaler, int contentLength, Consumer onResponse, Consumer onError) { - client.request(HttpMethod.POST, TRACES_PATH) + client.request(HttpMethod.POST, basePath + TRACES_PATH) .onSuccess(new Handler<>() { @Override public void handle(HttpClientRequest request) { diff --git a/extensions/reactive-routes/runtime/pom.xml b/extensions/reactive-routes/runtime/pom.xml index d164906cce269..fae3468e7046f 100644 --- a/extensions/reactive-routes/runtime/pom.xml +++ b/extensions/reactive-routes/runtime/pom.xml @@ -59,6 +59,11 @@ io.quarkus quarkus-extension-maven-plugin + + + io.quarkus.reactive-routes + + maven-compiler-plugin diff --git a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/LockInterceptor.java 
b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/LockInterceptor.java index f454f9feab243..4d0d0c359265c 100644 --- a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/LockInterceptor.java +++ b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/LockInterceptor.java @@ -45,7 +45,9 @@ private Object writeLock(Lock lock, InvocationContext ctx) throws Exception { boolean locked = false; try { - rl.lock(); + if (readHoldCount > 0) { + rl.lock(); + } try { if (readHoldCount > 0) { // Release all read locks hold by the current thread before acquiring the write lock @@ -63,7 +65,9 @@ private Object writeLock(Lock lock, InvocationContext ctx) throws Exception { locked = true; } } finally { - rl.unlock(); + if (readHoldCount > 0) { + rl.unlock(); + } } return ctx.proceed(); } finally { diff --git a/independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/lock/LockInterceptorDoubleWriteDeadlockTest.java b/independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/lock/LockInterceptorDoubleWriteDeadlockTest.java new file mode 100644 index 0000000000000..38880fc727906 --- /dev/null +++ b/independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/lock/LockInterceptorDoubleWriteDeadlockTest.java @@ -0,0 +1,58 @@ +package io.quarkus.arc.test.lock; + +import java.util.ArrayList; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import jakarta.enterprise.context.ApplicationScoped; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.arc.Lock; +import io.quarkus.arc.impl.LockInterceptor; +import io.quarkus.arc.test.ArcTestContainer; + +public class LockInterceptorDoubleWriteDeadlockTest { + + @RegisterExtension + public ArcTestContainer container = new ArcTestContainer(LockBean.class, 
LockInterceptor.class, + LockInterceptor.class); + + private final ExecutorService threadPool = Executors.newCachedThreadPool(); + + @Test + void testLocks() throws ExecutionException, InterruptedException { + LockBean lockBean = Arc.container().instance(LockBean.class).get(); + final var futures = new ArrayList>(); + for (int i = 0; i < 10; i++) { + final Future submit = threadPool.submit(() -> { + lockBean.method1(); + return "value"; + }); + futures.add(submit); + } + + for (final var future : futures) { + future.get(); + } + + } + + @ApplicationScoped + @Lock + public static class LockBean { + + public void method1() { + // invokes another intercepted write-locked method + method2(); + } + + public void method2() { + } + } + +} diff --git a/independent-projects/enforcer-rules/src/it/smoketest/pom.xml b/independent-projects/enforcer-rules/src/it/smoketest/pom.xml index 661eb1658613b..7703e96701d09 100644 --- a/independent-projects/enforcer-rules/src/it/smoketest/pom.xml +++ b/independent-projects/enforcer-rules/src/it/smoketest/pom.xml @@ -3,9 +3,10 @@ 4.0.0 - org.jboss - jboss-parent - 39 + io.quarkus + quarkus-parent + 999-SNAPSHOT + ../../../parent/pom.xml io.quarkus @@ -19,4 +20,4 @@ - \ No newline at end of file + diff --git a/independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/Descriptors.java b/independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/Descriptors.java index 735f670ab9266..45280fd0fd451 100644 --- a/independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/Descriptors.java +++ b/independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/Descriptors.java @@ -100,5 +100,6 @@ private Descriptors() { CompletionStage.class); public static final FieldDescriptor RESULTS_TRUE = FieldDescriptor.of(Results.class, "TRUE", CompletedStage.class); public static final FieldDescriptor RESULTS_FALSE = FieldDescriptor.of(Results.class, "FALSE", CompletedStage.class); + public 
static final FieldDescriptor RESULTS_NULL = FieldDescriptor.of(Results.class, "NULL", CompletedStage.class); } diff --git a/independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/ValueResolverGenerator.java b/independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/ValueResolverGenerator.java index 4be90f58db7e6..bc8f3c1a09652 100644 --- a/independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/ValueResolverGenerator.java +++ b/independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/ValueResolverGenerator.java @@ -393,8 +393,12 @@ public void accept(BytecodeCreator bc) { && returnType.asPrimitiveType().primitive() == Primitive.BOOLEAN) { completeBoolean(bc, invokeRet); } else if (method.returnType().name().equals(DotNames.BOOLEAN)) { + BytecodeCreator isNull = bc.ifNull(invokeRet).trueBranch(); + isNull.returnValue(isNull.readStaticField(Descriptors.RESULTS_NULL)); completeBoolean(bc, bc.invokeVirtualMethod(Descriptors.BOOLEAN_VALUE, invokeRet)); } else if (isEnum(returnType)) { + BytecodeCreator isNull = bc.ifNull(invokeRet).trueBranch(); + isNull.returnValue(isNull.readStaticField(Descriptors.RESULTS_NULL)); completeEnum(index.getClassByName(returnType.name()), valueResolver, invokeRet, bc); } else { bc.returnValue(bc.invokeStaticMethod(Descriptors.COMPLETED_STAGE_OF, invokeRet)); @@ -536,11 +540,11 @@ private boolean completeEnum(ClassInfo enumClass, ClassCreator valueResolver, Re BytecodeCreator match; if (ifThenElse == null) { ifThenElse = bc.ifThenElse( - Gizmo.equals(bc, result, bc.readStaticField(enumConstantField))); + Gizmo.equals(bc, bc.readStaticField(enumConstantField), result)); match = ifThenElse.then(); } else { match = ifThenElse.elseIf( - b -> Gizmo.equals(b, result, b.readStaticField(enumConstantField))); + b -> Gizmo.equals(b, b.readStaticField(enumConstantField), result)); } match.returnValue(match.invokeVirtualMethod( enumConstantMethod.getMethodDescriptor(), 
match.getThis())); diff --git a/independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/MyService.java b/independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/MyService.java index 9e7327d27ffa8..2a835c81cf526 100644 --- a/independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/MyService.java +++ b/independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/MyService.java @@ -33,10 +33,18 @@ public boolean hasName() { return name != null; } - public Boolean isActive() { + public boolean isActive() { return true; } + public Boolean isActiveObject() { + return true; + } + + public Boolean isActiveObjectNull() { + return null; + } + public boolean hasItems() { return false; } @@ -45,6 +53,10 @@ public MyEnum myEnum() { return MyEnum.BAR; } + public MyEnum myEnumNull() { + return null; + } + public List getList(int limit, String dummy) { AtomicInteger idx = new AtomicInteger(0); return Stream.generate(() -> "" + idx.getAndIncrement()) diff --git a/independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/SimpleGeneratorTest.java b/independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/SimpleGeneratorTest.java index 4c7f2722e7a2b..6246789efd319 100644 --- a/independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/SimpleGeneratorTest.java +++ b/independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/SimpleGeneratorTest.java @@ -113,6 +113,8 @@ public void testWithEngine() throws Exception { } Engine engine = builder.build(); assertEquals(" FOO ", engine.parse("{#if isActive} {name.toUpperCase} {/if}").render(new MyService())); + assertEquals(" FOO ", engine.parse("{#if isActiveObject} {name.toUpperCase} {/if}").render(new MyService())); + assertEquals("", engine.parse("{#if isActiveObjectNull} {name.toUpperCase} {/if}").render(new MyService())); assertEquals(" FOO ", engine.parse("{#if active} {name.toUpperCase} 
{/if}").render(new MyService())); assertEquals(" FOO ", engine.parse("{#if !hasItems} {name.toUpperCase} {/if}").render(new MyService())); assertEquals(" FOO ", engine.parse("{#if !items} {name.toUpperCase} {/if}").render(new MyService())); @@ -138,6 +140,7 @@ public void testWithEngine() throws Exception { assertEquals("5", engine.parse("{#each service.getDummyVarargs(5)}{it}{/}").data("service", new MyService()) .render()); + assertEquals("BAR::", engine.parse("{myEnum}::{myEnumNull}").render(new MyService())); // Namespace resolvers assertEquals("OK", engine.parse("{#if enum is MyEnum:BAR}OK{/if}").data("enum", MyEnum.BAR).render()); diff --git a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/ServerSerialisers.java b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/ServerSerialisers.java index 5cd3502e727f4..60eeac37665a2 100644 --- a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/ServerSerialisers.java +++ b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/ServerSerialisers.java @@ -529,7 +529,7 @@ public static void encodeResponseHeaders(ResteasyReactiveRequestContext requestC vertxResponse.addResponseHeader(header, (CharSequence) HeaderUtil.headerToString(o)); } } - if (header.equals("Transfer-Encoding")) { // using both headers together is not allowed + if (header.equalsIgnoreCase("Transfer-Encoding")) { // using both headers together is not allowed vertxResponse.removeResponseHeader("Content-Length"); } } else { diff --git a/independent-projects/tools/analytics-common/pom.xml b/independent-projects/tools/analytics-common/pom.xml index dd1463013a9f5..dde098bbf5f0c 100644 --- a/independent-projects/tools/analytics-common/pom.xml +++ b/independent-projects/tools/analytics-common/pom.xml @@ -75,4 +75,17 @@ + + + + 
maven-surefire-plugin + + + + false + + + + + \ No newline at end of file diff --git a/independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/ConfigService.java b/independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/ConfigService.java index 5efe3c771cb5e..7e7b0ee4bf8c2 100644 --- a/independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/ConfigService.java +++ b/independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/ConfigService.java @@ -105,6 +105,8 @@ public void userAcceptance(Function analyticsEnabledSupplier) { *

* Disabled by default. *

+ * If running on CI, false. + *

* If Not explicitly approved by user in dev mode, false *

* If analytics disabled by local property, false @@ -116,6 +118,12 @@ public void userAcceptance(Function analyticsEnabledSupplier) { * @return true if active */ public boolean isActive() { + if (isCi()) { + if (log.isDebugEnabled()) { + log.debug("[Quarkus build analytics] Running on CI. Skipping analytics."); + } + return false; + } if (!isLocalConfigActive()) { if (log.isDebugEnabled()) { log.debug("[Quarkus build analytics] Local config is not active. Skipping analytics."); @@ -139,6 +147,10 @@ public boolean isActive() { return true; } + private boolean isCi() { + return "true".equalsIgnoreCase(System.getenv("CI")); + } + boolean isLocalConfigActive() { if (getProperty(QUARKUS_ANALYTICS_DISABLED_LOCAL_PROP, false)) { return false; // disabled by local property diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/commands/handlers/UpdateProjectCommandHandler.java b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/commands/handlers/UpdateProjectCommandHandler.java index c57d632913688..3a0895b31e170 100644 --- a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/commands/handlers/UpdateProjectCommandHandler.java +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/commands/handlers/UpdateProjectCommandHandler.java @@ -30,7 +30,6 @@ import io.quarkus.devtools.project.update.ProjectPlatformUpdateInfo; import io.quarkus.devtools.project.update.ProjectUpdateInfos; import io.quarkus.devtools.project.update.rewrite.QuarkusUpdateCommand; -import io.quarkus.devtools.project.update.rewrite.QuarkusUpdateException; import io.quarkus.devtools.project.update.rewrite.QuarkusUpdates; import io.quarkus.devtools.project.update.rewrite.QuarkusUpdatesRepository; import io.quarkus.maven.dependency.ArtifactCoords; @@ -91,7 +90,8 @@ public QuarkusCommandOutcome execute(QuarkusCommandInvocation invocation) throws projectQuarkusPlatformBom.getVersion(), 
targetPlatformVersion, kotlinVersion, - updateJavaVersion); + updateJavaVersion, + extensionsUpdateInfo); Path recipe = null; try { recipe = Files.createTempFile("quarkus-project-recipe-", ".yaml"); @@ -116,8 +116,6 @@ public QuarkusCommandOutcome execute(QuarkusCommandInvocation invocation) throws rewriteDryRun); } catch (IOException e) { throw new QuarkusCommandException("Error while generating the project update script", e); - } catch (QuarkusUpdateException e) { - throw new QuarkusCommandException("Error while running the project update script", e); } } } diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateCommand.java b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateCommand.java index 41d4c6ccf6c17..f8d252621bb8f 100644 --- a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateCommand.java +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateCommand.java @@ -12,7 +12,9 @@ import java.nio.file.Paths; import java.util.Arrays; import java.util.Map; +import java.util.Optional; import java.util.Set; +import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -39,7 +41,7 @@ public static String goal(boolean dryRun) { } public static void handle(MessageWriter log, BuildTool buildTool, Path baseDir, - String rewritePluginVersion, String recipesGAV, Path recipe, boolean dryRun) throws QuarkusUpdateException { + String rewritePluginVersion, String recipesGAV, Path recipe, boolean dryRun) { switch (buildTool) { case MAVEN: runMavenUpdate(log, baseDir, rewritePluginVersion, recipesGAV, recipe, dryRun); @@ -53,7 +55,7 @@ public static void handle(MessageWriter log, BuildTool buildTool, Path baseDir, } private static void runGradleUpdate(MessageWriter log, Path 
baseDir, String rewritePluginVersion, String recipesGAV, - Path recipe, boolean dryRun) throws QuarkusUpdateException { + Path recipe, boolean dryRun) { Path tempInit = null; try { tempInit = Files.createTempFile("openrewrite-init", "gradle"); @@ -77,8 +79,11 @@ private static void runGradleUpdate(MessageWriter log, Path baseDir, String rewr "--init-script", tempInit.toAbsolutePath().toString(), dryRun ? "rewriteDryRun" : "rewriteRun" }; executeCommand(baseDir, command, log); + } catch (QuarkusUpdateException e) { + throw e; } catch (Exception e) { - throw new QuarkusUpdateException("Error while running Gradle rewrite command", e); + throw new QuarkusUpdateException( + "Error while running Gradle rewrite command, see the execution logs above for more details", e); } finally { if (tempInit != null) { try { @@ -93,7 +98,7 @@ private static void runGradleUpdate(MessageWriter log, Path baseDir, String rewr private static void runMavenUpdate(MessageWriter log, Path baseDir, String rewritePluginVersion, String recipesGAV, Path recipe, - boolean dryRun) throws QuarkusUpdateException { + boolean dryRun) { final String mvnBinary = findMvnBinary(baseDir); executeCommand(baseDir, getMavenUpdateCommand(mvnBinary, rewritePluginVersion, recipesGAV, recipe, dryRun), log); @@ -118,7 +123,7 @@ private static String[] getMavenUpdateCommand(String mvnBinary, String rewritePl "-Drewrite.pomCacheEnabled=false" }; } - private static void executeCommand(Path baseDir, String[] command, MessageWriter log) throws QuarkusUpdateException { + private static void executeCommand(Path baseDir, String[] command, MessageWriter log) { ProcessBuilder processBuilder = new ProcessBuilder(); log.info(""); log.info(""); @@ -136,8 +141,28 @@ private static void executeCommand(Path baseDir, String[] command, MessageWriter BufferedReader errorReader = new BufferedReader(new java.io.InputStreamReader(process.getErrorStream())); String line; + LogLevel currentLogLevel = LogLevel.UNKNOWN; + while ((line = 
inputReader.readLine()) != null) { - log.info(line); + Optional detectedLogLevel = LogLevel.of(line); + if (detectedLogLevel.isPresent()) { + currentLogLevel = detectedLogLevel.get(); + } + switch (currentLogLevel) { + case ERROR: + log.error(currentLogLevel.clean(line)); + break; + case WARNING: + log.warn(currentLogLevel.clean(line)); + break; + case INFO: + log.info(currentLogLevel.clean(line)); + break; + case UNKNOWN: + default: + log.info(line); + break; + } } while ((line = errorReader.readLine()) != null) { log.error(line); @@ -149,14 +174,19 @@ private static void executeCommand(Path baseDir, String[] command, MessageWriter int exitCode = process.waitFor(); if (exitCode != 0) { - throw new QuarkusUpdateException("The command to update the project exited with an error: " + exitCode); + throw new QuarkusUpdateExitErrorException( + "The command to update the project exited with an error, see the execution logs above for more details"); } + } catch (QuarkusUpdateException e) { + throw e; } catch (Exception e) { - throw new QuarkusUpdateException("Error while executing command to udpate the project", e); + throw new QuarkusUpdateException( + "Error while executing the command to update the project, see the execution logs above for more details", + e); } } - static String findMvnBinary(Path baseDir) throws QuarkusUpdateException { + static String findMvnBinary(Path baseDir) { Path mavenCmd = findWrapperOrBinary(baseDir, "mvnw", "mvn"); if (mavenCmd == null) { throw new QuarkusUpdateException("Cannot locate mvnw or mvn" @@ -165,7 +195,7 @@ static String findMvnBinary(Path baseDir) throws QuarkusUpdateException { return mavenCmd.toString(); } - static String findGradleBinary(Path baseDir) throws QuarkusUpdateException { + static String findGradleBinary(Path baseDir) { Path gradleCmd = findWrapperOrBinary(baseDir, "gradlew", "gradle"); if (gradleCmd == null) { throw new QuarkusUpdateException("Cannot gradlew mvnw or gradle" @@ -247,4 +277,49 @@ private static 
boolean hasMaven(Path dir) { return Files.exists(dir.resolve("pom.xml")); } + private enum LogLevel { + + ERROR, + WARNING, + INFO, + UNKNOWN; + + private static final Pattern LEVEL_PATTERN = Pattern.compile("^\\[[A-Z]+\\].*"); + + private static Optional of(String line) { + if (line == null || line.isBlank()) { + return Optional.empty(); + } + + for (LogLevel level : LogLevel.values()) { + if (level.matches(line)) { + return Optional.of(level); + } + } + + if (LEVEL_PATTERN.matcher(line).matches()) { + return Optional.of(UNKNOWN); + } + + return Optional.empty(); + } + + private String clean(String line) { + if (line == null || line.isBlank()) { + return line; + } + + String pattern = "[" + name() + "]"; + + if (line.length() < pattern.length()) { + return line; + } + + return line.substring(pattern.length()).trim(); + } + + private boolean matches(String line) { + return line != null && line.startsWith("[" + name() + "]"); + } + } } diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateException.java b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateException.java index 755c396fdbe39..ed5ac4f6160cd 100644 --- a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateException.java +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateException.java @@ -1,6 +1,6 @@ package io.quarkus.devtools.project.update.rewrite; -public class QuarkusUpdateException extends Exception { +public class QuarkusUpdateException extends RuntimeException { private static final long serialVersionUID = 1L; diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateExitErrorException.java 
b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateExitErrorException.java new file mode 100644 index 0000000000000..8625594a8d8a1 --- /dev/null +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateExitErrorException.java @@ -0,0 +1,14 @@ +package io.quarkus.devtools.project.update.rewrite; + +public class QuarkusUpdateExitErrorException extends QuarkusUpdateException { + + private static final long serialVersionUID = 1L; + + public QuarkusUpdateExitErrorException(String message, Throwable cause) { + super(message, cause); + } + + public QuarkusUpdateExitErrorException(String message) { + super(message); + } +} diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdates.java b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdates.java index b5c45ab54e708..14d3df0caaf65 100644 --- a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdates.java +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdates.java @@ -2,12 +2,16 @@ import java.io.IOException; import java.nio.file.Path; +import java.util.List; import java.util.Optional; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; import io.quarkus.devtools.messagewriter.MessageWriter; import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.update.ExtensionUpdateInfo; +import io.quarkus.devtools.project.update.ProjectExtensionsUpdateInfo; import io.quarkus.devtools.project.update.rewrite.QuarkusUpdatesRepository.FetchResult; +import io.quarkus.devtools.project.update.rewrite.operations.UpdateDependencyVersionOperation; import io.quarkus.devtools.project.update.rewrite.operations.UpdateJavaVersionOperation; 
import io.quarkus.devtools.project.update.rewrite.operations.UpdatePropertyOperation; import io.quarkus.devtools.project.update.rewrite.operations.UpgradeGradlePluginOperation; @@ -33,7 +37,8 @@ public static FetchResult createRecipe(MessageWriter log, Path target, MavenArti switch (request.buildTool) { case MAVEN: recipe.addOperation(new UpdatePropertyOperation("quarkus.platform.version", request.targetVersion)) - .addOperation(new UpdatePropertyOperation("quarkus.version", request.targetVersion)); + .addOperation(new UpdatePropertyOperation("quarkus.version", request.targetVersion)) + .addOperation(new UpdatePropertyOperation("quarkus-plugin.version", request.targetVersion)); if (request.kotlinVersion != null) { recipe.addOperation(new UpdatePropertyOperation("kotlin.version", request.kotlinVersion)); } @@ -48,6 +53,24 @@ public static FetchResult createRecipe(MessageWriter log, Path target, MavenArti break; } + for (List nonPlatformExtensionsUpdates : request.projectExtensionsUpdateInfo + .getNonPlatformExtensions().values()) { + for (ExtensionUpdateInfo nonPlatformExtensionsUpdate : nonPlatformExtensionsUpdates) { + if (nonPlatformExtensionsUpdate.getCurrentDep().isPlatformExtension()) { + // add, my understanding is that we should define the version? As a dependency, as a managed one? + // not completely sure how to make it work for a multi-module project? + } else if (nonPlatformExtensionsUpdate.getRecommendedDependency().isPlatformExtension()) { + // remove, decide what to do here, should we remove the version given it is now managed? Will OpenRewrite support that? + // not completely sure how to make it work for a multi-module project? 
+ } else { + recipe.addOperation(new UpdateDependencyVersionOperation( + nonPlatformExtensionsUpdate.getCurrentDep().getArtifact().getGroupId(), + nonPlatformExtensionsUpdate.getCurrentDep().getArtifact().getArtifactId(), + nonPlatformExtensionsUpdate.getRecommendedDependency().getVersion())); + } + } + } + for (String s : result.getRecipes()) { recipe.addRecipes(QuarkusUpdateRecipeIO.readRecipesYaml(s)); } @@ -62,19 +85,21 @@ public static class ProjectUpdateRequest { public final String targetVersion; public final String kotlinVersion; public final Optional updateJavaVersion; + public final ProjectExtensionsUpdateInfo projectExtensionsUpdateInfo; public ProjectUpdateRequest(String currentVersion, String targetVersion, String kotlinVersion, - Optional updateJavaVersion) { - this(BuildTool.MAVEN, currentVersion, targetVersion, kotlinVersion, updateJavaVersion); + Optional updateJavaVersion, ProjectExtensionsUpdateInfo projectExtensionsUpdateInfo) { + this(BuildTool.MAVEN, currentVersion, targetVersion, kotlinVersion, updateJavaVersion, projectExtensionsUpdateInfo); } public ProjectUpdateRequest(BuildTool buildTool, String currentVersion, String targetVersion, String kotlinVersion, - Optional updateJavaVersion) { + Optional updateJavaVersion, ProjectExtensionsUpdateInfo projectExtensionsUpdateInfo) { this.buildTool = buildTool; this.currentVersion = currentVersion; this.targetVersion = targetVersion; this.kotlinVersion = kotlinVersion; this.updateJavaVersion = updateJavaVersion; + this.projectExtensionsUpdateInfo = projectExtensionsUpdateInfo; } } } diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/operations/UpdateDependencyVersionOperation.java b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/operations/UpdateDependencyVersionOperation.java index 18c6f1c58741b..3a8fabb65fd20 100644 --- 
a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/operations/UpdateDependencyVersionOperation.java +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/operations/UpdateDependencyVersionOperation.java @@ -20,6 +20,12 @@ public UpdateDependencyVersionOperation(String groupId, String artifactId, Strin @Override public Map single(BuildTool buildTool) { switch (buildTool) { + case GRADLE: + return Map.of("org.openrewrite.gradle.UpgradeDependencyVersion", + Map.of( + "groupId", groupId, + "artifactId", artifactId, + "newVersion", newVersion)); case MAVEN: return Map.of("org.openrewrite.maven.UpgradeDependencyVersion", Map.of( diff --git a/independent-projects/tools/pom.xml b/independent-projects/tools/pom.xml index a2f65bfe4be5f..aae4bd09b0cee 100644 --- a/independent-projects/tools/pom.xml +++ b/independent-projects/tools/pom.xml @@ -45,7 +45,6 @@ 8.1.1 - 3.11.0 1.6.0 2.12.13 diff --git a/integration-tests/elasticsearch-java-client/pom.xml b/integration-tests/elasticsearch-java-client/pom.xml index bcb1f3405e81f..ab829c3e6a9a8 100644 --- a/integration-tests/elasticsearch-java-client/pom.xml +++ b/integration-tests/elasticsearch-java-client/pom.xml @@ -173,6 +173,16 @@ See https://www.elastic.co/guide/en/elasticsearch/reference/8.8/modules-cluster.html#disk-based-shard-allocation --> false + + false + false + false + false + false + false + false + false + false 9200:9200 @@ -188,7 +198,7 @@ GET 200 - + diff --git a/integration-tests/elasticsearch-rest-client/pom.xml b/integration-tests/elasticsearch-rest-client/pom.xml index 0010baf5a2d06..18a106a3dbff5 100644 --- a/integration-tests/elasticsearch-rest-client/pom.xml +++ b/integration-tests/elasticsearch-rest-client/pom.xml @@ -173,6 +173,16 @@ See https://www.elastic.co/guide/en/elasticsearch/reference/8.8/modules-cluster.html#disk-based-shard-allocation --> false + + false + false + false + false + false + false + 
false + false + false 9200:9200 diff --git a/integration-tests/hibernate-search-orm-elasticsearch-coordination-outbox-polling/pom.xml b/integration-tests/hibernate-search-orm-elasticsearch-coordination-outbox-polling/pom.xml index 4f59a84df7820..a6da8fd9fdecf 100644 --- a/integration-tests/hibernate-search-orm-elasticsearch-coordination-outbox-polling/pom.xml +++ b/integration-tests/hibernate-search-orm-elasticsearch-coordination-outbox-polling/pom.xml @@ -213,7 +213,18 @@ and lead to problems on large disks with little space left. See https://www.elastic.co/guide/en/elasticsearch/reference/8.8/modules-cluster.html#disk-based-shard-allocation --> - false + false + + false + false + false + false + false + false + false + false + false + 9200:9200 diff --git a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/pom.xml b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/pom.xml index a888a9cbbf521..e2364a665a118 100644 --- a/integration-tests/hibernate-search-orm-elasticsearch-tenancy/pom.xml +++ b/integration-tests/hibernate-search-orm-elasticsearch-tenancy/pom.xml @@ -230,6 +230,16 @@ See https://www.elastic.co/guide/en/elasticsearch/reference/8.8/modules-cluster.html#disk-based-shard-allocation --> false + + false + false + false + false + false + false + false + false + false 9200:9200 diff --git a/integration-tests/hibernate-search-orm-elasticsearch/pom.xml b/integration-tests/hibernate-search-orm-elasticsearch/pom.xml index 5494677e184e1..9e52061daae01 100644 --- a/integration-tests/hibernate-search-orm-elasticsearch/pom.xml +++ b/integration-tests/hibernate-search-orm-elasticsearch/pom.xml @@ -192,6 +192,16 @@ See https://www.elastic.co/guide/en/elasticsearch/reference/8.8/modules-cluster.html#disk-based-shard-allocation --> false + + false + false + false + false + false + false + false + false + false 9200:9200 diff --git a/integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/KubernetesNewClientTest.java 
b/integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/KubernetesNewClientTest.java index e19b6ebc6c451..9eabaacc6d954 100644 --- a/integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/KubernetesNewClientTest.java +++ b/integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/KubernetesNewClientTest.java @@ -45,8 +45,8 @@ private void setupMockServerForTest() { Pod pod1 = new PodBuilder().withNewMetadata().withName("pod1").withNamespace("test").and().build(); Pod pod2 = new PodBuilder().withNewMetadata().withName("pod2").withNamespace("test").and().build(); - mockServer.getClient().inNamespace("test").pods().create(pod1); - mockServer.getClient().inNamespace("test").pods().create(pod2); + mockServer.getClient().inNamespace("test").resource(pod1).create(); + mockServer.getClient().inNamespace("test").resource(pod2).create(); } } diff --git a/integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/OpenShiftTestServerTest.java b/integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/OpenShiftTestServerTest.java index 019b1dc5b24e6..37b2570d6c70c 100644 --- a/integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/OpenShiftTestServerTest.java +++ b/integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/OpenShiftTestServerTest.java @@ -30,12 +30,10 @@ class OpenShiftTestServerTest { @Test void testInjectionDefaultsToCrud() { - mockServer.getOpenshiftClient().projects().createOrReplace(new ProjectBuilder() + client.projects().createOrReplace(new ProjectBuilder() .withNewMetadata().withName("example-project").addToLabels("project", "crud-is-true").endMetadata() .build()); assertThat(client) - .isNotSameAs(mockServer.getOpenshiftClient()) - .isNotSameAs(mockServer.getOpenshiftClient()) .returns("crud-is-true", c -> 
c.projects().withName("example-project").get().getMetadata().getLabels().get("project")); } diff --git a/integration-tests/logging-gelf/pom.xml b/integration-tests/logging-gelf/pom.xml index 4c1c3e4fd4b5e..03d4e11bcfd49 100644 --- a/integration-tests/logging-gelf/pom.xml +++ b/integration-tests/logging-gelf/pom.xml @@ -165,6 +165,16 @@ See https://www.elastic.co/guide/en/elasticsearch/reference/8.8/modules-cluster.html#disk-based-shard-allocation --> false + + false + false + false + false + false + false + false + false + false Elasticsearch: diff --git a/integration-tests/logging-gelf/src/test/resources/docker-compose-efk.yml b/integration-tests/logging-gelf/src/test/resources/docker-compose-efk.yml index ec5090633fc30..03811a7a0d101 100644 --- a/integration-tests/logging-gelf/src/test/resources/docker-compose-efk.yml +++ b/integration-tests/logging-gelf/src/test/resources/docker-compose-efk.yml @@ -7,7 +7,17 @@ services: ES_JAVA_OPTS: "-Xms512m -Xmx512m" discovery.type: "single-node" xpack.security.enabled: "false" - cluster.routing.allocation.disk.threshold_enabled: false + cluster.routing.allocation.disk.threshold_enabled: "false" + # Disable some features that are not needed in our tests and just slow down startup --> + xpack.profiling.enabled: "false" + xpack.monitoring.templates.enabled: "false" + xpack.ml.enabled: "false" + xpack.watcher.enabled: "false" + xpack.ent_search.enabled: "false" + stack.templates.enabled: "false" + cluster.deprecation_indexing.enabled: "false" + indices.lifecycle.history_index_enabled: "false" + slm.history_index_enabled: "false" networks: - efk diff --git a/integration-tests/logging-gelf/src/test/resources/docker-compose-elk.yml b/integration-tests/logging-gelf/src/test/resources/docker-compose-elk.yml index 6363ac563612e..df8f496fd12d4 100644 --- a/integration-tests/logging-gelf/src/test/resources/docker-compose-elk.yml +++ b/integration-tests/logging-gelf/src/test/resources/docker-compose-elk.yml @@ -8,6 +8,16 @@ services: 
discovery.type: "single-node" xpack.security.enabled: "false" cluster.routing.allocation.disk.threshold_enabled: false + # Disable some features that are not needed in our tests and just slow down startup --> + xpack.profiling.enabled: "false" + xpack.monitoring.templates.enabled: "false" + xpack.ml.enabled: "false" + xpack.watcher.enabled: "false" + xpack.ent_search.enabled: "false" + stack.templates.enabled: "false" + cluster.deprecation_indexing.enabled: "false" + indices.lifecycle.history_index_enabled: "false" + slm.history_index_enabled: "false" networks: - elk diff --git a/integration-tests/logging-gelf/src/test/resources/docker-compose-graylog.yml b/integration-tests/logging-gelf/src/test/resources/docker-compose-graylog.yml index 645a9e1c615ac..b47e514b605eb 100644 --- a/integration-tests/logging-gelf/src/test/resources/docker-compose-graylog.yml +++ b/integration-tests/logging-gelf/src/test/resources/docker-compose-graylog.yml @@ -8,6 +8,16 @@ services: discovery.type: "single-node" xpack.security.enabled: "false" cluster.routing.allocation.disk.threshold_enabled: false + # Disable some features that are not needed in our tests and just slow down startup --> + xpack.profiling.enabled: "false" + xpack.monitoring.templates.enabled: "false" + xpack.ml.enabled: "false" + xpack.watcher.enabled: "false" + xpack.ent_search.enabled: "false" + stack.templates.enabled: "false" + cluster.deprecation_indexing.enabled: "false" + indices.lifecycle.history_index_enabled: "false" + slm.history_index_enabled: "false" networks: - graylog diff --git a/integration-tests/maven/src/test/resources-filtered/projects/rr-with-json-logging/pom.xml b/integration-tests/maven/src/test/resources-filtered/projects/rr-with-json-logging/pom.xml index f1a923ff17713..f4a8d560e908f 100644 --- a/integration-tests/maven/src/test/resources-filtered/projects/rr-with-json-logging/pom.xml +++ b/integration-tests/maven/src/test/resources-filtered/projects/rr-with-json-logging/pom.xml @@ -48,12 
+48,7 @@ commons-io commons-io - 2.6 - - - org.drools - drools-engine - 7.51.0.Final + 2.13.0 io.quarkus diff --git a/integration-tests/maven/src/test/resources-filtered/projects/rr-with-json-logging/src/main/java/org/acme/ClasspathResources.java b/integration-tests/maven/src/test/resources-filtered/projects/rr-with-json-logging/src/main/java/org/acme/ClasspathResources.java index 62da97eb07a1c..4da0183f2e478 100644 --- a/integration-tests/maven/src/test/resources-filtered/projects/rr-with-json-logging/src/main/java/org/acme/ClasspathResources.java +++ b/integration-tests/maven/src/test/resources-filtered/projects/rr-with-json-logging/src/main/java/org/acme/ClasspathResources.java @@ -165,11 +165,13 @@ private String assertCorrectDirectory() { private String assertUniqueDirectories() { final String testType = "unique-directories"; try { - Enumeration resources = this.getClass().getClassLoader().getResources("META-INF/kie.conf"); + Enumeration resources = this.getClass().getClassLoader().getResources("META-INF/quarkus-extension.yaml"); List resourcesList = Collections.list(resources); - // 'META-INF/kie.conf' should be present in 'kie-internal', 'drools-core', 'drools-compiler' and 'drools-model-compiler' - if (resourcesList.size() != 4) { - return errorResult(testType, "wrong number of directory urls"); + // 'META-INF/quarkus-extension.yaml' should be present in all extensions + int expected = 12; + if (resourcesList.size() != expected) { + return errorResult(testType, + "wrong number of directory urls, expected " + expected + " but got " + resourcesList.size()); } return SUCCESS; } catch (Exception e) { diff --git a/integration-tests/mongodb-panache/src/test/java/io/quarkus/it/mongodb/panache/reactive/ReactiveMongodbPanacheResourceTest.java b/integration-tests/mongodb-panache/src/test/java/io/quarkus/it/mongodb/panache/reactive/ReactiveMongodbPanacheResourceTest.java index 6c9949572f75b..ffb7f111150cc 100644 --- 
a/integration-tests/mongodb-panache/src/test/java/io/quarkus/it/mongodb/panache/reactive/ReactiveMongodbPanacheResourceTest.java +++ b/integration-tests/mongodb-panache/src/test/java/io/quarkus/it/mongodb/panache/reactive/ReactiveMongodbPanacheResourceTest.java @@ -22,6 +22,7 @@ import jakarta.ws.rs.sse.SseEventSource; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import com.fasterxml.jackson.databind.ObjectMapper; @@ -347,6 +348,7 @@ public void testMoreEntityFunctionalities() { } @Test + @Disabled("flaky") public void testMoreRepositoryFunctionalities() { get("/test/reactive/repository").then().statusCode(200); } diff --git a/integration-tests/oidc-code-flow/src/test/java/io/quarkus/it/keycloak/CodeFlowTest.java b/integration-tests/oidc-code-flow/src/test/java/io/quarkus/it/keycloak/CodeFlowTest.java index 9ae62850ca88b..75e3afffc6d24 100644 --- a/integration-tests/oidc-code-flow/src/test/java/io/quarkus/it/keycloak/CodeFlowTest.java +++ b/integration-tests/oidc-code-flow/src/test/java/io/quarkus/it/keycloak/CodeFlowTest.java @@ -733,6 +733,7 @@ public void testIdTokenInjection() throws IOException { Cookie sessionCookie = getSessionCookie(webClient, null); assertNotNull(sessionCookie); + // Replace the session cookie with the correctly formatted cookie but with invalid token values webClient.getCookieManager().clearCookies(); webClient.getCookieManager().addCookie(new Cookie(sessionCookie.getDomain(), sessionCookie.getName(), "1|2|3")); @@ -747,6 +748,22 @@ public void testIdTokenInjection() throws IOException { assertNull(getSessionCookie(webClient, null)); } webClient.getCookieManager().clearCookies(); + + // Replace the session cookie with malformed cookie + webClient.getCookieManager().clearCookies(); + webClient.getCookieManager().addCookie(new Cookie(sessionCookie.getDomain(), sessionCookie.getName(), + "1")); + sessionCookie = getSessionCookie(webClient, null); + assertEquals("1", 
sessionCookie.getValue()); + + try { + webClient.getPage("http://localhost:8081/web-app"); + fail("401 status error is expected"); + } catch (FailingHttpStatusCodeException ex) { + assertEquals(401, ex.getStatusCode()); + assertNull(getSessionCookie(webClient, null)); + } + webClient.getCookieManager().clearCookies(); } }