diff --git a/.github/dependabot.yml b/.github/dependabot.yml index a8f463964d320..876587472f4c8 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -44,7 +44,7 @@ updates: # JDBC Drivers - dependency-name: org.postgresql:postgresql - dependency-name: org.mariadb.jdbc:mariadb-java-client - - dependency-name: mysql:mysql-connector-java + - dependency-name: com.mysql:mysql-connector-j - dependency-name: org.apache.derby:* - dependency-name: com.ibm.db2:jcc # Kafka diff --git a/bom/application/pom.xml b/bom/application/pom.xml index 1ee4b1581c351..5bf29ba053e7a 100644 --- a/bom/application/pom.xml +++ b/bom/application/pom.xml @@ -25,7 +25,7 @@ 1 1.1.5 2.1.5.Final - 3.1.0.Final + 3.1.1.Final 6.2.7.Final 0.33.0 0.2.4 @@ -130,7 +130,7 @@ 2.2.224 42.7.1 3.3.2 - 8.0.33 + 8.3.0 12.4.2.jre11 1.6.7 23.3.0.23.09 @@ -148,7 +148,7 @@ 1.12.0 1.0.4 3.5.3.Final - 2.5.5 + 2.5.6 3.6.1 1.8.0 1.1.10.5 @@ -207,7 +207,7 @@ 1.11.3 2.5.8.Final 0.1.18.Final - 1.19.3 + 1.19.4 3.3.4 2.0.0 diff --git a/build-parent/pom.xml b/build-parent/pom.xml index 8e65e4e5d157f..aa646481c3e5d 100644 --- a/build-parent/pom.xml +++ b/build-parent/pom.xml @@ -72,8 +72,10 @@ - 6.4.0.Final + but version 6.3+ leads to issues in applications that use it too: + https://github.com/quarkusio/quarkus/issues/38378 + As a workaround, we generate the static metamodel of Panache using an older version of jpamodelgen. 
--> + 6.2.22.Final 4.13.0 diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/JavaCompilationProvider.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/JavaCompilationProvider.java index 2e0f16e6b73da..bdd951b05767a 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/JavaCompilationProvider.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/JavaCompilationProvider.java @@ -37,7 +37,7 @@ public class JavaCompilationProvider implements CompilationProvider { // -parameters is used to generate metadata for reflection on method parameters // this is useful when people using debuggers against their hot-reloaded app private static final Set COMPILER_OPTIONS = Set.of("-g", "-parameters"); - private static final Set IGNORE_NAMESPACES = Set.of("org.osgi"); + private static final Set IGNORE_NAMESPACES = Set.of("org.osgi", "Annotation processing is enabled because"); private static final String PROVIDER_KEY = "java"; diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java b/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java index 3ac39611cfff3..08a043382dbe5 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java @@ -529,7 +529,7 @@ private void writeJavadocProperties(final TypeElement clazz, final Properties ja rbn, clazz); try (Writer writer = file.openWriter()) { - javadocProps.store(writer, Constants.EMPTY); + PropertyUtils.store(javadocProps, writer); } } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed to persist resource " + rbn + ": " + e); diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocFormatter.java 
b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocFormatter.java index 0564a820dbfc7..e153f1aa0afc6 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocFormatter.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocFormatter.java @@ -53,8 +53,8 @@ default String getAnchor(String string) { // Apostrophes. string = string.replaceAll("([a-z])'s([^a-z])", "$1s$2"); - // Allow only letters, -, _, . - string = string.replaceAll("[^\\w-_\\.]", "-").replaceAll("-{2,}", "-"); + // Allow only letters, -, _ + string = string.replaceAll("[^\\w-_]", "-").replaceAll("-{2,}", "-"); // Get rid of any - at the start and end. string = string.replaceAll("-+$", "").replaceAll("^-+", ""); diff --git a/devtools/gradle/settings.gradle.kts b/devtools/gradle/settings.gradle.kts index 286822843e4e8..28532ac501ff8 100644 --- a/devtools/gradle/settings.gradle.kts +++ b/devtools/gradle/settings.gradle.kts @@ -1,5 +1,5 @@ plugins { - id("com.gradle.enterprise") version "3.16.1" + id("com.gradle.enterprise") version "3.16.2" } gradleEnterprise { diff --git a/docs/src/main/asciidoc/build-analytics.adoc b/docs/src/main/asciidoc/build-analytics.adoc index ff94a46c57488..379bcc3843480 100644 --- a/docs/src/main/asciidoc/build-analytics.adoc +++ b/docs/src/main/asciidoc/build-analytics.adoc @@ -166,19 +166,19 @@ mvn clean install -Dquarkus.analytics.disabled=true |=== | Configuration property | Type | Default -| [[build-analytics.quarkus.analytics.disabled]]`link:#build-analytics.quarkus.analytics.disabled[quarkus.analytics.disabled]` +| [[build-analytics-quarkus-analytics-disabled]]`link:#build-analytics-quarkus-analytics-disabled[quarkus.analytics.disabled]` Values can be `true` or `false`. Setting this property to `true` will disable the collection of analytics data on that specific project regardless of any other configuration. 
| boolean | false -| [[build-analytics.quarkus.analytics.uri.base]]`link:#build-analytics.quarkus.analytics.uri.base[quarkus.analytics.uri.base]` +| [[build-analytics-quarkus-analytics-uri-base]]`link:#build-analytics-quarkus-analytics-uri-base[quarkus.analytics.uri.base]` Base URL of the analytics service. This is were all the data is sent to. | String | "https://api.segment.io/" -| [[build-analytics.quarkus.analytics.timeout]]`link:#build-analytics.quarkus.analytics.timeout[quarkus.analytics.timeout]` +| [[build-analytics-quarkus-analytics-timeout]]`link:#build-analytics-quarkus-analytics-timeout[quarkus.analytics.timeout]` Value in milliseconds for the timeout of the HTTP request to submit the analytics service. | int diff --git a/docs/src/main/asciidoc/databases-dev-services.adoc b/docs/src/main/asciidoc/databases-dev-services.adoc index 2a12b337c0a0b..72e64e7afeff5 100644 --- a/docs/src/main/asciidoc/databases-dev-services.adoc +++ b/docs/src/main/asciidoc/databases-dev-services.adoc @@ -121,7 +121,7 @@ In that case, you will need to stop and remove these containers manually. If you want to reuse containers for some Quarkus applications but not all of them, or some Dev Services but not all of them, you can disable this feature for a specific Dev Service by setting the configuration property -xref:databases-dev-services.adoc#quarkus-datasource-config-group-dev-services-build-time-config_quarkus.datasource.devservices.reuse[`quarkus.datasource.devservices.reuse`/`quarkus.datasource."datasource-name".devservices.reuse`] +xref:databases-dev-services.adoc#quarkus-datasource-config-group-dev-services-build-time-config_quarkus-datasource-devservices-reuse[`quarkus.datasource.devservices.reuse`/`quarkus.datasource."datasource-name".devservices.reuse`] to `false`.
== Mapping volumes into Dev Services for Database diff --git a/docs/src/main/asciidoc/datasource.adoc b/docs/src/main/asciidoc/datasource.adoc index 74f7fcfcfd121..6164e60e32676 100644 --- a/docs/src/main/asciidoc/datasource.adoc +++ b/docs/src/main/asciidoc/datasource.adoc @@ -4,7 +4,7 @@ and pull requests should be submitted there: https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// [id="datasources"] -= Configure data sources in Quarkus += Configure data sources in {project-name} include::_attributes.adoc[] :diataxis-type: reference :categories: data,getting-started,reactive diff --git a/docs/src/main/asciidoc/deploying-to-kubernetes.adoc b/docs/src/main/asciidoc/deploying-to-kubernetes.adoc index b634f56f08b47..a097956a8ce67 100644 --- a/docs/src/main/asciidoc/deploying-to-kubernetes.adoc +++ b/docs/src/main/asciidoc/deploying-to-kubernetes.adoc @@ -238,7 +238,7 @@ You can provide the arguments that will be used by the Kubernetes Job via the pr Finally, the Kubernetes job will be launched every time it is installed in Kubernetes. You can know more about how to run Kubernetes jobs in this https://kubernetes.io/docs/concepts/workloads/controllers/job/#running-an-example-job[link]. -You can configure the rest of the Kubernetes Job configuration using the properties under `quarkus.kubernetes.job.xxx` (see xref:deploying-to-kubernetes.adoc#quarkus-kubernetes-kubernetes-config_quarkus.kubernetes.job.parallelism[link]). +You can configure the rest of the Kubernetes Job configuration using the properties under `quarkus.kubernetes.job.xxx` (see xref:deploying-to-kubernetes.adoc#quarkus-kubernetes-kubernetes-config_quarkus-kubernetes-job-parallelism[link]). ==== Generating CronJob resources @@ -253,7 +253,7 @@ quarkus.kubernetes.cron-job.schedule=0 * * * * IMPORTANT: CronJob resources require the https://en.wikipedia.org/wiki/Cron[Cron] expression to specify when to launch the job via the property `quarkus.kubernetes.cron-job.schedule`. 
If not provide, the build will fail. -You can configure the rest of the Kubernetes CronJob configuration using the properties under `quarkus.kubernetes.cron-job.xxx` (see xref:deploying-to-kubernetes.adoc#quarkus-kubernetes-kubernetes-config_quarkus.kubernetes.cron-job.parallelism[link]). +You can configure the rest of the Kubernetes CronJob configuration using the properties under `quarkus.kubernetes.cron-job.xxx` (see xref:deploying-to-kubernetes.adoc#quarkus-kubernetes-kubernetes-config_quarkus-kubernetes-cron-job-parallelism[link]). === Namespace diff --git a/docs/src/main/asciidoc/deploying-to-openshift.adoc b/docs/src/main/asciidoc/deploying-to-openshift.adoc index 26a2a416268a3..5611e90d45cf1 100644 --- a/docs/src/main/asciidoc/deploying-to-openshift.adoc +++ b/docs/src/main/asciidoc/deploying-to-openshift.adoc @@ -417,7 +417,7 @@ You can provide the arguments that will be used by the Kubernetes Job via the pr Finally, the Kubernetes job will be launched every time that is installed in OpenShift. You can know more about how to run Kubernetes jobs in this https://kubernetes.io/docs/concepts/workloads/controllers/job/#running-an-example-job[link]. -You can configure the rest of the Kubernetes Job configuration using the properties under `quarkus.openshift.job.xxx` (see xref:deploying-to-openshift#quarkus-openshift-openshift-config_quarkus.openshift.job.parallelism[link]). +You can configure the rest of the Kubernetes Job configuration using the properties under `quarkus.openshift.job.xxx` (see xref:deploying-to-openshift#quarkus-openshift-openshift-config_quarkus-openshift-job-parallelism[link]). ===== Generating CronJob resources @@ -432,7 +432,7 @@ quarkus.openshift.cron-job.schedule=0 * * * * IMPORTANT: CronJob resources require the https://en.wikipedia.org/wiki/Cron[Cron] expression to specify when to launch the job via the property `quarkus.openshift.cron-job.schedule`. If not provide, the build will fail. 
-You can configure the rest of the Kubernetes CronJob configuration using the properties under `quarkus.openshift.cron-job.xxx` (see xref:deploying-to-openshift.adoc#quarkus-openshift-openshift-config_quarkus.openshift.cron-job.parallelism[link]). +You can configure the rest of the Kubernetes CronJob configuration using the properties under `quarkus.openshift.cron-job.xxx` (see xref:deploying-to-openshift.adoc#quarkus-openshift-openshift-config_quarkus-openshift-cron-job-parallelism[link]). ==== Validation diff --git a/docs/src/main/asciidoc/elasticsearch-dev-services.adoc b/docs/src/main/asciidoc/elasticsearch-dev-services.adoc index a254d9a25248b..9e025adf12d8a 100644 --- a/docs/src/main/asciidoc/elasticsearch-dev-services.adoc +++ b/docs/src/main/asciidoc/elasticsearch-dev-services.adoc @@ -123,7 +123,7 @@ including the Elasticsearch schema and the content of indexes. If that's not what you want -- and if your tests write to the indexes, that's probably not what you want -- consider reinitializing your schema and data on application startup. If you use Hibernate Search, -xref:hibernate-search-orm-elasticsearch.adoc#quarkus-hibernate-search-orm-elasticsearch_quarkus.hibernate-search-orm.schema-management.strategy[Hibernate Search's schema management] +xref:hibernate-search-orm-elasticsearch.adoc#quarkus-hibernate-search-orm-elasticsearch_quarkus-hibernate-search-orm-schema-management-strategy[Hibernate Search's schema management] may help with that. ==== @@ -138,7 +138,7 @@ In that case, you will need to stop and remove these containers manually. 
If you want to reuse containers for some Quarkus applications but not all of them, or some Dev Services but not all of them, you can disable this feature for a specific Dev Service by setting the configuration property -xref:elasticsearch-dev-services.adoc#quarkus-elasticsearch-devservices-elasticsearch-dev-services-build-time-config_quarkus.elasticsearch.devservices.reuse[`quarkus.elasticsearch.devservices.reuse`] +xref:elasticsearch-dev-services.adoc#quarkus-elasticsearch-devservices-elasticsearch-dev-services-build-time-config_quarkus-elasticsearch-devservices-reuse[`quarkus.elasticsearch.devservices.reuse`] to `false`. == Current limitations diff --git a/docs/src/main/asciidoc/getting-started-dev-services.adoc b/docs/src/main/asciidoc/getting-started-dev-services.adoc index 14f62ced451bb..cf308ec06771a 100644 --- a/docs/src/main/asciidoc/getting-started-dev-services.adoc +++ b/docs/src/main/asciidoc/getting-started-dev-services.adoc @@ -238,7 +238,7 @@ Quarkus will automatically stop the container when your application stops. If you play with your code some more, you may notice that sometimes, after making an application change, http://localhost:8080/hello/names doesn't list any names. What's going on? By default, in dev mode, with a Dev Services database, Quarkus configures Hibernate ORM database generation to be `drop-and-create`. - See the xref:hibernate-orm.adoc#quarkus-hibernate-orm_quarkus.hibernate-orm.database-database-related-configuration[Hibernate configuration reference] for more details. + See the xref:hibernate-orm.adoc#quarkus-hibernate-orm_quarkus-hibernate-orm-database-database-related-configuration[Hibernate configuration reference] for more details. If a code change triggers an application restart, the database tables will be dropped (deleted) and then re-created. 
diff --git a/docs/src/main/asciidoc/hibernate-orm.adoc b/docs/src/main/asciidoc/hibernate-orm.adoc index ff704928e292f..6b383977bb45f 100644 --- a/docs/src/main/asciidoc/hibernate-orm.adoc +++ b/docs/src/main/asciidoc/hibernate-orm.adoc @@ -630,7 +630,7 @@ You can add mapping files following the https://jakarta.ee/specifications/persistence/3.0/jakarta-persistence-spec-3.0.html#a16944[`orm.xml` format (Jakarta Persistence)] or the http://hibernate.org/dtd/hibernate-mapping-3.0.dtd[`hbm.xml` format (specific to Hibernate ORM, deprecated)]: -* in `application.properties` through the (build-time) link:#quarkus-hibernate-orm_quarkus.hibernate-orm.mapping-files[`quarkus.hibernate-orm.mapping-files`] property. +* in `application.properties` through the (build-time) link:#quarkus-hibernate-orm_quarkus-hibernate-orm-mapping-files[`quarkus.hibernate-orm.mapping-files`] property. * in <> through the `` element. XML mapping files are parsed at build time. diff --git a/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc b/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc index d99e2420de01c..9e5d14a159356 100644 --- a/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc +++ b/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc @@ -659,7 +659,7 @@ which is why we provide Quarkus with this connection info in the `prod` profile Because we rely on <>, the database and Elasticsearch schema will automatically be dropped and re-created on each application startup in tests and dev mode -(unless link:#quarkus-hibernate-search-orm-elasticsearch_quarkus.hibernate-search-orm.schema-management.strategy[`quarkus.hibernate-search-orm.schema-management.strategy`] is set explicitly). +(unless <> is set explicitly). 
If for some reason you cannot use Dev Services, you will have to set the following properties to get similar behavior: @@ -672,7 +672,7 @@ you will have to set the following properties to get similar behavior: %test.quarkus.hibernate-search-orm.schema-management.strategy=drop-and-create ---- -See also link:#quarkus-hibernate-search-orm-elasticsearch_quarkus.hibernate-search-orm.schema-management.strategy[`quarkus.hibernate-search-orm.schema-management.strategy`]. +See also <>. ==== @@ -898,10 +898,10 @@ this could cause a startup failure. To address this, you can configure Hibernate Search to not send any request on startup: * Disable Elasticsearch version checks on startup by setting the configuration property - link:#quarkus-hibernate-search-orm-elasticsearch_quarkus.hibernate-search-orm.elasticsearch.version-check.enabled[`quarkus.hibernate-search-orm.elasticsearch.version-check.enabled`] + <> to `false`. * Disable schema management on startup by setting the configuration property - link:#quarkus-hibernate-search-orm-elasticsearch_quarkus.hibernate-search-orm.schema-management.strategy[`quarkus.hibernate-search-orm.schema-management.strategy`] + <> to `none`. Of course, even with this configuration, Hibernate Search still won't be able to index anything or run search queries @@ -948,7 +948,7 @@ To enable the `outbox-polling` coordination strategy, an additional extension is include::{includes}/devtools/extension-add.adoc[] Once the extension is there, you will need to explicitly select the `outbox-polling` strategy -by setting link:#quarkus-hibernate-search-orm-elasticsearch_quarkus.hibernate-search-orm.coordination.strategy[`quarkus.hibernate-search-orm.coordination.strategy`] +by setting <> to `outbox-polling`. 
Finally, you will need to make sure that the Hibernate ORM entities added by Hibernate Search @@ -965,16 +965,16 @@ link:{hibernate-search-docs-url}#coordination-outbox-polling-schema[manually alt The database schema Hibernate Search will expect for outbox-polling coordination may be customized through the following configuration properties: -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.agent.catalog[`quarkus.hibernate-search-orm.coordination.entity-mapping.agent.catalog`] -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.agent.schema[`quarkus.hibernate-search-orm.coordination.entity-mapping.agent.schema`] -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.agent.table[`quarkus.hibernate-search-orm.coordination.entity-mapping.agent.table`] -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.agent.uuid-gen-strategy[`quarkus.hibernate-search-orm.coordination.entity-mapping.agent.uuid-gen-strategy`] -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.agent.uuid-type[`quarkus.hibernate-search-orm.coordination.entity-mapping.agent.uuid-type`] -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.catalog[`quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.catalog`] -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.schema[`quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.schema`] -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.table[`quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.table`] -* 
link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.uuid-gen-strategy[`quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.uuid-gen-strategy`] -* link:#quarkus-hibernate-search-orm-outboxpolling_quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.uuid-type[`quarkus.hibernate-search-orm.coordination.entity-mapping.outbox-event.uuid-type`] +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> ==== @@ -992,7 +992,7 @@ However, there is one key difference: index updates are necessarily asynchronous they are guaranteed to happen _eventually_, but not immediately. This means in particular that the configuration property -link:#quarkus-hibernate-search-orm-elasticsearch_quarkus.hibernate-search-orm.indexing.plan.synchronization.strategy[`quarkus.hibernate-search-orm.indexing.plan.synchronization.strategy`] +<> cannot be set when using the `outbox-polling` coordination strategy: Hibernate Search will always behave as if this property was set to `write-sync` (the default). @@ -1164,7 +1164,7 @@ For example `class:com.mycompany.MyClass`. * An arbitrary string referencing a built-in implementation. Available values are detailed in the documentation of each configuration property, such as `async`/`read-sync`/`write-sync`/`sync` for -<>. +<>. Other formats are also accepted, but are only useful for advanced use cases. 
See link:{hibernate-search-docs-url}#configuration-bean-reference-parsing[this section of Hibernate Search's reference documentation] diff --git a/docs/src/main/asciidoc/kubernetes-client.adoc b/docs/src/main/asciidoc/kubernetes-client.adoc index 6dcaeb41828fa..23a5efce9e567 100644 --- a/docs/src/main/asciidoc/kubernetes-client.adoc +++ b/docs/src/main/asciidoc/kubernetes-client.adoc @@ -54,7 +54,7 @@ quarkus.kubernetes-client.trust-certs=false quarkus.kubernetes-client.namespace=default ---- -Note that the full list of properties is available in the link:#quarkus-kubernetes-client_quarkus.kubernetes-client.devservices-dev-services[Dev Services section of the configuration reference]. +Note that the full list of properties is available in the <>. In dev mode and when running tests, xref:kubernetes-dev-services.adoc[Dev Services for Kubernetes] automatically starts a Kubernetes API server. diff --git a/docs/src/main/asciidoc/logging.adoc b/docs/src/main/asciidoc/logging.adoc index 813d367e17ba1..9d93ff634e73f 100644 --- a/docs/src/main/asciidoc/logging.adoc +++ b/docs/src/main/asciidoc/logging.adoc @@ -434,7 +434,7 @@ quarkus.log.category."com.example".handlers=my-console-handler quarkus.log.category."com.example".use-parent-handlers=false ---- -For details about its configuration, see the xref:#quarkus-log-logging-log-config_quarkus.log.console-console-logging[console logging configuration] reference. +For details about its configuration, see the xref:#quarkus-log-logging-log-config_quarkus-log-console-console-logging[console logging configuration] reference. === File log handler @@ -468,7 +468,7 @@ quarkus.log.category."com.example".handlers=my-file-handler quarkus.log.category."com.example".use-parent-handlers=false ---- -For details about its configuration, see the xref:#quarkus-log-logging-log-config_quarkus.log.file-file-logging[file logging configuration] reference. 
+For details about its configuration, see the xref:#quarkus-log-logging-log-config_quarkus-log-file-file-logging[file logging configuration] reference. === Syslog log handler @@ -500,7 +500,7 @@ quarkus.log.category."com.example".handlers=my-syslog-handler quarkus.log.category."com.example".use-parent-handlers=false ---- -For details about its configuration, see the xref:#quarkus-log-logging-log-config_quarkus.log.syslog-syslog-logging[Syslog logging configuration] reference. +For details about its configuration, see the xref:#quarkus-log-logging-log-config_quarkus-log-syslog-syslog-logging[Syslog logging configuration] reference. == Add a logging filter to your log handler diff --git a/docs/src/main/asciidoc/writing-extensions.adoc b/docs/src/main/asciidoc/writing-extensions.adoc index cdf5dbe5a04ce..40d6399d6e61e 100644 --- a/docs/src/main/asciidoc/writing-extensions.adoc +++ b/docs/src/main/asciidoc/writing-extensions.adoc @@ -1641,7 +1641,7 @@ Given a default non-application endpoint root of `/q`, this will create an endpo Absolute paths also have an impact on nested endpoints. If the above called `nestedRoute("custom-endpoint", "/deep")`, the resulting endpoint will be found at `/deep`. -Refer to the xref:all-config.adoc#quarkus-vertx-http_quarkus.http.non-application-root-path[Quarkus Vertx HTTP configuration reference] +Refer to the xref:all-config.adoc#quarkus-vertx-http_quarkus-http-non-application-root-path[Quarkus Vertx HTTP configuration reference] for details on how the non-application root path is configured. 
=== Extension Health Check diff --git a/docs/src/main/java/io/quarkus/docs/generation/AssembleDownstreamDocumentation.java b/docs/src/main/java/io/quarkus/docs/generation/AssembleDownstreamDocumentation.java index 0acc2d32ddf43..f1ac96a2dbcef 100755 --- a/docs/src/main/java/io/quarkus/docs/generation/AssembleDownstreamDocumentation.java +++ b/docs/src/main/java/io/quarkus/docs/generation/AssembleDownstreamDocumentation.java @@ -50,6 +50,9 @@ public class AssembleDownstreamDocumentation { private static final String SOURCE_BLOCK_PREFIX = "[source"; private static final String SOURCE_BLOCK_DELIMITER = "--"; + private static final String PROJECT_NAME_ATTRIBUTE = "{project-name}"; + private static final String RED_HAT_BUILD_OF_QUARKUS = "Red Hat build of Quarkus"; + private static final String QUARKUS_IO_GUIDES_ATTRIBUTE = "{quarkusio-guides}"; private static final Map TABS_REPLACEMENTS = Map.of( @@ -301,10 +304,17 @@ private static void copyAsciidoc(Path sourceFile, Path targetFile, Set d boolean findDelimiter = false; String currentSourceBlockDelimiter = "----"; int lineNumber = 0; + boolean documentTitleFound = false; for (String line : guideLines) { lineNumber++; + if (!documentTitleFound && line.startsWith("= ")) { + // this is the document title + rewrittenGuide.append(line.replace(PROJECT_NAME_ATTRIBUTE, RED_HAT_BUILD_OF_QUARKUS) + "\n"); + documentTitleFound = true; + continue; + } if (inSourceBlock) { if (findDelimiter) { rewrittenGuide.append(line + "\n"); diff --git a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java index b1801a4bdb991..0222c4049a3e9 100644 --- 
a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java +++ b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRecorder.java @@ -462,7 +462,7 @@ private void contributeBackendRuntimeProperties(BiConsumer prope addBackendConfig(propertyCollector, backendName, ElasticsearchBackendSettings.THREAD_POOL_SIZE, elasticsearchBackendConfig.threadPool().size()); addBackendConfig(propertyCollector, backendName, ElasticsearchBackendSettings.VERSION_CHECK_ENABLED, - elasticsearchBackendConfig.versionCheck()); + elasticsearchBackendConfig.versionCheck().enabled()); addBackendConfig(propertyCollector, backendName, ElasticsearchBackendSettings.QUERY_SHARD_FAILURE_IGNORE, elasticsearchBackendConfig.query().shardFailure().ignore()); diff --git a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java index c9b5b62c1f9d1..32c697e7df9ba 100644 --- a/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java +++ b/extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java @@ -154,15 +154,9 @@ interface ElasticsearchBackendRuntimeConfig { ElasticsearchQueryConfig query(); /** - * Whether Hibernate Search should check the version of the Elasticsearch cluster on startup. 
- * - * Set to `false` if the Elasticsearch cluster may not be available on startup. - * - * @asciidoclet + * Configuration for version checks on this backend. */ - @WithName("version-check.enabled") - @WithDefault("true") - boolean versionCheck(); + ElasticsearchVersionCheckConfig versionCheck(); /** * The default configuration for the Elasticsearch indexes. @@ -212,6 +206,19 @@ public String getHibernateSearchString() { } } + @ConfigGroup + interface ElasticsearchVersionCheckConfig { + /** + * Whether Hibernate Search should check the version of the Elasticsearch cluster on startup. + * + * Set to `false` if the Elasticsearch cluster may not be available on startup. + * + * @asciidoclet + */ + @WithDefault("true") + boolean enabled(); + } + @ConfigGroup interface ElasticsearchIndexRuntimeConfig { /** diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/ApplyResolveNamesImagePolicyDecorator.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/ApplyResolveNamesImagePolicyDecorator.java new file mode 100644 index 0000000000000..1a41c96452c74 --- /dev/null +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/ApplyResolveNamesImagePolicyDecorator.java @@ -0,0 +1,13 @@ +package io.quarkus.kubernetes.deployment; + +import io.dekorate.kubernetes.decorator.NamedResourceDecorator; +import io.fabric8.kubernetes.api.model.ObjectMeta; +import io.fabric8.kubernetes.api.model.PodTemplateSpecFluent; + +public class ApplyResolveNamesImagePolicyDecorator extends NamedResourceDecorator> { + + @Override + public void andThenVisit(PodTemplateSpecFluent podTemplate, ObjectMeta meta) { + podTemplate.editOrNewMetadata().addToAnnotations("alpha.image.policy.openshift.io/resolve-names", "*").endMetadata(); + } +} diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java 
b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java index 18fa42dbc4e26..f49540aed39ef 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java @@ -236,6 +236,8 @@ public List createDecorators(ApplicationInfoBuildItem applic result.add(new DecoratorBuildItem(new RemoveOptionalFromConfigMapKeySelectorDecorator())); } + result.add(new DecoratorBuildItem(new ApplyResolveNamesImagePolicyDecorator())); + DeploymentResourceKind deploymentKind = config.getDeploymentResourceKind(capabilities); switch (deploymentKind) { case Deployment: diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/AbstractTokensProducer.java b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/AbstractTokensProducer.java index a8eb0806ad95c..3ef2337efc737 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/AbstractTokensProducer.java +++ b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/AbstractTokensProducer.java @@ -6,6 +6,8 @@ import jakarta.annotation.PostConstruct; import jakarta.inject.Inject; +import org.jboss.logging.Logger; + import io.quarkus.arc.Arc; import io.quarkus.oidc.client.OidcClient; import io.quarkus.oidc.client.OidcClients; @@ -13,6 +15,8 @@ import io.smallrye.mutiny.Uni; public abstract class AbstractTokensProducer { + private static final Logger LOG = Logger.getLogger(AbstractTokensProducer.class); + private static final String DEFAULT_OIDC_CLIENT_ID = "Default"; private OidcClient oidcClient; protected boolean earlyTokenAcquisition = true; @@ -46,7 +50,13 @@ protected void initTokens() { } public Uni getTokens() { - return tokensHelper.getTokens(oidcClient); + final boolean forceNewTokens = 
isForceNewTokens(); + if (forceNewTokens) { + final Optional clientId = clientId(); + LOG.debugf("%s OidcClient will discard the current access and refresh tokens", + clientId.orElse(DEFAULT_OIDC_CLIENT_ID)); + } + return tokensHelper.getTokens(oidcClient, forceNewTokens); } public Tokens awaitTokens() { @@ -60,4 +70,12 @@ public Tokens awaitTokens() { protected Optional clientId() { return Optional.empty(); } + + /** + * @return {@code true} if the OIDC client must acquire a new set of tokens, discarding + * previously obtained access and refresh tokens. + */ + protected boolean isForceNewTokens() { + return false; + } } diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/TokensHelper.java b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/TokensHelper.java index ed41d5082e3b3..1e347ba937918 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/TokensHelper.java +++ b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/TokensHelper.java @@ -21,6 +21,10 @@ public void initTokens(OidcClient oidcClient) { } public Uni getTokens(OidcClient oidcClient) { + return getTokens(oidcClient, false); + } + + public Uni getTokens(OidcClient oidcClient, boolean forceNewTokens) { TokenRequestState currentState = null; TokenRequestState newState = null; //if the tokens are expired we refresh them in an async manner @@ -39,9 +43,9 @@ public Uni getTokens(OidcClient oidcClient) { return currentState.tokenUni; } else { Tokens tokens = currentState.tokens; - if (tokens.isAccessTokenExpired() || tokens.isAccessTokenWithinRefreshInterval()) { + if (forceNewTokens || tokens.isAccessTokenExpired() || tokens.isAccessTokenWithinRefreshInterval()) { newState = new TokenRequestState( - prepareUni((tokens.getRefreshToken() != null && !tokens.isRefreshTokenExpired()) + prepareUni((!forceNewTokens && tokens.getRefreshToken() != null && !tokens.isRefreshTokenExpired()) ? 
oidcClient.refreshTokens(tokens.getRefreshToken()) : oidcClient.getTokens())); if (tokenRequestStateUpdater.compareAndSet(this, currentState, newState)) { diff --git a/extensions/panache/hibernate-orm-panache-kotlin/runtime/pom.xml b/extensions/panache/hibernate-orm-panache-kotlin/runtime/pom.xml index 466e953432d4e..75c910a9464f9 100644 --- a/extensions/panache/hibernate-orm-panache-kotlin/runtime/pom.xml +++ b/extensions/panache/hibernate-orm-panache-kotlin/runtime/pom.xml @@ -110,7 +110,7 @@ org.hibernate.orm hibernate-jpamodelgen - ${hibernate-orm.version} + ${hibernate-orm.jpamodelgen.for-panache.version} diff --git a/extensions/panache/hibernate-orm-panache/runtime/pom.xml b/extensions/panache/hibernate-orm-panache/runtime/pom.xml index 170a7624aadd5..7f12900d4c6a3 100644 --- a/extensions/panache/hibernate-orm-panache/runtime/pom.xml +++ b/extensions/panache/hibernate-orm-panache/runtime/pom.xml @@ -59,7 +59,7 @@ org.hibernate.orm hibernate-jpamodelgen - ${hibernate-orm.version} + ${hibernate-orm.jpamodelgen.for-panache.version} diff --git a/extensions/panache/hibernate-reactive-panache-kotlin/runtime/pom.xml b/extensions/panache/hibernate-reactive-panache-kotlin/runtime/pom.xml index e007a8da611b8..ff79d15a29ab5 100644 --- a/extensions/panache/hibernate-reactive-panache-kotlin/runtime/pom.xml +++ b/extensions/panache/hibernate-reactive-panache-kotlin/runtime/pom.xml @@ -119,7 +119,7 @@ org.hibernate.orm hibernate-jpamodelgen - ${hibernate-orm.version} + ${hibernate-orm.jpamodelgen.for-panache.version} diff --git a/extensions/panache/hibernate-reactive-panache/runtime/pom.xml b/extensions/panache/hibernate-reactive-panache/runtime/pom.xml index 7c3a24f07a37d..f552aa5671f54 100644 --- a/extensions/panache/hibernate-reactive-panache/runtime/pom.xml +++ b/extensions/panache/hibernate-reactive-panache/runtime/pom.xml @@ -63,7 +63,7 @@ org.hibernate.orm hibernate-jpamodelgen - ${hibernate-orm.version} + ${hibernate-orm.jpamodelgen.for-panache.version} diff --git 
a/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java b/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java index d72befb72a832..60e76ea042e8e 100644 --- a/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java +++ b/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java @@ -564,8 +564,9 @@ private Properties getSchedulerConfigurationProperties(QuartzSupport quartzSuppo props.put("org.quartz.scheduler.skipUpdateCheck", "true"); props.put(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, runtimeConfig.instanceName); - props.put(StdSchedulerFactory.PROP_SCHED_BATCH_TIME_WINDOW, runtimeConfig.batchTriggerAcquisitionFireAheadTimeWindow); - props.put(StdSchedulerFactory.PROP_SCHED_MAX_BATCH_SIZE, runtimeConfig.batchTriggerAcquisitionMaxCount); + props.put(StdSchedulerFactory.PROP_SCHED_BATCH_TIME_WINDOW, + "" + runtimeConfig.batchTriggerAcquisitionFireAheadTimeWindow); + props.put(StdSchedulerFactory.PROP_SCHED_MAX_BATCH_SIZE, "" + runtimeConfig.batchTriggerAcquisitionMaxCount); props.put(StdSchedulerFactory.PROP_SCHED_WRAP_JOB_IN_USER_TX, "false"); props.put(StdSchedulerFactory.PROP_SCHED_SCHEDULER_THREADS_INHERIT_CONTEXT_CLASS_LOADER_OF_INITIALIZING_THREAD, "true"); props.put(StdSchedulerFactory.PROP_THREAD_POOL_CLASS, "org.quartz.simpl.SimpleThreadPool"); diff --git a/extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/VertxSpringCloudConfigGateway.java b/extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/VertxSpringCloudConfigGateway.java index 6c23a986f0063..f863150a861e0 100644 --- a/extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/VertxSpringCloudConfigGateway.java +++ 
b/extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/VertxSpringCloudConfigGateway.java @@ -233,7 +233,7 @@ private int getPort(URI uri) { private String getFinalURI(String applicationName, String profile) { String finalURI = baseURI.toString() + "/" + applicationName + "/" + profile; if (config.label().isPresent()) { - finalURI = "/" + config.label().get(); + finalURI += "/" + config.label().get(); } return finalURI; } diff --git a/independent-projects/arc/pom.xml b/independent-projects/arc/pom.xml index 63ebe84f24e03..b4c2166869840 100644 --- a/independent-projects/arc/pom.xml +++ b/independent-projects/arc/pom.xml @@ -47,7 +47,7 @@ 1.7.0 3.1.6 3.5.3.Final - 2.5.5 + 2.5.6 1.6.Final 3.25.1 diff --git a/independent-projects/qute/pom.xml b/independent-projects/qute/pom.xml index dd8183a959411..8b917cd59b21c 100644 --- a/independent-projects/qute/pom.xml +++ b/independent-projects/qute/pom.xml @@ -46,7 +46,7 @@ 3.12.1 3.2.1 3.2.5 - 2.5.5 + 2.5.6 diff --git a/independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/headers/NewCookieHeaderDelegate.java b/independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/headers/NewCookieHeaderDelegate.java index 77bff616144ab..686170816ac23 100644 --- a/independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/headers/NewCookieHeaderDelegate.java +++ b/independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/headers/NewCookieHeaderDelegate.java @@ -140,8 +140,21 @@ public String toString(Object value) { b.append(";HttpOnly"); if (cookie.getSameSite() != null) { b.append(";SameSite="); - b.append(cookie.getSameSite()); + appendCorrectCase(b, cookie.getSameSite()); } return b.toString(); } + + private static void appendCorrectCase(final StringBuilder sb, final Enum e) { + boolean first = true; + 
for (char c : e.name().toCharArray()) { + if (first) { + sb.append(c); + first = false; + } else { + sb.append(Character.toLowerCase(c)); + } + } + } + } diff --git a/independent-projects/resteasy-reactive/pom.xml b/independent-projects/resteasy-reactive/pom.xml index 91ebb7a80dba1..a822ade00e7cf 100644 --- a/independent-projects/resteasy-reactive/pom.xml +++ b/independent-projects/resteasy-reactive/pom.xml @@ -59,7 +59,7 @@ 3.12.1 3.2.1 3.2.5 - 2.5.5 + 2.5.6 2.1.2 4.5.1 5.4.0 diff --git a/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/headers/CookiesTest.java b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/headers/CookiesTest.java index 3c6376fdc7713..0ed68ba267bbb 100644 --- a/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/headers/CookiesTest.java +++ b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/headers/CookiesTest.java @@ -108,7 +108,7 @@ void testSameSite() { .formParam("cookie", "greeting=\"hello\";SameSite=\"Lax\";") .post("/cookies/set-cookie") .then() - .cookie("greeting", detailedCookie().value("hello").sameSite("LAX")); + .cookie("greeting", detailedCookie().value("hello").sameSite("Lax")); } @Test @@ -119,7 +119,7 @@ void testSameSiteWithoutColon() { .formParam("cookie", "greeting=\"hello\";SameSite=\"None\"") .post("/cookies/set-cookie") .then() - .cookie("greeting", detailedCookie().value("hello").sameSite("NONE")); + .cookie("greeting", detailedCookie().value("hello").sameSite("None")); } @Test @@ -130,7 +130,7 @@ void testSameSiteLowercase() { .formParam("cookie", "greeting=\"hello\";samesite=\"Strict\"") .post("/cookies/set-cookie") .then() - .cookie("greeting", detailedCookie().value("hello").sameSite("STRICT")); + .cookie("greeting", detailedCookie().value("hello").sameSite("Strict")); } 
@Test diff --git a/integration-tests/hibernate-orm-jpamodelgen/pom.xml b/integration-tests/hibernate-orm-jpamodelgen/pom.xml new file mode 100644 index 0000000000000..5a8f4d59081a9 --- /dev/null +++ b/integration-tests/hibernate-orm-jpamodelgen/pom.xml @@ -0,0 +1,161 @@ + + + + quarkus-integration-tests-parent + io.quarkus + 999-SNAPSHOT + + 4.0.0 + + quarkus-integration-test-hibernate-orm-jpamodelgen + Quarkus - Integration Tests - Hibernate ORM with jpamodelgen + + + + io.quarkus + quarkus-hibernate-orm-panache + + + io.quarkus + quarkus-hibernate-orm + + + io.quarkus + quarkus-resteasy-reactive + + + io.quarkus + quarkus-resteasy-reactive-jackson + + + io.quarkus + quarkus-jdbc-h2 + + + + + io.quarkus + quarkus-junit5 + test + + + io.quarkus + quarkus-junit5-internal + test + + + io.rest-assured + rest-assured + test + + + org.assertj + assertj-core + test + + + + + io.quarkus + quarkus-hibernate-orm-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-panache-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-h2-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-jackson-deployment + ${project.version} + pom + test + + + * + * + + + + + + + + + src/main/resources + true + + + + + io.quarkus + quarkus-maven-plugin + + + + build + + + + + + maven-compiler-plugin + + + + org.hibernate.orm + hibernate-jpamodelgen + + + + + + + + + diff --git a/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyPanacheStaticMetamodelEntity.java b/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyPanacheStaticMetamodelEntity.java new file mode 100644 index 0000000000000..80db4184c3515 --- /dev/null +++ 
b/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyPanacheStaticMetamodelEntity.java @@ -0,0 +1,26 @@ +package io.quarkus.it.hibernate.jpamodelgen; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; + +import io.quarkus.hibernate.orm.panache.PanacheEntity; + +@Entity +public class MyPanacheStaticMetamodelEntity extends PanacheEntity { + + @Column(unique = true) + public String name; + + MyPanacheStaticMetamodelEntity() { + } + + public MyPanacheStaticMetamodelEntity(String name) { + this.name = name; + } + + @Override + public String toString() { + return "MyPanacheEntity [id=" + id + ", name=" + name + "]"; + } + +} diff --git a/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyPanacheStaticMetamodelResource.java b/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyPanacheStaticMetamodelResource.java new file mode 100644 index 0000000000000..4b5b5ecc4fce4 --- /dev/null +++ b/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyPanacheStaticMetamodelResource.java @@ -0,0 +1,68 @@ +package io.quarkus.it.hibernate.jpamodelgen; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.transaction.Transactional; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; + +import org.hibernate.Session; +import org.hibernate.query.criteria.JpaRoot; +import org.jboss.resteasy.reactive.RestPath; + +@ApplicationScoped +@Produces("application/json") +@Consumes("application/json") +@Path("/panache/static-metamodel/") +public class MyPanacheStaticMetamodelResource { + + @Inject + Session session; + + @POST + @Transactional + public void create(MyPanacheStaticMetamodelEntity 
entity) { + session.persist(entity); + } + + @GET + @Transactional + @Path("/by/name/{name}") + public MyPanacheStaticMetamodelEntity getByName(@RestPath String name) { + var b = session.getCriteriaBuilder(); + var query = b.createQuery(MyPanacheStaticMetamodelEntity.class); + var e = query.from(MyPanacheStaticMetamodelEntity_.class_); + query.where(e.get(MyPanacheStaticMetamodelEntity_.name).equalTo(name)); + return session.createQuery(query).uniqueResultOptional().orElseThrow(NotFoundException::new); + } + + @POST + @Transactional + @Path("/rename/{before}/to/{after}") + public void rename(@RestPath String before, @RestPath String after) { + var b = session.getCriteriaBuilder(); + var query = b.createCriteriaUpdate(MyPanacheStaticMetamodelEntity.class); + // Cast to work around https://hibernate.atlassian.net/browse/HHH-17682 + var e = (JpaRoot) query.getRoot(); + query.where(e.get(MyPanacheStaticMetamodelEntity_.name).equalTo(before)); + query.set(e.get(MyPanacheStaticMetamodelEntity_.name), after); + session.createMutationQuery(query).executeUpdate(); + } + + @DELETE + @Transactional + @Path("/by/name/{name}") + public void deleteByName(@RestPath String name) { + var b = session.getCriteriaBuilder(); + var query = b.createCriteriaDelete(MyPanacheStaticMetamodelEntity.class); + // Cast to work around https://hibernate.atlassian.net/browse/HHH-17682 + var e = (JpaRoot) query.getRoot(); + query.where(e.get(MyPanacheStaticMetamodelEntity_.name).equalTo(name)); + session.createMutationQuery(query).executeUpdate(); + } +} diff --git a/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyStaticMetamodelEntity.java b/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyStaticMetamodelEntity.java new file mode 100644 index 0000000000000..1fe18164a0eae --- /dev/null +++ 
b/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyStaticMetamodelEntity.java @@ -0,0 +1,30 @@ +package io.quarkus.it.hibernate.jpamodelgen; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; + +@Entity +public class MyStaticMetamodelEntity { + + @Id + @GeneratedValue + public Integer id; + + @Column(unique = true) + public String name; + + MyStaticMetamodelEntity() { + } + + public MyStaticMetamodelEntity(String name) { + this.name = name; + } + + @Override + public String toString() { + return "MyOrmEntity [id=" + id + ", name=" + name + "]"; + } + +} diff --git a/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyStaticMetamodelResource.java b/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyStaticMetamodelResource.java new file mode 100644 index 0000000000000..8fe12586db0a3 --- /dev/null +++ b/integration-tests/hibernate-orm-jpamodelgen/src/main/java/io/quarkus/it/hibernate/jpamodelgen/MyStaticMetamodelResource.java @@ -0,0 +1,68 @@ +package io.quarkus.it.hibernate.jpamodelgen; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.transaction.Transactional; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; + +import org.hibernate.Session; +import org.hibernate.query.criteria.JpaRoot; +import org.jboss.resteasy.reactive.RestPath; + +@ApplicationScoped +@Produces("application/json") +@Consumes("application/json") +@Path("/static-metamodel/") +public class MyStaticMetamodelResource { + + @Inject + Session session; + + @POST + @Transactional + public void create(MyStaticMetamodelEntity entity) { + 
session.persist(entity); + } + + @GET + @Transactional + @Path("/by/name/{name}") + public MyStaticMetamodelEntity getByName(@RestPath String name) { + var b = session.getCriteriaBuilder(); + var query = b.createQuery(MyStaticMetamodelEntity.class); + var e = query.from(MyStaticMetamodelEntity_.class_); + query.where(e.get(MyStaticMetamodelEntity_.name).equalTo(name)); + return session.createQuery(query).uniqueResultOptional().orElseThrow(NotFoundException::new); + } + + @POST + @Transactional + @Path("/rename/{before}/to/{after}") + public void rename(@RestPath String before, @RestPath String after) { + var b = session.getCriteriaBuilder(); + var query = b.createCriteriaUpdate(MyStaticMetamodelEntity.class); + // Cast to work around https://hibernate.atlassian.net/browse/HHH-17682 + var e = (JpaRoot) query.getRoot(); + query.where(e.get(MyStaticMetamodelEntity_.name).equalTo(before)); + query.set(e.get(MyStaticMetamodelEntity_.name), after); + session.createMutationQuery(query).executeUpdate(); + } + + @DELETE + @Transactional + @Path("/by/name/{name}") + public void deleteByName(@RestPath String name) { + var b = session.getCriteriaBuilder(); + var query = b.createCriteriaDelete(MyStaticMetamodelEntity.class); + // Cast to work around https://hibernate.atlassian.net/browse/HHH-17682 + var e = (JpaRoot) query.getRoot(); + query.where(e.get(MyStaticMetamodelEntity_.name).equalTo(name)); + session.createMutationQuery(query).executeUpdate(); + } +} diff --git a/integration-tests/hibernate-orm-jpamodelgen/src/main/resources/application.properties b/integration-tests/hibernate-orm-jpamodelgen/src/main/resources/application.properties new file mode 100644 index 0000000000000..0c8f975c6ca56 --- /dev/null +++ b/integration-tests/hibernate-orm-jpamodelgen/src/main/resources/application.properties @@ -0,0 +1,2 @@ +quarkus.datasource.jdbc.max-size=8 +quarkus.hibernate-orm.database.generation=drop-and-create diff --git 
a/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/HibernateJpaModelGenInGraalIT.java b/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/HibernateJpaModelGenInGraalIT.java new file mode 100644 index 0000000000000..919c228e5834c --- /dev/null +++ b/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/HibernateJpaModelGenInGraalIT.java @@ -0,0 +1,8 @@ +package io.quarkus.it.hibernate.jpamodelgen; + +import io.quarkus.test.junit.QuarkusIntegrationTest; + +@QuarkusIntegrationTest +public class HibernateJpaModelGenInGraalIT extends HibernateJpaModelGenTest { + +} diff --git a/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/HibernateJpaModelGenTest.java b/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/HibernateJpaModelGenTest.java new file mode 100644 index 0000000000000..de5e7b5c747c5 --- /dev/null +++ b/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/HibernateJpaModelGenTest.java @@ -0,0 +1,71 @@ +package io.quarkus.it.hibernate.jpamodelgen; + +import static io.restassured.RestAssured.given; + +import org.junit.jupiter.api.Test; + +import io.quarkus.test.junit.QuarkusTest; +import io.restassured.http.ContentType; + +@QuarkusTest +public class HibernateJpaModelGenTest { + private static final String ROOT = "/static-metamodel"; + + @Test + public void staticMetamodel() { + // Create/retrieve + given() + .pathParam("name", "foo") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(404); + given() + .body(new MyStaticMetamodelEntity("foo")) + .contentType(ContentType.JSON) + .when().post(ROOT) + .then() + .statusCode(204); + given() + .pathParam("name", "foo") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(200); + 
+ // Update + given() + .pathParam("name", "bar") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(404); + given() + .pathParam("before", "foo") + .pathParam("after", "bar") + .contentType(ContentType.JSON) + .when().post(ROOT + "/rename/{before}/to/{after}") + .then() + .statusCode(204); + given() + .pathParam("name", "bar") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(200); + + // Delete + given() + .pathParam("name", "bar") + .contentType(ContentType.JSON) + .when().delete(ROOT + "/by/name/{name}") + .then() + .statusCode(204); + given() + .pathParam("name", "bar") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(404); + } +} diff --git a/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/PanacheJpaModelGenInGraalIT.java b/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/PanacheJpaModelGenInGraalIT.java new file mode 100644 index 0000000000000..5de1b27e4b2f8 --- /dev/null +++ b/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/PanacheJpaModelGenInGraalIT.java @@ -0,0 +1,8 @@ +package io.quarkus.it.hibernate.jpamodelgen; + +import io.quarkus.test.junit.QuarkusIntegrationTest; + +@QuarkusIntegrationTest +public class PanacheJpaModelGenInGraalIT extends PanacheJpaModelGenTest { + +} diff --git a/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/PanacheJpaModelGenTest.java b/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/PanacheJpaModelGenTest.java new file mode 100644 index 0000000000000..f30525316e285 --- /dev/null +++ b/integration-tests/hibernate-orm-jpamodelgen/src/test/java/io/quarkus/it/hibernate/jpamodelgen/PanacheJpaModelGenTest.java @@ -0,0 +1,71 @@ +package 
io.quarkus.it.hibernate.jpamodelgen; + +import static io.restassured.RestAssured.given; + +import org.junit.jupiter.api.Test; + +import io.quarkus.test.junit.QuarkusTest; +import io.restassured.http.ContentType; + +@QuarkusTest +public class PanacheJpaModelGenTest { + private static final String ROOT = "/panache/static-metamodel"; + + @Test + public void staticMetamodel() { + // Create/retrieve + given() + .pathParam("name", "foo") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(404); + given() + .body(new MyPanacheStaticMetamodelEntity("foo")) + .contentType(ContentType.JSON) + .when().post(ROOT) + .then() + .statusCode(204); + given() + .pathParam("name", "foo") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(200); + + // Update + given() + .pathParam("name", "bar") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(404); + given() + .pathParam("before", "foo") + .pathParam("after", "bar") + .contentType(ContentType.JSON) + .when().post(ROOT + "/rename/{before}/to/{after}") + .then() + .statusCode(204); + given() + .pathParam("name", "bar") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(200); + + // Delete + given() + .pathParam("name", "bar") + .contentType(ContentType.JSON) + .when().delete(ROOT + "/by/name/{name}") + .then() + .statusCode(204); + given() + .pathParam("name", "bar") + .contentType(ContentType.JSON) + .when().get(ROOT + "/by/name/{name}") + .then() + .statusCode(404); + } +} diff --git a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithDeploymentResourceAndLocalLookupTest.java b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithDeploymentResourceAndLocalLookupTest.java index ffbb34a7cd4e6..5c586feddcaba 100644 ---
a/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithDeploymentResourceAndLocalLookupTest.java +++ b/integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithDeploymentResourceAndLocalLookupTest.java @@ -71,6 +71,9 @@ public void assertGeneratedResources() throws IOException { assertThat(d.getSpec()).satisfies(deploymentSpec -> { assertThat(deploymentSpec.getTemplate()).satisfies(t -> { + assertThat(t.getMetadata()).satisfies(metadata -> assertThat(metadata.getAnnotations()).contains( + entry("alpha.image.policy.openshift.io/resolve-names", "*"))); + assertThat(t.getMetadata()).satisfies(metadata -> assertThat(metadata.getLabels()).containsAnyOf( entry("app.kubernetes.io/name", NAME), entry("app.kubernetes.io/version", "0.1-SNAPSHOT"))); diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 2d9885d52f10a..e0b6a8a90dd0f 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -254,6 +254,7 @@ hibernate-search-orm-opensearch hibernate-search-orm-elasticsearch-tenancy hibernate-orm-tenancy + hibernate-orm-jpamodelgen hibernate-orm-envers vertx-http vertx-web