Merge pull request #29860 from gsmet/2.13.6-backports-2
2.13.6 backports 2
gsmet authored Dec 15, 2022
2 parents 8102709 + 09a0d9a commit 63e8b33
Showing 93 changed files with 1,335 additions and 211 deletions.
6 changes: 1 addition & 5 deletions .github/workflows/ci-actions-incremental.yml
@@ -735,14 +735,10 @@ jobs:
uses: graalvm/setup-graalvm@v1
if: startsWith(matrix.os-name, 'windows')
with:
- version: 'latest'
+ version: 'mandrel-latest'
java-version: '17'
components: 'native-image'
github-token: ${{ secrets.GITHUB_TOKEN }}
- - name: Install native-image component
-   if: startsWith(matrix.os-name, 'windows')
-   run: |
-     gu.cmd install native-image
# We do this so we can get better analytics for the downloaded version of the build images
- name: Update Docker Client User Agent
shell: bash
13 changes: 12 additions & 1 deletion bom/application/pom.xml
@@ -117,7 +117,7 @@
<cronutils.version>9.2.0</cronutils.version>
<quartz.version>2.3.2</quartz.version>
<h2.version>2.1.214</h2.version>
- <postgresql-jdbc.version>42.5.0</postgresql-jdbc.version>
+ <postgresql-jdbc.version>42.5.1</postgresql-jdbc.version>
<mariadb-jdbc.version>3.0.8</mariadb-jdbc.version>
<mysql-jdbc.version>8.0.30</mysql-jdbc.version>
<mssql-jdbc.version>11.2.0.jre11</mssql-jdbc.version>
@@ -205,6 +205,7 @@
<strimzi-oauth.nimbus.version>9.24.4</strimzi-oauth.nimbus.version>
<java-buildpack-client.version>0.0.6</java-buildpack-client.version>
<org-crac.version>0.1.1</org-crac.version>
+ <sshd-common.version>2.9.2</sshd-common.version>
</properties>

<dependencyManagement>
@@ -4720,6 +4721,16 @@
</exclusion>
</exclusions>
</dependency>
+ <dependency>
+   <groupId>org.apache.sshd</groupId>
+   <artifactId>sshd-common</artifactId>
+   <version>${sshd-common.version}</version>
+ </dependency>
+ <dependency>
+   <groupId>org.apache.sshd</groupId>
+   <artifactId>sshd-core</artifactId>
+   <version>${sshd-common.version}</version>
+ </dependency>
<dependency>
<groupId>org.wildfly.security</groupId>
<artifactId>wildfly-elytron-sasl-gs2</artifactId>
2 changes: 1 addition & 1 deletion build-parent/pom.xml
@@ -161,7 +161,7 @@
<forbiddenapis-maven-plugin.version>3.1</forbiddenapis-maven-plugin.version>

<!-- platform properties - this is a floating tag -->
- <platform.quarkus.native.builder-image>graalvm</platform.quarkus.native.builder-image>
+ <platform.quarkus.native.builder-image>mandrel</platform.quarkus.native.builder-image>

<script.extension>sh</script.extension>
<docker-prune.location>${maven.multiModuleProjectDirectory}/.github/docker-prune.${script.extension}</docker-prune.location>
@@ -9,6 +9,8 @@
import java.net.Socket;
import java.net.URI;
import java.net.URISyntaxException;
+ import java.nio.file.Files;
+ import java.nio.file.Path;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
@@ -73,8 +75,8 @@ private TestContainersStrategy(boolean silent) {

@Override
public Result get() {
- //testcontainers uses the Unreliables library to test if docker is started
- //this runs in threads that start with 'ducttape'
+ // Testcontainers uses the Unreliables library to test if docker is started
+ // this runs in threads that start with 'ducttape'
StartupLogCompressor compressor = new StartupLogCompressor("Checking Docker Environment", Optional.empty(), null,
(s) -> s.getName().startsWith("ducttape"));
try {
@@ -84,7 +86,7 @@ public Result get() {

Class<?> configurationClass = Thread.currentThread().getContextClassLoader()
.loadClass("org.testcontainers.utility.TestcontainersConfiguration");
- Object configurationInstance = configurationClass.getMethod("instance").invoke(null);
+ Object configurationInstance = configurationClass.getMethod("getInstance").invoke(null);
String oldReusePropertyValue = (String) configurationClass
.getMethod("getUserProperty", String.class, String.class)
.invoke(configurationInstance, "testcontainers.reuse.enable", "false"); // use the default provided in TestcontainersConfiguration#environmentSupportsReuse
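
Stripped of the reflection, which is only there because Testcontainers may not be on the build classpath, the lookup above amounts to this direct sketch (assuming Testcontainers is available):

import org.testcontainers.utility.TestcontainersConfiguration;

class ReuseCheck {
    // Direct, non-reflective equivalent of the reflective calls above.
    static String currentReuseProperty() {
        TestcontainersConfiguration configuration = TestcontainersConfiguration.getInstance();
        return configuration.getUserProperty("testcontainers.reuse.enable", "false");
    }
}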
@@ -104,7 +106,7 @@ public Result get() {
} catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
if (!silent) {
compressor.closeAndDumpCaptured();
LOGGER.debug("Unable to use testcontainers to determine if Docker is working", e);
LOGGER.debug("Unable to use Testcontainers to determine if Docker is working", e);
}
return Result.UNKNOWN;
} finally {
@@ -122,26 +124,46 @@ public Result get() {
*/
private static class DockerHostStrategy implements Strategy {

+ private static final String UNIX_SCHEME = "unix";
+
@Override
public Result get() {

String dockerHost = System.getenv("DOCKER_HOST");
- if (dockerHost != null && !dockerHost.startsWith("unix:")) {
- try {
- URI url = new URI(dockerHost);

+ if (dockerHost == null) {
+ return Result.UNKNOWN;
+ }

+ try {
+ URI dockerHostUri = new URI(dockerHost);

+ if (UNIX_SCHEME.equals(dockerHostUri.getScheme())) {
+ // Java 11 does not support connecting to Unix sockets so for now let's use a naive approach
+ Path dockerSocketPath = Path.of(dockerHostUri.getPath());
+
+ if (Files.isWritable(dockerSocketPath)) {
+ return Result.AVAILABLE;
+ } else {
+ LOGGER.warnf(
+ "Unix socket defined in DOCKER_HOST %s is not writable, make sure Docker is running on the specified host",
+ dockerHost);
+ }
+ } else {
try (Socket s = new Socket()) {
- s.connect(new InetSocketAddress(url.getHost(), url.getPort()), DOCKER_HOST_CHECK_TIMEOUT);
+ s.connect(new InetSocketAddress(dockerHostUri.getHost(), dockerHostUri.getPort()),
+ DOCKER_HOST_CHECK_TIMEOUT);
return Result.AVAILABLE;
} catch (IOException e) {
LOGGER.warnf(
"Unable to connect to DOCKER_HOST URI %s, make sure docker is running on the specified host",
"Unable to connect to DOCKER_HOST URI %s, make sure Docker is running on the specified host",
dockerHost);
}
- } catch (URISyntaxException | IllegalArgumentException e) {
- LOGGER.warnf("Unable to parse DOCKER_HOST URI %s, it will be ignored for working docker detection",
- dockerHost);
- }
+ } catch (URISyntaxException | IllegalArgumentException e) {
+ LOGGER.warnf("Unable to parse DOCKER_HOST URI %s, it will be ignored for working Docker detection",
+ dockerHost);
+ }

return Result.UNKNOWN;
}
}
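
The Files.isWritable() probe above is a heuristic because, as the comment notes, Java 11 cannot open Unix sockets. On JDK 16+ the same check could make a real connection through UnixDomainSocketAddress; a minimal sketch of that alternative:

import java.io.IOException;
import java.net.StandardProtocolFamily;
import java.net.UnixDomainSocketAddress;
import java.nio.channels.SocketChannel;

class UnixSocketProbe {
    // JDK 16+ alternative to the writability heuristic: actually connect to the socket.
    static boolean canConnect(String socketPath) {
        try (SocketChannel channel = SocketChannel.open(StandardProtocolFamily.UNIX)) {
            return channel.connect(UnixDomainSocketAddress.of(socketPath));
        } catch (IOException e) {
            return false;
        }
    }
}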
@@ -316,9 +316,9 @@ private void periodicTestCompile() {
scanLock.lock();
TestScanningLock.lockForTests();
try {
- ClassScanResult changedTestClassResult = compileTestClasses();
ClassScanResult changedApp = checkForChangedClasses(compiler, DevModeContext.ModuleInfo::getMain, false, test,
true);
+ ClassScanResult changedTestClassResult = compileTestClasses();
if (changedApp.compilationHappened) {
if (testCompileProblem != null) {
testSupport.testCompileFailed(testCompileProblem);
@@ -106,6 +106,8 @@
*/
public class JarResultBuildStep {

+ private static final String DOT_JAR = ".jar";

private static final Predicate<String> UBER_JAR_IGNORED_ENTRIES_PREDICATE = new IsEntryIgnoredForUberJarPredicate();

private static final Predicate<String> UBER_JAR_CONCATENATED_ENTRIES_PREDICATE = new Predicate<>() {
@@ -264,9 +266,20 @@ private JarBuildItem buildUberJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
ClassLoadingConfig classLoadingConfig) throws Exception {

//we use the -runner jar name, unless we are building both types
- Path runnerJar = outputTargetBuildItem.getOutputDirectory()
- .resolve(outputTargetBuildItem.getBaseName() + packageConfig.getRunnerSuffix() + ".jar");
- Files.deleteIfExists(runnerJar);
+ final Path runnerJar = outputTargetBuildItem.getOutputDirectory()
+ .resolve(outputTargetBuildItem.getBaseName() + packageConfig.getRunnerSuffix() + DOT_JAR);

+ // If the runner jar appears to exist already we create a new one with a tmp suffix.
+ // Deleting an existing runner jar may result in deleting the original (non-runner) jar (in case the runner suffix is empty)
+ // which is used as a source of content for the runner jar.
+ final Path tmpRunnerJar;
+ if (Files.exists(runnerJar)) {
+ tmpRunnerJar = outputTargetBuildItem.getOutputDirectory()
+ .resolve(outputTargetBuildItem.getBaseName() + packageConfig.getRunnerSuffix() + ".tmp");
+ Files.deleteIfExists(tmpRunnerJar);
+ } else {
+ tmpRunnerJar = runnerJar;
+ }

buildUberJar0(curateOutcomeBuildItem,
outputTargetBuildItem,
Expand All @@ -280,12 +293,16 @@ private JarBuildItem buildUberJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
ignoredResources,
mainClassBuildItem,
classLoadingConfig,
- runnerJar);
+ tmpRunnerJar);

+ if (tmpRunnerJar != runnerJar) {
+ Files.copy(tmpRunnerJar, runnerJar, StandardCopyOption.REPLACE_EXISTING);
+ tmpRunnerJar.toFile().deleteOnExit();
+ }

//for uberjars we move the original jar, so there is only a single jar in the output directory
final Path standardJar = outputTargetBuildItem.getOutputDirectory()
- .resolve(outputTargetBuildItem.getOriginalBaseName() + ".jar");
+ .resolve(outputTargetBuildItem.getOriginalBaseName() + DOT_JAR);
final Path originalJar = Files.exists(standardJar) ? standardJar : null;

return new JarBuildItem(runnerJar, originalJar, null, PackageConfig.UBER_JAR,
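
The tmp-suffix handling above exists because the runner suffix may legitimately be empty, in which case the runner jar and the original jar resolve to the same file. A toy illustration with hypothetical names:

import java.nio.file.Path;

class RunnerJarCollision {
    public static void main(String[] args) {
        Path outputDir = Path.of("target");
        String baseName = "my-app-1.0.0"; // hypothetical
        String runnerSuffix = ""; // an empty suffix is valid configuration
        Path runnerJar = outputDir.resolve(baseName + runnerSuffix + ".jar"); // target/my-app-1.0.0.jar
        Path standardJar = outputDir.resolve(baseName + ".jar"); // the very same file
        // Deleting runnerJar up front would therefore delete the content source too.
        System.out.println(runnerJar.equals(standardJar)); // true
    }
}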
@@ -504,7 +521,7 @@ private JarBuildItem buildLegacyThinJar(CurateOutcomeBuildItem curateOutcomeBuil
ClassLoadingConfig classLoadingConfig) throws Exception {

Path runnerJar = outputTargetBuildItem.getOutputDirectory()
- .resolve(outputTargetBuildItem.getBaseName() + packageConfig.getRunnerSuffix() + ".jar");
+ .resolve(outputTargetBuildItem.getBaseName() + packageConfig.getRunnerSuffix() + DOT_JAR);
Path libDir = outputTargetBuildItem.getOutputDirectory().resolve("lib");
Files.deleteIfExists(runnerJar);
IoUtils.createOrEmptyDir(libDir);
@@ -660,8 +677,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem,
}

//now the application classes
- Path runnerJar = appDir
- .resolve(outputTargetBuildItem.getBaseName() + ".jar");
+ Path runnerJar = appDir.resolve(outputTargetBuildItem.getBaseName() + DOT_JAR);
jars.add(runnerJar);

if (!rebuild) {
@@ -965,7 +981,7 @@ private NativeImageSourceJarBuildItem buildNativeImageThinJar(CurateOutcomeBuild
copyJsonConfigFiles(applicationArchivesBuildItem, targetDirectory);

Path runnerJar = targetDirectory
- .resolve(outputTargetBuildItem.getBaseName() + packageConfig.getRunnerSuffix() + ".jar");
+ .resolve(outputTargetBuildItem.getBaseName() + packageConfig.getRunnerSuffix() + DOT_JAR);
Path libDir = targetDirectory.resolve(LIB);
Files.createDirectories(libDir);

@@ -1550,7 +1566,7 @@ public boolean decompile(Path jarToDecompile) {
String jarFileName = jarToDecompile.getFileName().toString();
Path decompiledJar = context.decompiledOutputDir.resolve(jarFileName);
try {
- ZipUtils.unzip(decompiledJar, context.decompiledOutputDir.resolve(jarFileName.replace(".jar", "")));
+ ZipUtils.unzip(decompiledJar, context.decompiledOutputDir.resolve(jarFileName.replace(DOT_JAR, "")));
Files.deleteIfExists(decompiledJar);
} catch (IOException ignored) {
// it doesn't really matter if we can't unzip the jar as we do it merely for user convenience
5 changes: 3 additions & 2 deletions devtools/maven/src/main/java/io/quarkus/maven/BuildMojo.java
@@ -29,6 +29,7 @@
import io.quarkus.bootstrap.app.AugmentResult;
import io.quarkus.bootstrap.app.CuratedApplication;
import io.quarkus.bootstrap.util.IoUtils;
+ import io.quarkus.maven.dependency.ArtifactCoords;

/**
* Builds the Quarkus application.
@@ -77,11 +78,11 @@ protected boolean beforeExecute() throws MojoExecutionException {
getLog().info("Skipping Quarkus build");
return false;
}
- if (mavenProject().getPackaging().equals("pom")) {
+ if (mavenProject().getPackaging().equals(ArtifactCoords.TYPE_POM)) {
getLog().info("Type of the artifact is POM, skipping build goal");
return false;
}
- if (!mavenProject().getArtifact().getArtifactHandler().getExtension().equals("jar")) {
+ if (!mavenProject().getArtifact().getArtifactHandler().getExtension().equals(ArtifactCoords.TYPE_JAR)) {
throw new MojoExecutionException(
"The project artifact's extension is '" + mavenProject().getArtifact().getArtifactHandler().getExtension()
+ "' while this goal expects it be 'jar'");
5 changes: 5 additions & 0 deletions devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java
@@ -382,6 +382,11 @@ public void setLog(Log log) {
@Override
public void execute() throws MojoFailureException, MojoExecutionException {

+ if (project.getPackaging().equals(ArtifactCoords.TYPE_POM)) {
+ getLog().info("Type of the artifact is POM, skipping dev goal");
+ return;
+ }

mavenVersionEnforcer.ensureMavenVersion(getLog(), session);

initToolchain();
24 changes: 19 additions & 5 deletions docs/src/main/asciidoc/podman.adoc
@@ -92,21 +92,35 @@ sudo apt install podman podman-docker docker-compose

Podman supports two modes of operation: rootful, in which case the container runs as root on the host system, and rootless, where the container runs under a standard Unix user account.
On Linux, the REST API Unix socket is, by default, restricted to only allow the root user to access it.
- This prevents someone from using a container to achieve a privilege escalation on the syetem.
+ This prevents someone from using a container to achieve a privilege escalation on the system.
While these restrictions can be softened to allow a special group instead of just root, the recommended approach is to use rootless Podman on Linux.
- To use rootless Podman, you need to set a DOCKER_HOST environment variable to point to the user-specific socket.
+ To use rootless Podman, you need to set a `DOCKER_HOST` environment variable to point to the user-specific socket.
In both cases, you need to start the REST API by enabling the Podman socket service through systemd.

[source]
----
# Enable the podman socket with Docker REST API (only needs to be done once)
systemctl --user enable podman.socket --now
- # Set the required environment variables (need to be run everytime or added to profile)
----

+ Then, you can obtain the path of the socket with the following command:
+
+ [source]
+ ----
+ $ podman info | grep -A2 'remoteSocket'
+ remoteSocket:
+   exists: true
+   path: /path/to/podman.sock
+ ----

- export DOCKER_HOST=unix:///run/user/${UID}/podman/podman.sock
+ Setting the `DOCKER_HOST` environment variable must be done every time or added to the profile:

+ [source]
+ ----
+ export DOCKER_HOST=unix:///path/to/podman.sock <1>
+ ----
+ <1> Replace `/path/to/podman.sock` with the path you obtained previously.
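
To check that the socket actually answers Docker REST API requests, you can query its `_ping` endpoint (assuming `curl` is installed; the host in the URL is only a placeholder when a Unix socket is used):

[source]
----
$ curl -s --unix-socket /path/to/podman.sock http://localhost/_ping
OK
----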

For a detailed explanation, see this https://quarkus.io/blog/quarkus-devservices-testcontainers-podman/[blog article].

22 changes: 22 additions & 0 deletions docs/src/main/asciidoc/rest-data-panache.adoc
@@ -307,6 +307,7 @@ public interface PeopleResource extends PanacheEntityResource<Person, Long> {

* `exposed` - whether resource could be exposed. A global resource property that can be overridden for each method. Default is `true`.
* `path` - resource base path. Default path is a hyphenated lowercase resource name without a suffix of `resource` or `controller`.
+ * `rolesAllowed` - List of the security roles permitted to access the resources. It needs a Quarkus security extension to be present, otherwise it will be ignored. Default is empty.
* `paged` - whether collection responses should be paged or not.
First, last, previous and next page URIs are included in the response headers if they exist.
Request page index and size are taken from the `page` and `size` query parameters that default to `0` and `20` respectively.
@@ -319,6 +320,27 @@

* `exposed` - does not expose a particular HTTP verb when set to `false`. Default is `true`.
* `path` - operation path (this is appended to the resource base path). Default is an empty string.
+ * `rolesAllowed` - List of the security roles permitted to access this operation. It needs a Quarkus security extension to be present, otherwise it will be ignored. Default is empty.
+
+ == Securing endpoints
+
+ REST Data with Panache will use the Security annotations within the package `javax.annotation.security` that are defined on your resource interfaces:
+
+ [source,java]
+ ----
+ import javax.annotation.security.DenyAll;
+ import javax.annotation.security.RolesAllowed;
+
+ @DenyAll
+ @ResourceProperties
+ public interface PeopleResource extends PanacheEntityResource<Person, Long> {
+     @RolesAllowed("superuser")
+     boolean delete(Long id);
+ }
+ ----
+
+ Additionally, if you are only interested in specifying the roles that are allowed to use the resources, the `@ResourceProperties` and `@MethodProperties` annotations have the field `rolesAllowed` to list the security roles permitted to access the resource or operation.
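
For example, a resource secured entirely through these fields might look like this sketch (assuming a security extension is present; `rolesAllowed` accepts one or more role names):

[source,java]
----
import io.quarkus.rest.data.panache.MethodProperties;
import io.quarkus.rest.data.panache.ResourceProperties;

@ResourceProperties(rolesAllowed = "admin")
public interface PeopleResource extends PanacheEntityResource<Person, Long> {
    // Opens this operation to an additional role; everything else stays admin-only.
    @MethodProperties(rolesAllowed = {"admin", "user"})
    Person get(Long id);
}
----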

== Query parameters

5 changes: 5 additions & 0 deletions docs/src/main/asciidoc/security.adoc
@@ -319,6 +319,11 @@ For more information, see the link:{vault-guide}[Quarkus and HashiCorp Vault] do
If your Quarkus Security architecture includes RESTEasy Reactive and Jackson, Quarkus can limit the fields that are included in JSON serialization based on the configured security.
For more information, see xref:resteasy-reactive.adoc#secure-serialization[Writing REST services with RESTEasy Reactive].

+ == Secure auto-generated resources by REST Data with Panache
+
+ If you're using the REST Data with Panache extension to auto-generate your resources, you can still use the Security annotations within the package `javax.annotation.security`.
+ For more information, see xref:rest-data-panache.adoc#securing-endpoints[Securing auto-generated resources].

== National Vulnerability Database

Most of the Quarkus tags are registered in the US link:https://nvd.nist.gov[National Vulnerability Database] (NVD) in Common Platform Enumeration (CPE) name format.
2 changes: 1 addition & 1 deletion docs/src/main/asciidoc/writing-extensions.adoc
@@ -560,7 +560,7 @@ The name of the deployment module can be configured in the plugin by setting the
----
plugins {
id 'java'
- id 'io.quarkus.extensions'
+ id 'io.quarkus.extension'
}
quarkusExtension {