Skip to content

Commit

Permalink
Merge pull request #25752 from gsmet/2.9.2-backports-1
Browse files Browse the repository at this point in the history
2.9.2 backports 1
  • Loading branch information
gsmet authored May 24, 2022
2 parents a35de75 + c5f243e commit 8acfcb8
Show file tree
Hide file tree
Showing 41 changed files with 469 additions and 132 deletions.
8 changes: 4 additions & 4 deletions bom/application/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -81,16 +81,16 @@
<!-- What we actually depend on for the annotations, as latest Graal is not available in Maven fast enough: -->
<graal-sdk.version>22.0.0.2</graal-sdk.version>
<gizmo.version>1.0.10.Final</gizmo.version>
<jackson-bom.version>2.13.2.20220328</jackson-bom.version>
<jackson-bom.version>2.13.3</jackson-bom.version>
<commons-logging-jboss-logging.version>1.0.0.Final</commons-logging-jboss-logging.version>
<commons-lang3.version>3.12.0</commons-lang3.version>
<commons-codec.version>1.15</commons-codec.version>
<classmate.version>1.5.1</classmate.version>
<hibernate-orm.version>5.6.9.Final</hibernate-orm.version> <!-- When updating, align bytebuddy.version to Hibernate needs as well (just below): -->
<bytebuddy.version>1.12.9</bytebuddy.version> <!-- Version controlled by Hibernate ORM's needs -->
<hibernate-reactive.version>1.1.5.Final</hibernate-reactive.version>
<hibernate-reactive.version>1.1.6.Final</hibernate-reactive.version>
<hibernate-validator.version>6.2.3.Final</hibernate-validator.version>
<hibernate-search.version>6.1.3.Final</hibernate-search.version>
<hibernate-search.version>6.1.5.Final</hibernate-search.version>
<narayana.version>5.12.6.Final</narayana.version>
<jboss-transaction-api_1.2_spec.version>1.1.1.Final</jboss-transaction-api_1.2_spec.version>
<agroal.version>1.16</agroal.version>
Expand Down Expand Up @@ -158,7 +158,7 @@
<snakeyaml.version>1.30</snakeyaml.version>
<osgi.version>6.0.0</osgi.version>
<mongo-client.version>4.3.4</mongo-client.version>
<mongo-crypt.version>1.4.0</mongo-crypt.version>
<mongo-crypt.version>1.2.1</mongo-crypt.version>
<proton-j.version>0.33.10</proton-j.version>
<okhttp.version>3.14.9</okhttp.version>
<hibernate-quarkus-local-cache.version>0.1.1</hibernate-quarkus-local-cache.version>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -927,11 +927,23 @@ Set<String> checkForFileChange(Function<DevModeContext.ModuleInfo, DevModeContex
}
}

for (String path : timestampSet.watchedFilePaths.keySet()) {
for (String watchedFilePath : timestampSet.watchedFilePaths.keySet()) {
Path watchedFile = Paths.get(watchedFilePath);
boolean isAbsolute = watchedFile.isAbsolute();
List<Path> watchedRoots = roots;
if (isAbsolute) {
// absolute files are assumed to be read directly from the project root.
// They therefore do not get copied to, and deleted from, the outputdir.
watchedRoots = List.of(Path.of("/"));
}
if (watchedRoots.isEmpty()) {
// this compilation unit has no resource roots, and therefore can not have this file
continue;
}
boolean pathCurrentlyExisting = false;
boolean pathPreviouslyExisting = false;
for (Path root : roots) {
Path file = root.resolve(path);
for (Path root : watchedRoots) {
Path file = root.resolve(watchedFilePath);
if (file.toFile().exists()) {
pathCurrentlyExisting = true;
try {
Expand All @@ -940,7 +952,7 @@ Set<String> checkForFileChange(Function<DevModeContext.ModuleInfo, DevModeContex
//existing can be null when running tests
//as there is both normal and test resources, but only one set of watched timestampts
if (existing != null && value > existing) {
ret.add(path);
ret.add(watchedFilePath);
//a write can be a 'truncate' + 'write'
//if the file is empty we may be seeing the middle of a write
if (Files.size(file) == 0) {
Expand All @@ -956,8 +968,8 @@ Set<String> checkForFileChange(Function<DevModeContext.ModuleInfo, DevModeContex
value = Files.getLastModifiedTime(file).toMillis();

log.infof("File change detected: %s", file);
if (doCopy && !Files.isDirectory(file)) {
Path target = outputDir.resolve(path);
if (!isAbsolute && doCopy && !Files.isDirectory(file)) {
Path target = outputDir.resolve(watchedFilePath);
byte[] data = Files.readAllBytes(file);
try (FileOutputStream out = new FileOutputStream(target.toFile())) {
out.write(data);
Expand All @@ -975,57 +987,16 @@ Set<String> checkForFileChange(Function<DevModeContext.ModuleInfo, DevModeContex
}
if (!pathCurrentlyExisting) {
if (pathPreviouslyExisting) {
ret.add(path);
ret.add(watchedFilePath);
}

Path target = outputDir.resolve(path);
try {
FileUtil.deleteIfExists(target);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}

// Mostly a copy of the code above but to handle watched files that are set with absolute path (not in the app resources)
for (String watchedFilePath : timestampSet.watchedFilePaths.keySet()) {
Path watchedFile = Paths.get(watchedFilePath);
if (watchedFile.isAbsolute()) {
if (watchedFile.toFile().exists()) {
if (!isAbsolute) {
Path target = outputDir.resolve(watchedFilePath);
try {
long value = Files.getLastModifiedTime(watchedFile).toMillis();
Long existing = timestampSet.watchedFileTimestamps.get(watchedFile);
//existing can be null when running tests
//as there is both normal and test resources, but only one set of watched timestampts
if (existing != null && value > existing) {
ret.add(watchedFilePath);
//a write can be a 'truncate' + 'write'
//if the file is empty we may be seeing the middle of a write
if (Files.size(watchedFile) == 0) {
try {
Thread.sleep(200);
} catch (InterruptedException e) {
//ignore
}
}
//re-read, as we may have read the original TS if the middle of
//a truncate+write, even if the write had completed by the time
//we read the size
value = Files.getLastModifiedTime(watchedFile).toMillis();

log.infof("File change detected: %s", watchedFile);
timestampSet.watchedFileTimestamps.put(watchedFile, value);
}
FileUtil.deleteIfExists(target);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} else {

Long prevValue = timestampSet.watchedFileTimestamps.put(watchedFile, 0L);

if (prevValue != null && prevValue > 0) {
ret.add(watchedFilePath);
}
}
}
}
Expand Down
31 changes: 31 additions & 0 deletions docs/src/main/asciidoc/cli-tooling.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ The Quarkus CLI is available in several developer-oriented package managers such
* https://sdkman.io[SDKMAN!]
* https://brew.sh[Homebrew]
* https://community.chocolatey.org/packages/quarkus[Chocolatey]
* https://scoop.sh[Scoop]

If you already use (or want to use) one of these tools, it is the simplest way to install the Quarkus CLI and keep it updated.

Expand All @@ -30,6 +31,7 @@ Choose the alternative that is the most practical for you:
* SDKMAN! - for Linux and macOS
* Homebrew - for Linux and macOS
* Chocolatey - for Windows
* Scoop - for Windows

[role="primary asciidoc-tabs-sync-jbang"]
.JBang
Expand Down Expand Up @@ -217,6 +219,35 @@ choco upgrade quarkus
----
****

[role="secondary asciidoc-tabs-sync-scoop"]
.Scoop
****
https://scoop.sh[Scoop] is a package manager for Windows.
You can use Scoop to install (and update) the Quarkus CLI.
[NOTE]
====
Make sure you have a JDK installed before installing the Quarkus CLI.
You can install a JDK with `scoop install openjdk17` for Java 17 or `scoop install openjdk11` for Java 11.
====
To install the Quarkus CLI using Scoop, run the following command:
[source,shell]
----
scoop install quarkus-cli
----
It will install the latest version of the Quarkus CLI.
Once installed, `quarkus` will be in your `$PATH` and if you run `quarkus --version` it will print the installed version:
[source,shell,subs=attributes+]
----
quarkus --version
{quarkus-version}
----
You can upgrade the Quarkus CLI with:
[source,shell]
----
scoop update quarkus-cli
----
****

== Using the CLI

Use `--help` to display help information with all the available commands:
Expand Down
6 changes: 3 additions & 3 deletions docs/src/main/asciidoc/deploying-to-openshift.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -352,16 +352,16 @@ quarkus.openshift.env.fields.foo=metadata.name
----

==== Using Deployment instead of DeploymentConfig
Out of the box the extension will generate a `DeploymentConfig` resource. Often users, prefer to use `Deployment` as the main deployment resource, but still make use of Openshift specific resources like `Route`, `BuildConfig` etc.
Out of the box the extension will generate a `DeploymentConfig` resource. Often, users prefer to use `Deployment` as the main deployment resource, but still make use of OpenShift specific resources like `Route`, `BuildConfig` etc.
This feature is enabled by setting `quarkus.openshift.deployment-kind` to `Deployment`.

[source,properties]
----
quarkus.openshift.deployment-kind=Deployment
----

Since `Deployment` is a Kubernetes resource and not Openshift specific, it can't possibly leverage `ImageStream` resources, as is the case with `DeploymentConfig`. This means that the image references need to include the container image registry that hosts the image.
When the image is built, using Openshift builds (s2i binary and docker strategy) the Openshift internral image registry `image-registry.openshift-image-registry.svc:5000` will be used, unless an other registry has been explicitly specified by the user. Please note, that in the internal registry the project/namespace name is added as part of the image repository: `image-registry.openshift-image-registry.svc:5000/<project name>/<name>:<tag>`, so users will need to make sure that the target project/namespace name is aligned with the `quarkus.container-image.group`.
Since `Deployment` is a Kubernetes resource and not OpenShift specific, it can't possibly leverage `ImageStream` resources, as is the case with `DeploymentConfig`. This means that the image references need to include the container image registry that hosts the image.
When the image is built using OpenShift builds (s2i binary and docker strategy), the OpenShift internal image registry `image-registry.openshift-image-registry.svc:5000` will be used, unless another registry has been explicitly specified by the user. Please note that in the internal registry the project/namespace name is added as part of the image repository: `image-registry.openshift-image-registry.svc:5000/<project name>/<name>:<tag>`, so users will need to make sure that the target project/namespace name is aligned with the `quarkus.container-image.group`.

[source,properties]
----
Expand Down
2 changes: 1 addition & 1 deletion docs/src/main/asciidoc/kubernetes-config.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
include::./attributes.adoc[]


Quarkus includes the `kubernetes-config` extension which allows developers to use Kubernetes https://cloud.google.com/kubernetes-engine/docs/concepts/configmap[ConfigMaps] and https://cloud.google.com/kubernetes-engine/docs/concepts/secret[Secrets] as a configuration source, without having to mount them into the https://kubernetes.io/docs/concepts/workloads/pods/pod/[Pod] running the Quarkus application or make any other modifications to their Kubernetes `Deployment` (or Openshift `DeploymentConfig`).
Quarkus includes the `kubernetes-config` extension which allows developers to use Kubernetes https://cloud.google.com/kubernetes-engine/docs/concepts/configmap[ConfigMaps] and https://cloud.google.com/kubernetes-engine/docs/concepts/secret[Secrets] as a configuration source, without having to mount them into the https://kubernetes.io/docs/concepts/workloads/pods/pod/[Pod] running the Quarkus application or make any other modifications to their Kubernetes `Deployment` (or OpenShift `DeploymentConfig`).


== Configuration
Expand Down
5 changes: 3 additions & 2 deletions docs/src/main/asciidoc/rest-client-reactive.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -649,9 +649,10 @@ A simple example of implementing such a `ResponseExceptionMapper` for the `Exten

[source, java]
----
public interface MyResponseExceptionMapper implements ResponseExceptionMapper<RuntimeException> {
public class MyResponseExceptionMapper implements ResponseExceptionMapper<RuntimeException> {
RuntimeException toThrowable(Response response) {
@Override
public RuntimeException toThrowable(Response response) {
if (response.getStatus() == 500) {
throw new RuntimeException("The remote service responded with HTTP 500");
}
Expand Down
4 changes: 2 additions & 2 deletions docs/src/main/asciidoc/security-webauthn.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -888,7 +888,7 @@ webAuthn.registerOnly({ name: userName, displayName: firstName + " " + lastName
});
----

=== Only invoke the registration challenge and authenticator
=== Only invoke the login challenge and authenticator

The `webAuthn.loginOnly` method invokes the login challenge endpoint, then calls the authenticator and returns
a https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise[Promise object] containing a
Expand All @@ -897,7 +897,7 @@ in hidden form `input` elements, for example, and send it as part of a regular H

[source,javascript]
----
webAuthn.login({ name: userName })
webAuthn.loginOnly({ name: userName })
.then(body => {
// store the login JSON in form elements
document.getElementById('webAuthnId').value = body.id;
Expand Down
2 changes: 1 addition & 1 deletion docs/src/main/asciidoc/security.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ Below is a summary of the options.
|Introspection
|Local Verification
|Introspection
|Custom Token Verificition
|Custom Token Verification
|No
|With Injected JWTParser
|No
Expand Down
2 changes: 1 addition & 1 deletion docs/src/main/asciidoc/vertx.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ public Multi<String> readLargeFile() {
new OpenOptions().setRead(true)
)
.onItem().transformToMulti(file -> file.toMulti()) // <3>
.onItem().transform(content -> content.toString(StandardCharsets.UTF_8)) // <4>
.onItem().transform(content -> content.toString(StandardCharsets.UTF_8) // <4>
+ "\n------------\n"); // <5>
}
----
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import io.quarkus.oidc.common.runtime.OidcConstants;
import io.smallrye.mutiny.Uni;
import io.vertx.core.http.HttpHeaders;
import io.vertx.core.json.DecodeException;
import io.vertx.core.json.JsonObject;
import io.vertx.mutiny.core.MultiMap;
import io.vertx.mutiny.core.buffer.Buffer;
Expand Down Expand Up @@ -181,6 +182,8 @@ private static JsonObject decodeJwtToken(String accessToken) {
return new JsonObject(new String(Base64.getUrlDecoder().decode(parts[1]), StandardCharsets.UTF_8));
} catch (IllegalArgumentException ex) {
LOG.debug("JWT token can not be decoded using the Base64Url encoding scheme");
} catch (DecodeException ex) {
LOG.debug("JWT token can not be decoded");
}
} else {
LOG.debug("Access token is not formatted as the encoded JWT token");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

import io.quarkus.qute.Expression;
import io.quarkus.qute.Expressions;
import io.quarkus.qute.Expressions.SplitConfig;
import io.quarkus.qute.TemplateException;
import io.quarkus.qute.TemplateNode.Origin;

Expand Down Expand Up @@ -169,16 +170,35 @@ private static Type resolveType(String value) {
return Type.create(DotName.createSimple(value), Kind.CLASS);
} else {
String name = value.substring(0, angleIdx);
String params = value.substring(angleIdx + 1, value.length() - 1);
DotName rawName = DotName.createSimple(name);
String[] parts = value.substring(angleIdx + 1, value.length() - 1).split(",");
Type[] arguments = new Type[parts.length];
List<String> parts = Expressions.splitParts(params, PARAMETERIZED_TYPE_SPLIT_CONFIG);
Type[] arguments = new Type[parts.size()];
for (int i = 0; i < arguments.length; i++) {
arguments[i] = resolveType(parts[i].trim());
arguments[i] = resolveType(parts.get(i).trim());
}
return ParameterizedType.create(rawName, arguments, null);
}
}

static final SplitConfig PARAMETERIZED_TYPE_SPLIT_CONFIG = new SplitConfig() {

@Override
public boolean isSeparator(char candidate) {
return ',' == candidate;
}

public boolean isInfixNotationSupported() {
return false;
}

@Override
public boolean isLiteralSeparator(char candidate) {
return candidate == '<' || candidate == '>';
}

};

private TypeInfos() {
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,10 @@
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;

import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.Indexer;
Expand All @@ -20,6 +22,7 @@
import io.quarkus.qute.Engine;
import io.quarkus.qute.Expression;
import io.quarkus.qute.deployment.TypeInfos.Info;
import io.quarkus.qute.deployment.TypeInfos.TypeInfo;

public class TypeInfosTest {

Expand Down Expand Up @@ -61,6 +64,24 @@ public void testCreate() throws IOException {
assertEquals("size", infos.get(1).value);
}

@Test
public void testNestedGenerics() throws IOException {
List<Expression> expressions = Engine.builder().build()
.parse("{@java.util.List<java.util.Map$Entry<String,Integer>> list}{list.size}")
.getExpressions();
IndexView index = index(Foo.class, List.class, Entry.class);
List<Info> infos = TypeInfos.create(expressions.get(0), index, id -> "dummy");
assertEquals(2, infos.size());
assertTrue(infos.get(0).isTypeInfo());
TypeInfo info = infos.get(0).asTypeInfo();
assertEquals(DotName.createSimple(List.class.getName()), info.rawClass.name());
ParameterizedType entryType = info.resolvedType.asParameterizedType().arguments().get(0).asParameterizedType();
assertEquals(DotName.createSimple(Entry.class.getName()), entryType.name());
assertEquals(DotName.createSimple("String"), entryType.arguments().get(0).name());
assertEquals(DotName.createSimple("Integer"), entryType.arguments().get(1).name());
assertTrue(infos.get(1).isProperty());
}

private void assertHints(String hintStr, String... expectedHints) {
Matcher m = TypeInfos.HintInfo.HINT_PATTERN.matcher(hintStr);
List<String> hints = new ArrayList<>();
Expand Down
Loading

0 comments on commit 8acfcb8

Please sign in to comment.