Skip to content

Commit

Permalink
Merge pull request redhat-developer#32 from bf2fc6cc711aee1a0c2a/java…
Browse files Browse the repository at this point in the history
…_draft

Java draft
  • Loading branch information
secondsun authored Jan 21, 2021
2 parents 752005e + 273ef50 commit 159ff68
Show file tree
Hide file tree
Showing 16 changed files with 367 additions and 159 deletions.
1 change: 1 addition & 0 deletions java/openapi/managed-services-api.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
---
components:
examples:
400CreationExample:
Expand Down
9 changes: 2 additions & 7 deletions java/rhoas/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ This operator creates and manages custom resources used by the RHOAS Managed Kaf
## TL;DR;

```sh
export CLIENT_BEARERTOKEN=$YOUR_SERVICE_API_TOKEN;
mvn clean install;
cd rhoas;
mvn quarkus:dev;
Expand All @@ -28,13 +27,9 @@ mvn clean install;

## Running in dev mode

The operator uses [quarkus](https://quarkus.io) and the [java-operator-sdk](https://github.com/java-operator-sdk/java-operator-sdk). Before you can run the operator you need to set a `CLIENT_BEARERTOKEN` environment variable, or set the `client.bearertoken` system variable.
The operator uses [quarkus](https://quarkus.io) and the [java-operator-sdk](https://github.com/java-operator-sdk/java-operator-sdk).

```sh
export CLIENT_BEARERTOKEN=$YOUR_SERVICE_API_TOKEN;
```

Once the bearer token is set you can start the operator in dev mode.
You can start the operator in dev mode.

```sh
mvn quarkus:dev
Expand Down
24 changes: 13 additions & 11 deletions java/rhoas/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@
<name>rhoas-operator</name>
<description>rhoas-operator</description>
<properties>
<fabric8.version>5.0.0</fabric8.version>
<operator.sdk.version>1.7.0</operator.sdk.version>
<quarkus.version>1.11.0.Final</quarkus.version>
<fabric8.version>5.0.0</fabric8.version>
</properties>
<dependencyManagement>
<dependencies>
Expand All @@ -27,13 +27,11 @@
</dependencies>
</dependencyManagement>
<dependencies>

<dependency>
<groupId>io.fabric8</groupId>
<artifactId>kubernetes-client</artifactId>
<version>${fabric8.version}</version>
</dependency>

<dependency>
<groupId>io.javaoperatorsdk</groupId>
<artifactId>operator-framework</artifactId>
Expand All @@ -44,12 +42,6 @@
<artifactId>operator-framework-quarkus-extension</artifactId>
<version>${operator.sdk.version}</version>
</dependency>
<!--
<dependency>
<groupId>io.dekorate</groupId>
<artifactId>crd-annotations</artifactId>
<version>2.0.0-alpha-1</version>
</dependency> -->
<dependency>
<groupId>com.openshift.cloud</groupId>
<artifactId>managed-services-api</artifactId>
Expand All @@ -60,8 +52,18 @@
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>


<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-scheduler</artifactId>
</dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-smallrye-jwt-build</artifactId>
</dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-smallrye-jwt</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
Expand Down
17 changes: 14 additions & 3 deletions java/rhoas/src/main/java/com/openshift/cloud/RHOASOperator.java
Original file line number Diff line number Diff line change
@@ -1,9 +1,13 @@
package com.openshift.cloud;

import com.openshift.cloud.controllers.ManagedKafkaConnectionController;
import com.openshift.cloud.controllers.ManagedKafkaRequestController;
import com.openshift.cloud.v1alpha.models.ManagedKafkaRequest;

import io.fabric8.kubernetes.client.KubernetesClient;
import io.javaoperatorsdk.operator.Operator;
import io.javaoperatorsdk.operator.api.config.ConfigurationService;
import io.javaoperatorsdk.operator.api.config.ControllerConfiguration;
import io.quarkus.runtime.Quarkus;
import io.quarkus.runtime.QuarkusApplication;
import io.quarkus.runtime.annotations.QuarkusMain;
Expand All @@ -23,19 +27,26 @@ public class RHOASOperator implements QuarkusApplication {
ConfigurationService configuration;

@Inject
ManagedKafkaConnectionController controller;
ManagedKafkaConnectionController connectionController;

@Inject
ManagedKafkaRequestController requestController;

private static final Logger LOG = Logger.getLogger(RHOASOperator.class);

/**
 * JVM entry point: logs startup and delegates lifecycle management to Quarkus,
 * which will instantiate this class and invoke {@link #run(String...)}.
 *
 * @param cliArgs command-line arguments forwarded to Quarkus
 */
public static void main(String... cliArgs) {
LOG.info("RHOAS Operator starting");
Quarkus.run(RHOASOperator.class, cliArgs);
}

/**
 * Quarkus application callback. Logs the custom-resource class handled by each
 * injected controller, then blocks until the application is asked to exit.
 *
 * @param cliArgs command-line arguments (unused here)
 * @return process exit code, always 0
 * @throws Exception propagated from the Quarkus runtime
 */
@Override
public int run(String... cliArgs) throws Exception {
final ControllerConfiguration<?> connectionConfig = configuration.getConfigurationFor(connectionController);
LOG.info("CR class: " + connectionConfig.getCustomResourceClass());

final ControllerConfiguration<?> requestConfig = configuration.getConfigurationFor(requestController);
LOG.info("CR class: " + requestConfig.getCustomResourceClass());

Quarkus.waitForExit();
return 0;
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
package com.openshift.cloud.beans;

import java.util.Optional;

import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.inject.Singleton;

import com.openshift.cloud.v1alpha.models.ManagedKafkaConnection;
import com.openshift.cloud.v1alpha.models.ManagedKafkaConnectionList;
import com.openshift.cloud.v1alpha.models.ManagedKafkaRequest;
import com.openshift.cloud.v1alpha.models.ManagedKafkaRequestList;

import org.jboss.logging.Logger;

Expand All @@ -29,23 +28,25 @@
* provides apiClients for use
*/
@Singleton
public final class ApiClients {
public final class ManagedKafkaK8sClients {

private static final Logger LOG = Logger.getLogger(ApiClients.class.getName());
private static final Logger LOG = Logger.getLogger(ManagedKafkaK8sClients.class.getName());

@Inject
KubernetesClient client;

private CustomResourceDefinition mkcCrd;

private CustomResourceDefinition mkrCrd;

/**
 * Post-construction hook: resolves the ManagedKafkaConnection and
 * ManagedKafkaRequest CRDs through the cluster's v1beta1 apiextensions API and
 * caches them for the typed-client factory methods below.
 */
@PostConstruct
public void init() {
LOG.info("ApiClient bean init begun");

final var extensionsApi = client.apiextensions().v1beta1();

this.mkcCrd = initManagedKafkaConnectionCRDAndClient(extensionsApi);
this.mkrCrd = initManagedKafkaRequestCRDAndClient(extensionsApi);

LOG.info("ApiClient bean init ended");
}
Expand All @@ -60,6 +61,17 @@ public MixedOperation<ManagedKafkaConnection, ManagedKafkaConnectionList, Resour
return client.customResources(mkcCrdContext, ManagedKafkaConnection.class, ManagedKafkaConnectionList.class);
}

/**
 * Builds a typed client for ManagedKafkaRequest custom resources, first
 * registering the kind with the Kubernetes deserializer so raw payloads decode
 * into {@link ManagedKafkaRequest} instances.
 */
public MixedOperation<ManagedKafkaRequest, ManagedKafkaRequestList, Resource<ManagedKafkaRequest>> managedKafkaRequest() {
final var requestCrdContext = CustomResourceDefinitionContext.fromCrd(this.mkrCrd);

KubernetesDeserializer.registerCustomKind(
getApiVersion(ManagedKafkaRequest.class), mkrCrd.getKind(), ManagedKafkaRequest.class);

// Typed client scoped to the cached ManagedKafkaRequest CRD.
return client.customResources(requestCrdContext, ManagedKafkaRequest.class, ManagedKafkaRequestList.class);
}


private CustomResourceDefinition initManagedKafkaConnectionCRDAndClient(
V1beta1ApiextensionAPIGroupDSL crds) {

Expand Down Expand Up @@ -89,6 +101,34 @@ private CustomResourceDefinition initManagedKafkaConnectionCRDAndClient(

}

/**
 * Finds the ManagedKafkaRequest CRD already registered in the cluster, or
 * derives one from the annotated resource type and installs it when absent.
 *
 * @param crds the v1beta1 apiextensions API group entry point
 * @return the existing or freshly created CRD
 */
private CustomResourceDefinition initManagedKafkaRequestCRDAndClient(
V1beta1ApiextensionAPIGroupDSL crds) {

final var requestCrdName = CustomResource.getCRDName(ManagedKafkaRequest.class);

// Look for a CRD whose metadata name matches the expected resource name.
final var existingCrd = crds.customResourceDefinitions().list().getItems().stream()
.filter(crd -> requestCrdName.equals(crd.getMetadata().getName()))
.findFirst();

if (existingCrd.isPresent()) {
LOG.info("Found ManagedKafkaRequest CRD");
return existingCrd.get();
}

// Not installed yet: generate the CRD from the resource type and apply it.
LOG.info("Creating ManagedKafkaRequest CRD");
final var createdCrd = CustomResourceDefinitionContext
.v1beta1CRDFromCustomResourceType(ManagedKafkaRequest.class)
.build();
client.apiextensions().v1beta1().customResourceDefinitions().create(createdCrd);
LOG.info("ManagedKafkaRequest CRD Created");

return createdCrd;
}
/**
* Computes the {@code apiVersion} associated with this HasMetadata
* implementation. The value is derived from the {@link Group} and
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
package com.openshift.cloud.beans;

import io.vertx.core.json.JsonObject;
import org.eclipse.microprofile.config.inject.ConfigProperty;

import javax.inject.Singleton;
import java.io.IOException;
import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;

/**
 * Utility bean that exchanges offline (refresh) tokens for access tokens
 * against the configured SSO server's OpenID Connect token endpoint.
 */
@Singleton
public class TokenExchanger {

@ConfigProperty(name = "auth.serverUrl", defaultValue = "https://sso.redhat.com/auth/realms/redhat-external")
String authServerUrl;

@ConfigProperty(name = "auth.clientId", defaultValue = "cloud-services")
String clientId;

@ConfigProperty(name = "auth.tokenPath", defaultValue = "protocol/openid-connect/token")
String tokenPath;

// HttpClient is immutable and thread-safe; build it once instead of per call.
private final HttpClient httpClient = HttpClient.newBuilder().build();

/**
 * Exchanges a refresh token for an access token.
 *
 * @param secret the offline/refresh token to exchange
 * @return the {@code access_token} issued by the auth server
 * @throws RuntimeException if the server returns a non-200 response (the
 *         response body is used as the message), or on I/O failure or
 *         interruption (the thread's interrupt flag is restored first)
 */
public String getToken(String secret) {
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(authServerUrl + "/" + tokenPath))
.header("content-type", "application/x-www-form-urlencoded")
.timeout(Duration.ofMinutes(2))
// Use the injected clientId instead of hard-coding "cloud-services",
// so the auth.clientId config property is honored.
.POST(ofFormData("grant_type", "refresh_token", "client_id", clientId, "refresh_token", secret))
.build();

try {
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
if (response.statusCode() == 200) {
var json = new JsonObject(response.body());
return json.getString("access_token");
}
// Surface the server's error body; it typically carries the OAuth error code.
throw new RuntimeException(response.body());
} catch (IOException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
// Restore the interrupt flag so callers can still observe the interruption.
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}

/**
 * Builds an {@code application/x-www-form-urlencoded} request body from
 * alternating key/value pairs, URL-encoding each component as UTF-8.
 *
 * @param data alternating keys and values; the length must be even
 * @throws IllegalArgumentException if an odd number of arguments is supplied
 */
public static HttpRequest.BodyPublisher ofFormData(String... data) {
if (data.length % 2 == 1) {
throw new IllegalArgumentException("Data must be key/value pairs, but an odd number of arguments was given.");
}

var form = new StringBuilder();
for (int index = 0; index < data.length; index += 2) {
if (form.length() > 0) {
form.append('&');
}
form.append(URLEncoder.encode(data[index], StandardCharsets.UTF_8))
.append('=')
.append(URLEncoder.encode(data[index + 1], StandardCharsets.UTF_8));
}

return HttpRequest.BodyPublishers.ofString(form.toString());
}

}
Loading

0 comments on commit 159ff68

Please sign in to comment.