Handle nested query parameters in ApiClient.withQuery #125

Merged 9 commits on Jul 27, 2023.
Changes from 6 commits

File: ApiClient.java
@@ -6,15 +6,13 @@
 import com.databricks.sdk.core.http.Response;
 import com.databricks.sdk.core.utils.RealTimer;
 import com.databricks.sdk.core.utils.Timer;
-import com.databricks.sdk.support.QueryParam;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.JavaType;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;
 import java.io.IOException;
-import java.lang.reflect.Field;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Random;
@@ -91,25 +89,11 @@ private <I> Request withQuery(Request in, I entity) {
     if (entity == null) {
       return in;
     }
-    try {
-      // deterministic query string: in the order of class fields
-      for (Field field : entity.getClass().getDeclaredFields()) {
-        QueryParam param = field.getAnnotation(QueryParam.class);
-        if (param == null) {
-          continue;
-        }
-        field.setAccessible(true);
-        Object value = field.get(entity);
-        field.setAccessible(false);
-        if (value == null) {
-          continue;
-        }
-        in.withQueryParam(param.value(), value.toString());
-      }
-      return in;
-    } catch (IllegalAccessException e) {
-      throw new DatabricksException("Cannot create query string: " + e.getMessage(), e);
+    for (GrpcTranscodingQueryParamsSerializer.HeaderEntry e :
+        GrpcTranscodingQueryParamsSerializer.serialize(entity)) {
+      in.withQueryParam(e.getKey(), e.getValue());
     }
+    return in;
   }
 
   public <I, O> Collection<O> getCollection(String path, I in, Class<O> element) {

File: GrpcTranscodingQueryParamsSerializer.java (new file)
@@ -0,0 +1,136 @@
package com.databricks.sdk.core;

import com.databricks.sdk.support.QueryParam;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.lang.reflect.Field;
import java.util.*;

/**
 * Serializes an object into a list of query parameter entries compatible with gRPC-transcoding.
 *
 * <p>The Databricks REST API uses gRPC transcoding to expose gRPC services as REST APIs. This
 * serializer is used to serialize objects into a map of query parameter entries that can be used to
 * invoke a gRPC service via REST.
 *
 * <p>See <a
 * href="https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule">the
 * documentation for gRPC transcoding</a> for more details.
 */
public class GrpcTranscodingQueryParamsSerializer {
  public static class HeaderEntry {
    private final String key;
    private final String value;

    public HeaderEntry(String key, String value) {
      this.key = key;
      this.value = value;
    }

    public String getKey() {
      return key;
    }

    public String getValue() {
      return value;
    }
  }

  /**
   * Serializes an object into a map of query parameter values compatible with gRPC-transcoding.
   *
   * <p>This method respects the QueryParam and JsonProperty annotations on the object's fields when
   * serializing the field name. If both annotations are present, the value of the QueryParam
   * annotation is used.
   *
   * <p>The returned object does not contain any top-level fields that are not annotated with
   * QueryParam. All nested fields are included, even if they are not annotated with QueryParam.
   *
   * @param o The object to serialize.
   * @return A list of query parameter entries compatible with gRPC-transcoding.
   */
  public static List<HeaderEntry> serialize(Object o) {
    Map<String, Object> flattened = flattenObject(o);
    for (Field f : o.getClass().getDeclaredFields()) {
      QueryParam queryParam = f.getAnnotation(QueryParam.class);
      if (queryParam == null) {
        flattened.remove(getFieldName(f));
      }
    }

    List<HeaderEntry> result = new ArrayList<>();
    for (Map.Entry<String, Object> entry : flattened.entrySet()) {
      String key = entry.getKey();
      Object value = entry.getValue();
      if (value instanceof Collection) {
        for (Object v : (Collection<Object>) value) {
          result.add(new HeaderEntry(key, v.toString()));
        }
      } else {
        result.add(new HeaderEntry(key, value.toString()));
      }
    }
    return result;
  }

  private static final List<Class<?>> primitiveTypes =
      Arrays.asList(
          boolean.class,
          Boolean.class,
          byte.class,
          Byte.class,
          char.class,
          Character.class,
          short.class,
          Short.class,
          int.class,
          Integer.class,
          long.class,
          Long.class,
          float.class,
          Float.class,
          double.class,
          Double.class,
          String.class);

Comment on lines +74 to +92

Contributor: We can probably reuse this in future, it might be good to have this in a separate file.

Contributor (author): If we need to reuse it, we can make it public at some point. It's hard to figure out what class/file this would exist in that would be useful in other contexts, so I would rather leave it as is until we have two or three other things that it makes sense to group this with.

  private static String getFieldName(Field f) {
    QueryParam queryParam = f.getAnnotation(QueryParam.class);
    JsonProperty jsonProperty = f.getAnnotation(JsonProperty.class);
    if (queryParam != null) {
      return queryParam.value();
    } else if (jsonProperty != null) {
      return jsonProperty.value();
    } else {
      return f.getName();
    }
  }

  private static Map<String, Object> flattenObject(Object o) {
    // LinkedHashMap ensures consistent ordering of fields.
    Map<String, Object> result = new LinkedHashMap<>();
    Field[] fields = o.getClass().getDeclaredFields();
    for (Field f : fields) {
      f.setAccessible(true);
      try {
        String name = getFieldName(f);
        Object value = f.get(o);
        if (value == null) {
          continue;
        }
        // check if object is a primitive type
        if (primitiveTypes.contains(f.getType()) || Iterable.class.isAssignableFrom(f.getType())) {
          result.put(name, value);
        } else {
          // recursively flatten the object
          Map<String, Object> flattened = flattenObject(value);
          for (Map.Entry<String, Object> entry : flattened.entrySet()) {
            result.put(name + "." + entry.getKey(), entry.getValue());
          }
        }
      } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
      } finally {
        f.setAccessible(false);
      }
    }
    return result;
  }
}
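
For illustration, the sketch below shows how the new serializer flattens a nested, annotated request into dotted query-parameter keys. The request classes are hypothetical stand-ins (the real SDK models are code-generated); only GrpcTranscodingQueryParamsSerializer, QueryParam, and JsonProperty above are taken as given.

import com.databricks.sdk.core.GrpcTranscodingQueryParamsSerializer;
import com.databricks.sdk.core.GrpcTranscodingQueryParamsSerializer.HeaderEntry;
import com.databricks.sdk.support.QueryParam;
import com.fasterxml.jackson.annotation.JsonProperty;

public class QueryParamsSerializerExample {
  // Hypothetical nested filter; wire names come from the JsonProperty annotations.
  static class TimeWindow {
    @JsonProperty("start_time_ms")
    private final Long startTimeMs = 1690243200000L;

    @JsonProperty("end_time_ms")
    private final Long endTimeMs = 1690329600000L;
  }

  // Hypothetical top-level request; only fields annotated with @QueryParam survive serialize().
  static class ListRequest {
    @QueryParam("filter_by")
    private final TimeWindow filterBy = new TimeWindow();

    // Not annotated with @QueryParam, so serialize() removes this top-level field.
    private final String internalState = "ignored";
  }

  public static void main(String[] args) {
    for (HeaderEntry e : GrpcTranscodingQueryParamsSerializer.serialize(new ListRequest())) {
      System.out.println(e.getKey() + "=" + e.getValue());
    }
    // Expected output, in field-declaration order (flattenObject uses a LinkedHashMap):
    //   filter_by.start_time_ms=1690243200000
    //   filter_by.end_time_ms=1690329600000
  }
}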

File: FormRequest.java
@@ -1,14 +1,22 @@
 package com.databricks.sdk.core.http;
 
-import java.util.Map;
+import java.util.*;
 
 public class FormRequest extends Request {
   public FormRequest(String url, Map<String, String> form) {
     this(POST, url, form);
   }
 
   public FormRequest(String method, String url, Map<String, String> form) {
-    super(method, url, mapToQuery(form));
+    super(method, url, mapToQuery(wrapValuesInList(form)));
     withHeader("Content-Type", "application/x-www-form-urlencoded");
   }
 
+  static Map<String, List<String>> wrapValuesInList(Map<String, String> map) {
+    Map<String, List<String>> m = new LinkedHashMap<>();
+    for (Map.Entry<String, String> entry : map.entrySet()) {
+      m.put(entry.getKey(), Collections.singletonList(entry.getValue()));
+    }
+    return m;
+  }
 }
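
A brief note on the data flow, sketched under the assumption that the third Request constructor argument is the request body (as the Content-Type header suggests): each single-valued form field is wrapped in a singleton list so the form can pass through the now list-valued mapToQuery, and the encoded output is unchanged. The values mirror FormRequestTest further down; the example class name is hypothetical.

import com.databricks.sdk.core.http.FormRequest;
import java.util.LinkedHashMap;
import java.util.Map;

public class FormRequestExample {
  public static void main(String[] args) {
    Map<String, String> form = new LinkedHashMap<>();
    form.put("foo", "bar");
    form.put("new", "foo");

    // wrapValuesInList(form) -> {foo=[bar], new=[foo]}
    // mapToQuery(...)        -> "foo=bar&new=foo", which the constructor passes to Request
    //                           as the form-encoded body, along with the header
    //                           Content-Type: application/x-www-form-urlencoded
    FormRequest request = new FormRequest("/foo", form);
  }
}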

File: Request.java
@@ -16,7 +16,7 @@ public class Request {
   private final String method;
   private String url;
   private final Map<String, String> headers = new HashMap<>();
-  private final Map<String, String> query = new TreeMap<>();
+  private final Map<String, List<String>> query = new TreeMap<>();
   private final String body;
 
   public Request(String method, String url) {
@@ -40,7 +40,12 @@ public Request withHeader(String key, String value) {
   }
 
   public Request withQueryParam(String key, String value) {
-    query.put(key, value);
+    List<String> values = query.get(key);
+    if (values == null) {
+      values = new ArrayList<>();
+    }
+    values.add(value);
+    query.put(key, values);

Contributor: Should we handle the use case for when key exists?

Contributor (author): If the key exists, values is not null, so we just skip the if block. Note that I've refactored this, as the put at the end is only needed in the case that values is null.

     return this;
   }

@@ -49,13 +54,15 @@ public Request withUrl(String url) {
     return this;
   }
 
-  protected static String mapToQuery(Map<String, String> in) {
+  protected static String mapToQuery(Map<String, List<String>> in) {
     StringJoiner joiner = new StringJoiner("&");
-    for (Map.Entry<String, String> entry : in.entrySet()) {
+    for (Map.Entry<String, List<String>> entry : in.entrySet()) {
       try {
         String encodedKey = URLEncoder.encode(entry.getKey(), StandardCharsets.UTF_8.name());
-        String encodedValue = URLEncoder.encode(entry.getValue(), StandardCharsets.UTF_8.name());
-        joiner.add(encodedKey + "=" + encodedValue);
+        for (String value : entry.getValue()) {
+          String encodedValue = URLEncoder.encode(value, StandardCharsets.UTF_8.name());
+          joiner.add(encodedKey + "=" + encodedValue);
+        }
       } catch (UnsupportedEncodingException e) {
         throw new DatabricksException("Unable to encode query parameter: " + e.getMessage(), e);
       }
@@ -116,7 +123,7 @@ public Map<String, String> getHeaders() {
     return headers;
   }
 
-  public Map<String, String> getQuery() {
+  public Map<String, List<String>> getQuery() {
     return query;
   }
 
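
To make the effect of these Request changes concrete, here is a minimal sketch; the parameter name and path are illustrative, not taken from the SDK's generated models. Repeated withQueryParam calls with the same key now accumulate values, and mapToQuery emits one key=value pair per value, which is how repeated fields are expressed in gRPC-transcoded query strings.

import com.databricks.sdk.core.http.Request;

public class MultiValueQueryExample {
  public static void main(String[] args) {
    Request req = new Request("GET", "/api/2.0/example");
    req.withQueryParam("filter_by.user_ids", "123");
    req.withQueryParam("filter_by.user_ids", "456");

    // req.getQuery() -> {filter_by.user_ids=[123, 456]}
    System.out.println(req.getQuery());
    // mapToQuery(req.getQuery()) then renders the query string as:
    //   filter_by.user_ids=123&filter_by.user_ids=456
  }
}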

File: Paginator.java
@@ -71,6 +71,9 @@ private Iterator<T> outerIterator() {
     return new Iterator<T>() {
       @Override
      public boolean hasNext() {
+        if (currentPage == null) {
+          return false;
+        }

Comment on lines +74 to +76

Contributor: Curious -- why wasn't this required before?

Contributor (author): This is kind of a separate bugfix, but I think that the query history API does not include the "results" field at all if there are no results. This is handled in flipNextPage line 63, but not in hasNext().

         if (currentPage.hasNext()) {
           return true;
         }
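
For context on the review exchange above, here is a heavily simplified, hypothetical stand-in for the paginator's outer iterator (not the SDK's actual Paginator, whose flipNextPage and paging logic are more involved): when a response omits the results field, the current page iterator can be null, and hasNext() has to report exhaustion instead of throwing a NullPointerException.

import java.util.Iterator;
import java.util.NoSuchElementException;

// Illustrative only: a page iterator whose underlying page may be null
// when the service returned no results at all.
class NullSafePageIterator<T> implements Iterator<T> {
  private final Iterator<T> currentPage;

  NullSafePageIterator(Iterator<T> currentPage) {
    this.currentPage = currentPage;
  }

  @Override
  public boolean hasNext() {
    // Mirrors the fix above: a missing page simply means iteration is over.
    if (currentPage == null) {
      return false;
    }
    return currentPage.hasNext();
  }

  @Override
  public T next() {
    if (!hasNext()) {
      throw new NoSuchElementException();
    }
    return currentPage.next();
  }
}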

File: FormRequestTest.java
@@ -2,14 +2,14 @@
 
 import static org.junit.jupiter.api.Assertions.*;
 
+import java.util.LinkedHashMap;
 import java.util.Map;
-import java.util.TreeMap;
 import org.junit.jupiter.api.Test;
 
 class FormRequestTest {
   @Test
   public void itWorks() {
-    Map<String, String> data = new TreeMap<>();
+    Map<String, String> data = new LinkedHashMap<>();
     data.put("foo", "bar");
     data.put("new", "foo");
     FormRequest request = new FormRequest("/foo", data);

File: SqlIT.java (new file)
@@ -0,0 +1,40 @@
package com.databricks.sdk.integration;

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.integration.framework.EnvContext;
import com.databricks.sdk.integration.framework.EnvTest;
import com.databricks.sdk.service.sql.ListQueryHistoryRequest;
import com.databricks.sdk.service.sql.QueryFilter;
import com.databricks.sdk.service.sql.QueryInfo;
import com.databricks.sdk.service.sql.TimeRange;
import java.util.Arrays;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@EnvContext("workspace")
@ExtendWith(EnvTest.class)
public class SqlIT {
  @Test
  void listQueryHistoryTimeRange(WorkspaceClient w) {
    TimeRange timeRange =
        new TimeRange().setStartTimeMs(1690243200000L).setEndTimeMs(1690329600000L);
    ListQueryHistoryRequest request =
        new ListQueryHistoryRequest()
            .setFilterBy(new QueryFilter().setQueryStartTimeRange(timeRange));
    Iterable<QueryInfo> queries = w.queryHistory().list(request);
    for (QueryInfo query : queries) {
      System.out.println(query);
    }
  }

  @Test
  void listQueryHistoryUserIds(WorkspaceClient w) {
    ListQueryHistoryRequest request =
        new ListQueryHistoryRequest()
            .setFilterBy(new QueryFilter().setUserIds(Arrays.asList(123L, 456L)));
    Iterable<QueryInfo> queries = w.queryHistory().list(request);
    for (QueryInfo query : queries) {
      System.out.println(query);
    }
  }
}