java conversions
mengxr committed Oct 9, 2014
1 parent 618e349 commit 905bb89
Showing 5 changed files with 44 additions and 10 deletions.
@@ -134,6 +134,7 @@ case class AttributeReference(
var h = 17
h = h * 37 + exprId.hashCode()
h = h * 37 + dataType.hashCode()
h = h * 37 + metadata.hashCode()
h
}

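The hunk above folds the new metadata field into AttributeReference's hand-rolled hashCode. As a reference for the pattern, here is a minimal standalone Java sketch of the same multiply-and-add hash combining; the class and method names are illustrative, not Spark's:

final class HashCombineSketch {
  // Start from a small seed and fold each significant field in with an odd
  // multiplier so that every field, and its position, influences the result.
  static int combineHashes(Object exprId, Object dataType, Object metadata) {
    int h = 17;
    h = h * 37 + exprId.hashCode();
    h = h * 37 + dataType.hashCode();
    h = h * 37 + metadata.hashCode(); // the field this commit adds to the hash
    return h;
  }
}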
sql/core/src/main/java/org/apache/spark/sql/api/java/DataType.java (24 changes: 19 additions & 5 deletions)
@@ -17,9 +17,7 @@

package org.apache.spark.sql.api.java;

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.*;

/**
* The base type of all Spark SQL data types.
@@ -146,15 +144,31 @@ public static MapType createMapType(
* Creates a StructField by specifying the name ({@code name}), data type ({@code dataType}),
* whether values of this field can be null ({@code nullable}), and its metadata
* ({@code metadata}).
*/
public static StructField createStructField(String name, DataType dataType, boolean nullable) {
public static StructField createStructField(
String name,
DataType dataType,
boolean nullable,
Map<String, Object> metadata) {
if (name == null) {
throw new IllegalArgumentException("name should not be null.");
}
if (dataType == null) {
throw new IllegalArgumentException("dataType should not be null.");
}
if (metadata == null) {
throw new IllegalArgumentException("metadata should not be null.");
}

return new StructField(name, dataType, nullable, metadata);
}

return new StructField(name, dataType, nullable);
/**
* Creates a StructField with empty metadata.
*
* @see #createStructField(String, DataType, boolean, java.util.Map)
*/
public static StructField createStructField(String name, DataType dataType, boolean nullable) {
return createStructField(name, dataType, nullable, new HashMap<String, Object>());
}

/**
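The two hunks above give the Java API a four-argument createStructField factory and keep the three-argument overload as a shorthand that fills in an empty metadata map. A hedged usage sketch follows; it assumes the static type singletons such as DataType.StringType and DataType.IntegerType that this Java API exposes, and the field names and metadata keys are made up for illustration:

import java.util.HashMap;
import java.util.Map;

import org.apache.spark.sql.api.java.DataType;
import org.apache.spark.sql.api.java.StructField;

public class CreateStructFieldSketch {
  public static void main(String[] args) {
    // Metadata is a plain map from string keys to simple, JSON-serializable values.
    Map<String, Object> metadata = new HashMap<String, Object>();
    metadata.put("comment", "age in years");

    // New four-argument factory: name, data type, nullability, metadata.
    StructField age =
        DataType.createStructField("age", DataType.IntegerType, true, metadata);

    // The three-argument overload delegates to the new one with an empty map.
    StructField name =
        DataType.createStructField("name", DataType.StringType, true);

    System.out.println(age.getMetadata());   // {comment=age in years}
    System.out.println(name.getMetadata());  // {}
  }
}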
@@ -17,27 +17,37 @@

package org.apache.spark.sql.api.java;

import java.util.Map;

/**
* A StructField object represents a field in a StructType object.
* A StructField object comprises four fields, {@code String name}, {@code DataType dataType},
* {@code boolean nullable}, and {@code Map<String, Object> metadata}.
* The field of {@code name} is the name of a StructField.
* The field of {@code dataType} specifies the data type of a StructField.
* The field of {@code nullable} specifies if values of a StructField can contain {@code null}
* values.
* The field of {@code metadata} provides extra information about the StructField; it is a map
* from string to simple types that can be serialized to JSON automatically.
*
* To create a {@link StructField},
* {@link DataType#createStructField(String, DataType, boolean)}
* {@link DataType#createStructField(String, DataType, boolean, Map)}
* should be used.
*/
public class StructField {
private String name;
private DataType dataType;
private boolean nullable;
private Map<String, Object> metadata;

protected StructField(String name, DataType dataType, boolean nullable) {
protected StructField(
String name,
DataType dataType,
boolean nullable,
Map<String, Object> metadata) {
this.name = name;
this.dataType = dataType;
this.nullable = nullable;
this.metadata = metadata;
}

public String getName() {
@@ -52,6 +62,10 @@ public boolean isNullable() {
return nullable;
}

public Map<String, Object> getMetadata() {
return metadata;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -62,6 +76,7 @@ public boolean equals(Object o) {
if (nullable != that.nullable) return false;
if (!dataType.equals(that.dataType)) return false;
if (!name.equals(that.name)) return false;
if (!metadata.equals(that.metadata)) return false;

return true;
}
@@ -71,6 +86,7 @@ public int hashCode() {
int result = name.hashCode();
result = 31 * result + dataType.hashCode();
result = 31 * result + (nullable ? 1 : 0);
result = 31 * result + metadata.hashCode();
return result;
}
}
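Because equals and hashCode now fold in metadata, two StructFields that agree on name, data type, and nullability but carry different metadata are no longer considered equal. A small sketch of that consequence, again with made-up field names and metadata keys:

import java.util.HashMap;
import java.util.Map;

import org.apache.spark.sql.api.java.DataType;
import org.apache.spark.sql.api.java.StructField;

public class StructFieldEqualitySketch {
  public static void main(String[] args) {
    Map<String, Object> meta = new HashMap<String, Object>();
    meta.put("source", "sensor-a");

    StructField withMeta =
        DataType.createStructField("x", DataType.DoubleType, false, meta);
    StructField withoutMeta =
        DataType.createStructField("x", DataType.DoubleType, false);

    // The fields differ only in metadata, so they compare unequal and,
    // because metadata now feeds the hash, also hash differently here.
    System.out.println(withMeta.equals(withoutMeta));                   // false
    System.out.println(withMeta.hashCode() == withoutMeta.hashCode());  // false
  }
}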
@@ -31,7 +31,8 @@ protected[sql] object DataTypeConversions {
JDataType.createStructField(
scalaStructField.name,
asJavaDataType(scalaStructField.dataType),
scalaStructField.nullable)
scalaStructField.nullable,
scalaStructField.metadata.asJava.asInstanceOf[java.util.Map[String, Object]])
}

/**
@@ -67,7 +68,8 @@ protected[sql] object DataTypeConversions {
StructField(
javaStructField.getName,
asScalaDataType(javaStructField.getDataType),
javaStructField.isNullable)
javaStructField.isNullable,
javaStructField.getMetadata.asScala.toMap)
}

/**
@@ -70,7 +70,8 @@ class ScalaSideDataTypeConversionSuite extends FunSuite {
SStructField("simpleArray", simpleScalaArrayType, true) ::
SStructField("simpleMap", simpleScalaMapType, true) ::
SStructField("simpleStruct", simpleScalaStructType, true) ::
SStructField("boolean", org.apache.spark.sql.BooleanType, false) :: Nil)
SStructField("boolean", org.apache.spark.sql.BooleanType, false) ::
SStructField("withMeta", org.apache.spark.sql.DoubleType, false, Map("name" -> "age")) :: Nil)
checkDataType(complexScalaStructType)

// Complex ArrayType.
