Backport to branch(3.15): Add support for new date, time data types (#2559)

Co-authored-by: inv-jishnu <31100916+inv-jishnu@users.noreply.github.com>
feeblefakie and inv-jishnu authored Mar 3, 2025
1 parent 4b276e5 commit 628b487
Showing 7 changed files with 185 additions and 3 deletions.
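
The changed files extend the data loader's export tasks (Result-to-string conversion) and the import-side column factory (string-to-typed-Column conversion) to cover the new DATE, TIME, TIMESTAMP, and TIMESTAMPTZ data types. The following sketch is not part of the commit; it only illustrates the round trip, assuming the column factory methods behave as they are used in the diff below, and the column names ("col8", "col11") are arbitrary examples.

// Sketch only (not in this commit): java.time values are rendered with their
// ISO-8601 toString() on export and parsed back into typed columns on import.
import com.scalar.db.io.Column;
import com.scalar.db.io.DateColumn;
import com.scalar.db.io.TimestampTZColumn;
import java.time.Instant;
import java.time.LocalDate;

public class DateTimeRoundTripSketch {
  public static void main(String[] args) {
    // Export side: the value is converted with toString().
    LocalDate date = LocalDate.of(2000, 1, 1);
    Instant instant = Instant.ofEpochMilli(1740041740L);
    System.out.println(date);     // 2000-01-01
    System.out.println(instant);  // 1970-01-21T03:20:41.740Z

    // Import side: the string is parsed and wrapped in a typed column;
    // a null source value would map to DateColumn.ofNull(...) instead.
    Column<?> dateColumn = DateColumn.of("col8", LocalDate.parse("2000-01-01"));
    Column<?> timestampTZColumn =
        TimestampTZColumn.of("col11", Instant.parse("1970-01-21T03:20:41.740Z"));
    System.out.println(dateColumn);
    System.out.println(timestampTZColumn);
  }
}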
@@ -8,6 +8,10 @@
import com.scalar.db.io.DataType;
import com.scalar.db.transaction.consensuscommit.ConsensusCommitUtils;
import java.nio.charset.Charset;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Base64;
import java.util.Iterator;
import java.util.LinkedHashSet;
@@ -144,6 +148,26 @@ private String convertToString(Result result, String columnName, DataType dataType)
case TEXT:
value = result.getText(columnName);
break;
case DATE:
LocalDate date = result.getDate(columnName);
assert date != null;
value = date.toString();
break;
case TIME:
LocalTime time = result.getTime(columnName);
assert time != null;
value = time.toString();
break;
case TIMESTAMP:
LocalDateTime localDateTime = result.getTimestamp(columnName);
assert localDateTime != null;
value = localDateTime.toString();
break;
case TIMESTAMPTZ:
Instant instant = result.getTimestampTZ(columnName);
assert instant != null;
value = instant.toString();
break;
default:
throw new AssertionError("Unknown data type:" + dataType);
}
@@ -7,6 +7,10 @@
import com.scalar.db.io.DataType;
import com.scalar.db.transaction.consensuscommit.ConsensusCommitUtils;
import java.nio.charset.Charset;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Base64;
import java.util.LinkedHashSet;
import java.util.List;
@@ -117,6 +121,26 @@ private void addToObjectNode(
byte[] encoded = Base64.getEncoder().encode(result.getBlobAsBytes(columnName));
objectNode.put(columnName, new String(encoded, Charset.defaultCharset()));
break;
case DATE:
LocalDate date = result.getDate(columnName);
assert date != null;
objectNode.put(columnName, date.toString());
break;
case TIME:
LocalTime time = result.getTime(columnName);
assert time != null;
objectNode.put(columnName, time.toString());
break;
case TIMESTAMP:
LocalDateTime localDateTime = result.getTimestamp(columnName);
assert localDateTime != null;
objectNode.put(columnName, localDateTime.toString());
break;
case TIMESTAMPTZ:
Instant instant = result.getTimestampTZ(columnName);
assert instant != null;
objectNode.put(columnName, instant.toString());
break;
default:
throw new AssertionError("Unknown data type:" + dataType);
}
@@ -8,6 +8,10 @@
import com.scalar.db.io.DataType;
import com.scalar.db.transaction.consensuscommit.ConsensusCommitUtils;
import java.nio.charset.Charset;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Base64;
import java.util.LinkedHashSet;
import java.util.List;
@@ -128,6 +132,26 @@ private void addToObjectNode(
byte[] encoded = Base64.getEncoder().encode(result.getBlobAsBytes(columnName));
objectNode.put(columnName, new String(encoded, Charset.defaultCharset()));
break;
case DATE:
LocalDate date = result.getDate(columnName);
assert date != null;
objectNode.put(columnName, date.toString());
break;
case TIME:
LocalTime time = result.getTime(columnName);
assert time != null;
objectNode.put(columnName, time.toString());
break;
case TIMESTAMP:
LocalDateTime localDateTime = result.getTimestamp(columnName);
assert localDateTime != null;
objectNode.put(columnName, localDateTime.toString());
break;
case TIMESTAMPTZ:
Instant instant = result.getTimestampTZ(columnName);
assert instant != null;
objectNode.put(columnName, instant.toString());
break;
default:
throw new AssertionError("Unknown data type:" + dataType);
}
@@ -8,10 +8,18 @@
import com.scalar.db.io.BooleanColumn;
import com.scalar.db.io.Column;
import com.scalar.db.io.DataType;
import com.scalar.db.io.DateColumn;
import com.scalar.db.io.DoubleColumn;
import com.scalar.db.io.FloatColumn;
import com.scalar.db.io.IntColumn;
import com.scalar.db.io.TextColumn;
import com.scalar.db.io.TimeColumn;
import com.scalar.db.io.TimestampColumn;
import com.scalar.db.io.TimestampTZColumn;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Base64;
import javax.annotation.Nullable;

@@ -73,6 +81,22 @@ public static Column<?> createColumnFromValue(
return value != null
? BlobColumn.of(columnName, Base64.getDecoder().decode(value))
: BlobColumn.ofNull(columnName);
case DATE:
return value != null
? DateColumn.of(columnName, LocalDate.parse(value))
: DateColumn.ofNull(columnName);
case TIME:
return value != null
? TimeColumn.of(columnName, LocalTime.parse(value))
: TimeColumn.ofNull(columnName);
case TIMESTAMP:
return value != null
? TimestampColumn.of(columnName, LocalDateTime.parse(value))
: TimestampColumn.ofNull(columnName);
case TIMESTAMPTZ:
return value != null
? TimestampTZColumn.of(columnName, Instant.parse(value))
: TimestampTZColumn.ofNull(columnName);
default:
throw new AssertionError();
}
@@ -12,12 +12,20 @@
import com.scalar.db.io.BooleanColumn;
import com.scalar.db.io.Column;
import com.scalar.db.io.DataType;
import com.scalar.db.io.DateColumn;
import com.scalar.db.io.DoubleColumn;
import com.scalar.db.io.FloatColumn;
import com.scalar.db.io.IntColumn;
import com.scalar.db.io.TextColumn;
import com.scalar.db.io.TimeColumn;
import com.scalar.db.io.TimestampColumn;
import com.scalar.db.io.TimestampTZColumn;
import com.scalar.db.transaction.consensuscommit.Attribute;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.*;

/** Utils for the service unit tests */
@@ -31,6 +39,10 @@ public class UnitTestUtils {
public static final String TEST_COLUMN_5 = "col5";
public static final String TEST_COLUMN_6 = "col6";
public static final String TEST_COLUMN_7 = "col7";
public static final String TEST_COLUMN_8 = "col8";
public static final String TEST_COLUMN_9 = "col9";
public static final String TEST_COLUMN_10 = "col10";
public static final String TEST_COLUMN_11 = "col11";

public static final String TEST_VALUE_TEXT = "test value";

@@ -44,6 +56,10 @@ public class UnitTestUtils {
public static final Long TEST_VALUE_LONG = BigIntColumn.MAX_VALUE;
public static final boolean TEST_VALUE_BOOLEAN = true;
public static final double TEST_VALUE_DOUBLE = Double.MIN_VALUE;
public static final LocalDate TEST_VALUE_DATE = LocalDate.of(2000, 1, 1);
public static final LocalTime TEST_VALUE_TIME = LocalTime.of(1, 1, 1);
public static final LocalDateTime TEST_VALUE_DATE_TIME = LocalDateTime.of(2000, 1, 1, 1, 1);
public static final Instant TEST_VALUE_INSTANT = Instant.ofEpochMilli(1740041740);
public static final String TEST_CSV_DELIMITER = ";";

public static TableMetadata createTestTableMetadata() {
@@ -55,6 +71,10 @@ public static TableMetadata createTestTableMetadata() {
.addColumn(TEST_COLUMN_5, DataType.DOUBLE)
.addColumn(TEST_COLUMN_6, DataType.TEXT)
.addColumn(TEST_COLUMN_7, BLOB)
.addColumn(TEST_COLUMN_8, DataType.DATE)
.addColumn(TEST_COLUMN_9, DataType.TIME)
.addColumn(TEST_COLUMN_10, DataType.TIMESTAMP)
.addColumn(TEST_COLUMN_11, DataType.TIMESTAMPTZ)
.addColumn(Attribute.BEFORE_PREFIX + TEST_COLUMN_4, DataType.FLOAT)
.addColumn(Attribute.BEFORE_PREFIX + TEST_COLUMN_5, DataType.DOUBLE)
.addColumn(Attribute.BEFORE_PREFIX + TEST_COLUMN_6, DataType.TEXT)
@@ -85,6 +105,10 @@ public static ObjectNode getOutputDataWithMetadata() {
rootNode.put(TEST_COLUMN_5, TEST_VALUE_DOUBLE);
rootNode.put(TEST_COLUMN_6, TEST_VALUE_TEXT);
rootNode.put(TEST_COLUMN_7, TEST_VALUE_BLOB);
rootNode.put(TEST_COLUMN_8, TEST_VALUE_DATE.toString());
rootNode.put(TEST_COLUMN_9, TEST_VALUE_TIME.toString());
rootNode.put(TEST_COLUMN_10, TEST_VALUE_DATE_TIME.toString());
rootNode.put(TEST_COLUMN_11, TEST_VALUE_INSTANT.toString());
rootNode.put(Attribute.BEFORE_PREFIX + TEST_COLUMN_4, TEST_VALUE_FLOAT);
rootNode.put(Attribute.BEFORE_PREFIX + TEST_COLUMN_5, TEST_VALUE_DOUBLE);
rootNode.put(Attribute.BEFORE_PREFIX + TEST_COLUMN_6, TEST_VALUE_TEXT);
@@ -112,6 +136,11 @@ public static ObjectNode getOutputDataWithoutMetadata() {
rootNode.put(TEST_COLUMN_5, TEST_VALUE_DOUBLE);
rootNode.put(TEST_COLUMN_6, TEST_VALUE_TEXT);
rootNode.put(TEST_COLUMN_7, TEST_VALUE_BLOB);
rootNode.put(TEST_COLUMN_8, TEST_VALUE_DATE.toString());
rootNode.put(TEST_COLUMN_9, TEST_VALUE_TIME.toString());
rootNode.put(TEST_COLUMN_10, TEST_VALUE_DATE_TIME.toString());
rootNode.put(TEST_COLUMN_11, TEST_VALUE_INSTANT.toString());

return rootNode;
}

@@ -124,6 +153,10 @@ public static List<String> getColumnsListOfMetadata() {
projectedColumns.add(TEST_COLUMN_5);
projectedColumns.add(TEST_COLUMN_6);
projectedColumns.add(TEST_COLUMN_7);
projectedColumns.add(TEST_COLUMN_8);
projectedColumns.add(TEST_COLUMN_9);
projectedColumns.add(TEST_COLUMN_10);
projectedColumns.add(TEST_COLUMN_11);
projectedColumns.add(Attribute.BEFORE_PREFIX + TEST_COLUMN_4);
projectedColumns.add(Attribute.BEFORE_PREFIX + TEST_COLUMN_5);
projectedColumns.add(Attribute.BEFORE_PREFIX + TEST_COLUMN_6);
@@ -150,6 +183,10 @@ public static Map<String, DataType> getColumnData() {
columnData.put(TEST_COLUMN_5, DataType.DOUBLE);
columnData.put(TEST_COLUMN_6, DataType.TEXT);
columnData.put(TEST_COLUMN_7, BLOB);
columnData.put(TEST_COLUMN_8, DataType.DATE);
columnData.put(TEST_COLUMN_9, DataType.TIME);
columnData.put(TEST_COLUMN_10, DataType.TIMESTAMP);
columnData.put(TEST_COLUMN_11, DataType.TIMESTAMPTZ);
columnData.put(Attribute.BEFORE_PREFIX + TEST_COLUMN_4, DataType.FLOAT);
columnData.put(Attribute.BEFORE_PREFIX + TEST_COLUMN_5, DataType.DOUBLE);
columnData.put(Attribute.BEFORE_PREFIX + TEST_COLUMN_6, DataType.TEXT);
@@ -176,6 +213,10 @@ public static Map<String, Column<?>> createTestValues() {
values.put(TEST_COLUMN_5, DoubleColumn.of(TEST_COLUMN_5, TEST_VALUE_DOUBLE));
values.put(TEST_COLUMN_6, TextColumn.of(TEST_COLUMN_6, TEST_VALUE_TEXT));
values.put(TEST_COLUMN_7, BlobColumn.of(TEST_COLUMN_7, TEST_VALUE_BLOB));
values.put(TEST_COLUMN_8, DateColumn.of(TEST_COLUMN_8, TEST_VALUE_DATE));
values.put(TEST_COLUMN_9, TimeColumn.of(TEST_COLUMN_9, TEST_VALUE_TIME));
values.put(TEST_COLUMN_10, TimestampColumn.of(TEST_COLUMN_10, TEST_VALUE_DATE_TIME));
values.put(TEST_COLUMN_11, TimestampTZColumn.of(TEST_COLUMN_11, TEST_VALUE_INSTANT));
values.put(
Attribute.BEFORE_PREFIX + TEST_COLUMN_4,
FloatColumn.of(Attribute.BEFORE_PREFIX + TEST_COLUMN_4, TEST_VALUE_FLOAT));
@@ -219,6 +260,14 @@ public static String getSourceTestValue(DataType dataType) {
return TEST_VALUE_BLOB_BASE64;
case BOOLEAN:
return Boolean.toString(TEST_VALUE_BOOLEAN);
case DATE:
return TEST_VALUE_DATE.toString();
case TIME:
return TEST_VALUE_TIME.toString();
case TIMESTAMP:
return TEST_VALUE_DATE_TIME.toString();
case TIMESTAMPTZ:
return TEST_VALUE_INSTANT.toString();
case TEXT:
default:
return TEST_VALUE_TEXT;
@@ -39,7 +39,7 @@ void process_withEmptyResultList_shouldReturnEmptyString() {
@Test
void process_withValidResultList_shouldReturnValidCsvString() {
String expectedOutput =
"9007199254740992,2147483647,true,0.000000000000000000000000000000000000000000001401298464324817,0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000049,test value,YmxvYiB0ZXN0IHZhbHVl";
"9007199254740992,2147483647,true,0.000000000000000000000000000000000000000000001401298464324817,0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000049,test value,YmxvYiB0ZXN0IHZhbHVl,2000-01-01,01:01:01,2000-01-01T01:01,1970-01-21T03:20:41.740Z";
Map<String, Column<?>> values = UnitTestUtils.createTestValues();
Result result = new ResultImpl(values, mockMetadata);
List<Result> resultList = new ArrayList<>();
@@ -52,7 +52,7 @@ void process_withValidResultListWithMetadata_shouldReturnValidCsvString() {
void process_withValidResultListWithMetadata_shouldReturnValidCsvString() {
csvProducerTask = new CsvProducerTask(true, projectedColumns, mockMetadata, columnData, ",");
String expectedOutput =
"9007199254740992,2147483647,true,0.000000000000000000000000000000000000000000001401298464324817,0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000049,test value,YmxvYiB0ZXN0IHZhbHVl,0.000000000000000000000000000000000000000000001401298464324817,0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000049,test value,YmxvYiB0ZXN0IHZhbHVl,txt value 464654654,2147483647,2147483647,9007199254740992,9007199254740992,test value,2147483647,2147483647,9007199254740992,9007199254740992";
"9007199254740992,2147483647,true,0.000000000000000000000000000000000000000000001401298464324817,0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000049,test value,YmxvYiB0ZXN0IHZhbHVl,2000-01-01,01:01:01,2000-01-01T01:01,1970-01-21T03:20:41.740Z,0.000000000000000000000000000000000000000000001401298464324817,0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000049,test value,YmxvYiB0ZXN0IHZhbHVl,txt value 464654654,2147483647,2147483647,9007199254740992,9007199254740992,test value,2147483647,2147483647,9007199254740992,9007199254740992";
Map<String, Column<?>> values = UnitTestUtils.createTestValues();
Result result = new ResultImpl(values, mockMetadata);
List<Result> resultList = new ArrayList<>();
@@ -10,11 +10,19 @@
import com.scalar.db.io.BooleanColumn;
import com.scalar.db.io.Column;
import com.scalar.db.io.DataType;
import com.scalar.db.io.DateColumn;
import com.scalar.db.io.DoubleColumn;
import com.scalar.db.io.FloatColumn;
import com.scalar.db.io.IntColumn;
import com.scalar.db.io.TextColumn;
import com.scalar.db.io.TimeColumn;
import com.scalar.db.io.TimestampColumn;
import com.scalar.db.io.TimestampTZColumn;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Base64;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
@@ -61,7 +69,36 @@ private static Stream<Arguments> provideColumnsForCreateColumnFromValue() {
"blobColumn",
Base64.getEncoder().encodeToString("binary".getBytes(StandardCharsets.UTF_8)),
BlobColumn.of("blobColumn", "binary".getBytes(StandardCharsets.UTF_8))),
Arguments.of(DataType.BLOB, "blobColumn", null, BlobColumn.ofNull("blobColumn")));
Arguments.of(DataType.BLOB, "blobColumn", null, BlobColumn.ofNull("blobColumn")),
Arguments.of(
DataType.DATE,
"dateColumn",
LocalDate.of(2000, 1, 1).toString(),
DateColumn.of("dateColumn", LocalDate.of(2000, 1, 1))),
Arguments.of(DataType.DATE, "dateColumn", null, DateColumn.ofNull("dateColumn")),
Arguments.of(
DataType.TIME,
"timeColumn",
LocalTime.of(1, 1, 1).toString(),
TimeColumn.of("timeColumn", LocalTime.of(1, 1, 1))),
Arguments.of(DataType.TIME, "timeColumn", null, TimeColumn.ofNull("timeColumn")),
Arguments.of(
DataType.TIMESTAMP,
"timestampColumn",
LocalDateTime.of(2000, 1, 1, 1, 1).toString(),
TimestampColumn.of("timestampColumn", LocalDateTime.of(2000, 1, 1, 1, 1))),
Arguments.of(
DataType.TIMESTAMP, "timestampColumn", null, TimestampColumn.ofNull("timestampColumn")),
Arguments.of(
DataType.TIMESTAMPTZ,
"timestampTZColumn",
Instant.ofEpochMilli(1940041740).toString(),
TimestampTZColumn.of("timestampTZColumn", Instant.ofEpochMilli(1940041740))),
Arguments.of(
DataType.TIMESTAMPTZ,
"timestampTZColumn",
null,
TimestampTZColumn.ofNull("timestampTZColumn")));
}

@ParameterizedTest