Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Tiramisu Splitting Up Work] Serializer implementations #19

Merged
merged 3 commits into from
Jan 2, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.common.cache.tier;

import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;

import java.io.IOException;
import java.util.Arrays;

/**
 * Serializer turning a {@link BytesReference} into its raw {@code byte[]} form.
 * This class does not get passed to ehcache itself, so it's not required that
 * classes match after deserialization.
 */
public class BytesReferenceSerializer implements Serializer<BytesReference, byte[]> {

    public BytesReferenceSerializer() {}

    /**
     * Copies the reference's content into a fresh byte array.
     *
     * @param object the reference to serialize; {@code null} yields {@code null}
     *               (symmetric with {@link #deserialize(byte[])})
     * @return the serialized bytes, or {@code null} for {@code null} input
     */
    @Override
    public byte[] serialize(BytesReference object) {
        if (object == null) {
            return null;
        }
        return BytesReference.toBytes(object);
    }

    /**
     * Wraps the bytes in a {@link BytesArray} (no copy).
     *
     * @param bytes the serialized form; {@code null} yields {@code null}
     * @return the deserialized reference, or {@code null} for {@code null} input
     */
    @Override
    public BytesReference deserialize(byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        return new BytesArray(bytes);
    }

    /**
     * Checks whether {@code object}'s serialized form equals {@code bytes}.
     * Null-safe: two nulls compare equal.
     */
    @Override
    public boolean equals(BytesReference object, byte[] bytes) {
        // serialize(null) == null and Arrays.equals is null-safe, so a null
        // on either side is handled without an NPE.
        return Arrays.equals(serialize(object), bytes);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.indices;

import org.opensearch.OpenSearchException;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.BytesStreamInput;
import org.opensearch.common.cache.tier.Serializer;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

/**
* This class serializes the IndicesRequestCache.Key using its writeTo method.
*/
/**
 * This class serializes the IndicesRequestCache.Key using its writeTo method.
 */
public class IRCKeyWriteableSerializer implements Serializer<IndicesRequestCache.Key, byte[]> {

    // Required to construct Key instances, since Key is an inner class of IndicesRequestCache.
    final IndicesRequestCache irc;

    public IRCKeyWriteableSerializer(IndicesRequestCache irc) {
        this.irc = irc;
    }

    /**
     * Serializes the key via {@code Key.writeTo}.
     *
     * @param object the key to serialize; {@code null} yields {@code null}
     *               (symmetric with {@link #deserialize(byte[])})
     * @return the serialized bytes, or {@code null} for {@code null} input
     * @throws OpenSearchException wrapping any {@link IOException} from the stream
     */
    @Override
    public byte[] serialize(IndicesRequestCache.Key object) {
        if (object == null) {
            return null;
        }
        try (BytesStreamOutput os = new BytesStreamOutput()) {
            object.writeTo(os);
            return BytesReference.toBytes(os.bytes());
        } catch (IOException e) {
            throw new OpenSearchException(e);
        }
    }

    /**
     * Reconstructs a key via the {@code Key(StreamInput)} constructor.
     *
     * @param bytes the serialized form; {@code null} yields {@code null}
     * @return the deserialized key, or {@code null} for {@code null} input
     * @throws OpenSearchException wrapping any {@link IOException} from the stream
     */
    @Override
    public IndicesRequestCache.Key deserialize(byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        try (BytesStreamInput is = new BytesStreamInput(bytes, 0, bytes.length)) {
            return irc.new Key(is);
        } catch (IOException e) {
            throw new OpenSearchException(e);
        }
    }

    /**
     * Checks whether {@code object}'s serialized form equals {@code bytes}.
     * Null-safe: two nulls compare equal.
     */
    @Override
    public boolean equals(IndicesRequestCache.Key object, byte[] bytes) {
        if (object == null || bytes == null) {
            return object == null && bytes == null;
        }
        // Deserialization is much slower than serialization for keys of order 1 KB,
        // while time to serialize is fairly constant (per byte). So compare
        // serialized forms for small values and deserialized keys for large ones.
        if (bytes.length < 5000) {
            return Arrays.equals(serialize(object), bytes);
        } else {
            return object.equals(deserialize(bytes));
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@
import org.opensearch.common.util.concurrent.ConcurrentCollections;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
import org.opensearch.core.common.unit.ByteSizeValue;

Expand Down Expand Up @@ -284,7 +285,7 @@ interface CacheEntity extends Accountable, Writeable {
*
* @opensearch.internal
*/
public class Key implements Accountable {
class Key implements Accountable, Writeable {
private final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class);

public final CacheEntity entity; // use as identity equality
Expand Down Expand Up @@ -332,6 +333,13 @@ public int hashCode() {
result = 31 * result + value.hashCode();
return result;
}

// Serializes this key's three components so it can be stored in a
// serialized-form cache tier; the read side is the Key(StreamInput)
// constructor used by IRCKeyWriteableSerializer.deserialize.
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalWriteable(entity);
out.writeOptionalString(readerCacheKeyId);
out.writeBytesReference(value);
}
}

private class CleanupKey implements IndexReader.ClosedListener {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.common.cache.tier;

import org.opensearch.common.Randomness;
import org.opensearch.common.bytes.ReleasableBytesReference;
import org.opensearch.common.util.BigArrays;
import org.opensearch.common.util.PageCacheRecycler;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.bytes.CompositeBytesReference;
import org.opensearch.core.common.util.ByteArray;
import org.opensearch.test.OpenSearchTestCase;

import java.util.Random;

/**
 * Verifies that BytesReferenceSerializer round-trips every concrete
 * implementation of BytesReference without altering its content.
 */
public class BytesReferenceSerializerTests extends OpenSearchTestCase {

    public void testEquality() throws Exception {
        BytesReferenceSerializer ser = new BytesReferenceSerializer();
        Random rand = Randomness.get();
        byte[] bytesValue = new byte[1000];
        rand.nextBytes(bytesValue);

        // Plain BytesArray.
        assertRoundTrip(ser, new BytesArray(bytesValue));

        // Empty BytesArray.
        assertRoundTrip(ser, new BytesArray(new byte[] {}));

        // CompositeBytesReference.
        assertRoundTrip(ser, CompositeBytesReference.of(new BytesArray(bytesValue), new BytesArray(bytesValue)));

        // We need the PagedBytesReference to be larger than the page size (16 KB) in order to actually create it.
        byte[] pbrValue = new byte[PageCacheRecycler.PAGE_SIZE_IN_BYTES * 2];
        rand.nextBytes(pbrValue);
        ByteArray arr = BigArrays.NON_RECYCLING_INSTANCE.newByteArray(pbrValue.length);
        arr.set(0L, pbrValue, 0, pbrValue.length);
        // assertFalse (unlike a bare `assert`) also runs when -ea is off.
        assertFalse(arr.hasArray());
        assertRoundTrip(ser, BytesReference.fromByteArray(arr, pbrValue.length));

        // ReleasableBytesReference.
        assertRoundTrip(ser, new ReleasableBytesReference(new BytesArray(bytesValue), ReleasableBytesReference.NO_OP));
    }

    // Asserts that a value equals its serialized form (per the serializer's own
    // equals contract) and survives a serialize/deserialize round trip.
    private void assertRoundTrip(BytesReferenceSerializer ser, BytesReference original) {
        byte[] serialized = ser.serialize(original);
        assertTrue(ser.equals(original, serialized));
        assertEquals(original, ser.deserialize(serialized));
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -59,12 +59,46 @@ public void testBasicGetAndPut() throws IOException {
}
for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
String value = ehCacheDiskCachingTierNew.get(entry.getKey());
assertEquals(entry.getValue(), value);
}
ehCacheDiskCachingTierNew.close();
}
}

// Round-trips randomly generated BytesReference values through the disk tier
// and verifies each one comes back intact.
public void testBasicGetAndPutBytesReference() throws Exception {
    Settings settings = Settings.builder().build();
    try (NodeEnvironment env = newNodeEnvironment(settings)) {
        EhCacheDiskCachingTier<String, BytesReference> tier = new EhCacheDiskCachingTier.Builder<String, BytesReference>()
            .setKeyType(String.class)
            .setValueType(BytesReference.class)
            .setExpireAfterAccess(TimeValue.MAX_VALUE)
            .setSettings(settings)
            .setThreadPoolAlias("ehcacheTest")
            .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES * 2) // bigger so no evictions happen
            .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
            .setSettingPrefix(SETTING_PREFIX)
            .setKeySerializer(new StringSerializer())
            .setValueSerializer(new BytesReferenceSerializer())
            .build();
        int keyCount = randomIntBetween(10, 100);
        int valueLength = 1000;
        Random random = Randomness.get();
        Map<String, BytesReference> expected = new HashMap<>();
        for (int i = 0; i < keyCount; i++) {
            byte[] valueBytes = new byte[valueLength];
            random.nextBytes(valueBytes);
            expected.put(UUID.randomUUID().toString(), new BytesArray(valueBytes));
        }
        expected.forEach(tier::put);
        for (Map.Entry<String, BytesReference> entry : expected.entrySet()) {
            assertEquals(entry.getValue(), tier.get(entry.getKey()));
        }
        tier.close();
    }
}

public void testConcurrentPut() throws Exception {
Settings settings = Settings.builder().build();
try (NodeEnvironment env = newNodeEnvironment(settings)) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
package org.opensearch.indices;

import org.opensearch.common.Randomness;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.index.IndexService;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.test.OpenSearchTestCase;

import java.nio.ByteBuffer;
import java.util.Random;
import java.util.UUID;

/**
 * Verifies that IRCKeyWriteableSerializer round-trips randomly generated
 * IndicesRequestCache.Key instances, exercising both comparison branches
 * of its equals() implementation.
 */
public class IRCKeyWriteableSerializerTests extends OpenSearchSingleNodeTestCase {

    public void testSerializer() throws Exception {
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndicesRequestCache irc = new IndicesRequestCache(Settings.EMPTY, indicesService);
        IndexService indexService = createIndex("test");
        IndexShard indexShard = indexService.getShardOrNull(0);
        IndicesService.IndexShardCacheEntity entity = indicesService.new IndexShardCacheEntity(indexShard);
        IRCKeyWriteableSerializer ser = new IRCKeyWriteableSerializer(irc);

        int numKeys = 1000;
        // 1000 is below and 6000 above the serializer's 5000-byte cutoff,
        // so both branches of equals() are exercised.
        int[] valueLengths = new int[] { 1000, 6000 };
        Random rand = Randomness.get();
        for (int valueLength : valueLengths) {
            for (int i = 0; i < numKeys; i++) {
                IndicesRequestCache.Key key = getRandomIRCKey(valueLength, rand, irc, entity);
                byte[] serialized = ser.serialize(key);
                assertTrue(ser.equals(key, serialized));
                // assertEquals reports both values on failure, unlike assertTrue(key.equals(...)).
                assertEquals(key, ser.deserialize(serialized));
            }
        }
    }

    // Builds a Key whose value is valueLength random printable-ASCII bytes.
    private IndicesRequestCache.Key getRandomIRCKey(int valueLength, Random random, IndicesRequestCache irc, IndicesService.IndexShardCacheEntity entity) {
        byte[] value = new byte[valueLength];
        for (int i = 0; i < valueLength; i++) {
            value[i] = (byte) (random.nextInt(126 - 32) + 32); // printable ASCII range
        }
        BytesReference keyValue = new BytesArray(value);
        return irc.new Key(entity, keyValue, UUID.randomUUID().toString()); // same UUID source as used in real key
    }
}

Loading