diff --git a/README.md b/README.md
index d073ece..8071d7b 100644
--- a/README.md
+++ b/README.md
@@ -97,12 +97,12 @@ PalDB is available on Maven Central, hence just add the following dependency:
<groupId>net.soundvibe</groupId>
<artifactId>paldb</artifactId>
- <version>2.1.0</version>
+ <version>2.1.1</version>
```
Scala SBT
```
-libraryDependencies += "net.soundvibe" % "paldb" % "2.1.0"
+libraryDependencies += "net.soundvibe" % "paldb" % "2.1.1"
```
diff --git a/pom.xml b/pom.xml
index cd62f79..c7f8a9c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
<groupId>net.soundvibe</groupId>
<artifactId>paldb</artifactId>
- <version>2.1.0</version>
+ <version>2.1.1</version>
<packaging>jar</packaging>
<name>paldb</name>
<description>Embeddable persistent key-value store</description>
diff --git a/src/main/java/com/linkedin/paldb/api/StoreInitializer.java b/src/main/java/com/linkedin/paldb/api/StoreInitializer.java
index e8ba389..cb27000 100644
--- a/src/main/java/com/linkedin/paldb/api/StoreInitializer.java
+++ b/src/main/java/com/linkedin/paldb/api/StoreInitializer.java
@@ -9,6 +9,8 @@ public interface StoreInitializer<K, V> extends AutoCloseable {
void put(K key, V value);
+ void remove(K key);
+
@Override
void close();
}
diff --git a/src/main/java/com/linkedin/paldb/api/StoreWriter.java b/src/main/java/com/linkedin/paldb/api/StoreWriter.java
index f892ccd..d966272 100644
--- a/src/main/java/com/linkedin/paldb/api/StoreWriter.java
+++ b/src/main/java/com/linkedin/paldb/api/StoreWriter.java
@@ -81,4 +81,10 @@ default void putAll(K[] keys, V[] values) {
* null
*/
void put(byte[] key, byte[] value);
+
+ /**
+ * Removes the given key from the store
+ * @param key Key to be removed
+ */
+ void remove(K key);
}
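
A minimal end-to-end sketch of the new writer-side remove (it mirrors the `testRemoveAfterPut` case added below; the file path and key/value types are illustrative):

```java
import com.linkedin.paldb.api.*;

import java.io.File;

class RemoveExample {
    public static void main(String[] args) {
        File storeFile = new File("store.paldb"); // illustrative path
        try (StoreWriter<Integer, String> writer = PalDB.createWriter(storeFile)) {
            writer.put(1, "foo");
            writer.remove(1); // tombstones the key; the built store ends up without it
        }
        try (StoreReader<Integer, String> reader = PalDB.createReader(storeFile)) {
            System.out.println(reader.size()); // 0
            System.out.println(reader.get(1)); // null
        }
    }
}
```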
diff --git a/src/main/java/com/linkedin/paldb/impl/ReaderImpl.java b/src/main/java/com/linkedin/paldb/impl/ReaderImpl.java
index 6f054df..e58c470 100644
--- a/src/main/java/com/linkedin/paldb/impl/ReaderImpl.java
+++ b/src/main/java/com/linkedin/paldb/impl/ReaderImpl.java
@@ -16,6 +16,7 @@
import com.linkedin.paldb.api.*;
import com.linkedin.paldb.api.errors.StoreClosed;
+import com.linkedin.paldb.impl.StorageSerialization.RemovedValue;
import com.linkedin.paldb.utils.DataInputOutput;
import org.slf4j.*;
@@ -100,7 +101,8 @@ public V get(K key, V defaultValue) {
try {
byte[] valueBytes = storage.get(serialization.serializeKey(key));
if (valueBytes != null) {
- return serialization.deserializeValue(new DataInputOutput(valueBytes));
+ var value = serialization.deserializeValue(new DataInputOutput(valueBytes));
+ return value == RemovedValue.INSTANCE ? null : value;
} else {
return defaultValue;
}
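
One consequence of this lookup change, sketched below under the same assumptions as the tests in this diff: a key that was put and then removed resolves to the tombstone and yields null even when a defaultValue is supplied, whereas a key that was never written still falls through to the default.

```java
import com.linkedin.paldb.api.*;

import java.io.File;

class DefaultValueSemantics {
    public static void main(String[] args) {
        File storeFile = new File("store.paldb"); // illustrative path
        try (StoreWriter<String, String> writer = PalDB.createWriter(storeFile)) {
            writer.put("kept", "value");
            writer.put("gone", "old");
            writer.remove("gone"); // the index slot now points at the tombstone record
        }
        try (StoreReader<String, String> reader = PalDB.createReader(storeFile)) {
            System.out.println(reader.get("kept", "fallback"));    // value
            System.out.println(reader.get("gone", "fallback"));    // null (RemovedValue found)
            System.out.println(reader.get("missing", "fallback")); // fallback (no entry at all)
        }
    }
}
```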
diff --git a/src/main/java/com/linkedin/paldb/impl/StorageReader.java b/src/main/java/com/linkedin/paldb/impl/StorageReader.java
index d3dd0a0..61330e8 100644
--- a/src/main/java/com/linkedin/paldb/impl/StorageReader.java
+++ b/src/main/java/com/linkedin/paldb/impl/StorageReader.java
@@ -27,6 +27,7 @@
import java.time.Instant;
import java.util.*;
+import static com.linkedin.paldb.impl.StorageSerialization.REMOVED_ID;
import static com.linkedin.paldb.utils.DataInputOutput.*;
@@ -43,6 +44,7 @@ public class StorageReader implements Iterable<Map.Entry<byte[], byte[]>> {
private final long keyCount;
// Key count for each key length
private final long[] keyCounts;
+ private final long[] actualKeyCounts;
// Slot size for each key length
private final int[] slotSizes;
// Number of slots for each key length
@@ -151,6 +153,7 @@ public class StorageReader implements Iterable<Map.Entry<byte[], byte[]>> {
indexOffsets = new long[maxKeyLength + 1];
dataOffsets = new long[maxKeyLength + 1];
keyCounts = new long[maxKeyLength + 1];
+ actualKeyCounts = new long[maxKeyLength + 1];
slots = new long[maxKeyLength + 1];
slotSizes = new int[maxKeyLength + 1];
@@ -159,6 +162,7 @@ public class StorageReader implements Iterable> {
int keyLength = dataInputStream.readInt();
keyCounts[keyLength] = dataInputStream.readLong();
+ actualKeyCounts[keyLength] = dataInputStream.readLong();
slots[keyLength] = dataInputStream.readLong();
slotSizes[keyLength] = dataInputStream.readInt();
indexOffsets[keyLength] = dataInputStream.readLong();
@@ -407,8 +411,8 @@ public StorageIterator(boolean value) {
}
private void nextKeyLength() {
- for (int i = currentKeyLength + 1; i < keyCounts.length; i++) {
- long c = keyCounts[i];
+ for (int i = currentKeyLength + 1; i < actualKeyCounts.length; i++) {
+ long c = actualKeyCounts[i];
if (c > 0) {
currentKeyLength = i;
keyLimit += c;
@@ -436,14 +440,15 @@ public FastEntry next() {
}
byte[] key = Arrays.copyOf(currentSlotBuffer, currentKeyLength);
- byte[] value = null;
+ long valueOffset = currentDataOffset + offset;
+ var value = mMapData ? getMMapBytes(valueOffset) : getDiskBytes(valueOffset);
- if (withValue) {
- long valueOffset = currentDataOffset + offset;
- value = mMapData ? getMMapBytes(valueOffset) : getDiskBytes(valueOffset);
+ boolean isRemoved = (value.length > 0 && (((int) value[0] & 0xff) == REMOVED_ID));
+ if (isRemoved) {
+ return next();
}
- entry.set(key, value);
+ entry.set(key, withValue ? value : null);
if (++keyIndex == keyLimit) {
nextKeyLength();
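
The removed-entry test in the iterator above relies on a tombstone being stored as a single REMOVED_ID type byte, so inspecting the first byte of the serialized value suffices. A standalone sketch of that check:

```java
class TombstoneCheck {
    static final int REMOVED_ID = 1; // mirrors StorageSerialization.REMOVED_ID below

    static boolean isRemoved(byte[] value) {
        // The first byte of a serialized value is its type marker.
        return value.length > 0 && ((value[0] & 0xff) == REMOVED_ID);
    }

    public static void main(String[] args) {
        System.out.println(isRemoved(new byte[] {1})); // true: tombstone record
        System.out.println(isRemoved(new byte[] {2})); // false: BOOLEAN_TRUE marker
        System.out.println(isRemoved(new byte[0]));    // false: empty payload
    }
}
```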
diff --git a/src/main/java/com/linkedin/paldb/impl/StorageSerialization.java b/src/main/java/com/linkedin/paldb/impl/StorageSerialization.java
index 116d2ad..de0b4d0 100644
--- a/src/main/java/com/linkedin/paldb/impl/StorageSerialization.java
+++ b/src/main/java/com/linkedin/paldb/impl/StorageSerialization.java
@@ -118,8 +118,14 @@ boolean isCompressionEnabled() {
// SERIALIZATION
- private static final int NULL_ID = -1;
+ public static class RemovedValue {
+
+ public static final RemovedValue INSTANCE = new RemovedValue();
+
+ }
+
private static final int NULL = 0;
+ public static final int REMOVED_ID = 1;
private static final int BOOLEAN_TRUE = 2;
private static final int BOOLEAN_FALSE = 3;
private static final int INTEGER_MINUS_1 = 4;
@@ -217,6 +223,8 @@ private void serialize(final DataOutput out, final Object obj, boolean compr
if (obj == null) {
out.write(NULL);
+ } else if (obj == RemovedValue.INSTANCE) {
+ out.write(REMOVED_ID);
} else if (serializer != null) {
out.write(compress ? CUSTOM_C : CUSTOM);
var bytes = serializer.write((T) obj);
@@ -694,7 +702,7 @@ private <T> T deserialize(byte[] buf, Serializer<T> serializer) throws IOExcepti
DataInputOutput bs = new DataInputOutput(buf);
Object ret = deserialize(bs, serializer);
if (bs.available() != 0) {
- throw new RuntimeException("bytes left: " + bs.available());
+ throw new IOException("bytes left: " + bs.available());
}
return (T) ret;
@@ -731,6 +739,9 @@ private <T> T deserialize(DataInput is, Serializer<T> serializer) throws IOExcep
break;
case NULL:
break;
+ case REMOVED_ID:
+ ret = RemovedValue.INSTANCE;
+ break;
case BOOLEAN_TRUE:
ret = Boolean.TRUE;
break;
@@ -980,9 +991,7 @@ private <T> T deserialize(DataInput is, Serializer<T> serializer) throws IOExcep
case ARRAY_OBJECT:
ret = deserializeArrayObject(is, serializer);
break;
- case -1:
- throw new EOFException();
-
+ default: throw new EOFException();
}
return (T) ret;
}
@@ -1101,7 +1110,7 @@ private static String[] deserializeStringArray(DataInput is) throws IOException
return ret;
}
- private static final byte[] EMPTY_BYTES = new byte[0];
+ public static final byte[] EMPTY_BYTES = new byte[0];
private static byte[] deserializeByteArray(DataInput is) throws IOException {
int size = LongPacker.unpackInt(is);
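
RemovedValue is a sentinel singleton: the reader distinguishes "removed" from any real value (including null) by object identity, which the round-trip test added below also relies on. A minimal standalone sketch of the pattern (here with a private constructor to guarantee a single instance):

```java
final class Sentinel {
    static final Sentinel REMOVED = new Sentinel();

    private Sentinel() {} // no other instances can exist
}

class SentinelDemo {
    public static void main(String[] args) {
        Object stored = Sentinel.REMOVED;
        // Identity comparison is safe precisely because there is one instance.
        System.out.println(stored == Sentinel.REMOVED); // true
    }
}
```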
diff --git a/src/main/java/com/linkedin/paldb/impl/StorageWriter.java b/src/main/java/com/linkedin/paldb/impl/StorageWriter.java
index a0952b3..0ac4b46 100644
--- a/src/main/java/com/linkedin/paldb/impl/StorageWriter.java
+++ b/src/main/java/com/linkedin/paldb/impl/StorageWriter.java
@@ -26,6 +26,7 @@
import java.text.DecimalFormat;
import java.util.*;
+import static com.linkedin.paldb.impl.StorageSerialization.*;
import static com.linkedin.paldb.utils.DataInputOutput.*;
/**
@@ -58,6 +59,7 @@ public class StorageWriter {
// Number of keys
private long keyCount;
private long[] keyCounts;
+ private long[] actualKeyCounts;
// Number of values
private long valueCount;
// Number of collisions
@@ -87,9 +89,12 @@ public class StorageWriter {
dataLengths = new long[0];
maxOffsetLengths = new int[0];
keyCounts = new long[0];
+ actualKeyCounts = new long[0];
segmentSize = config.getLong(Configuration.MMAP_SEGMENT_SIZE);
duplicatesEnabled = config.getBoolean(Configuration.ALLOW_DUPLICATES);
- }
+ }
+
+ private static final byte[] REMOVED_BYTES = new byte[] {REMOVED_ID};
public void put(byte[] key, byte[] value) throws IOException {
int keyLength = key.length;
@@ -113,6 +118,14 @@ public void put(byte[] key, byte[] value) throws IOException {
// Write offset and record max offset length
int offsetDataLength = LongPacker.packLong(indexStream, dataLength);
maxOffsetLengths[keyLength] = Math.max(offsetDataLength, maxOffsetLengths[keyLength]);
+ byte[] val;
+ if (value == null) {
+ val = REMOVED_BYTES;
+ LongPacker.packInt(indexStream, REMOVED_ID);
+ } else {
+ val = value;
+ LongPacker.packInt(indexStream, 0);
+ }
// Write if data is not the same
if (!sameValue) {
@@ -120,21 +133,22 @@ public void put(byte[] key, byte[] value) throws IOException {
DataOutputStream dataStream = getDataStream(keyLength);
// Write size and value
- int valueSize = LongPacker.packInt(dataStream, value.length);
- dataStream.write(value);
+ int valueSize = LongPacker.packInt(dataStream, val.length);
+ dataStream.write(val);
// Update data length
- dataLengths[keyLength] += valueSize + value.length;
+ dataLengths[keyLength] += valueSize + val.length;
// Update last value
- lastValues[keyLength] = value;
- lastValuesLength[keyLength] = valueSize + value.length;
+ lastValues[keyLength] = val;
+ lastValuesLength[keyLength] = valueSize + val.length;
valueCount++;
}
keyCount++;
keyCounts[keyLength]++;
+ actualKeyCounts[keyLength]++;
}
public void close() throws IOException {
@@ -241,6 +255,7 @@ private void writeMetadata(RandomAccessFile dataOutputStream, BloomFilter bloomF
// Write key count
dataOutputStream.writeLong(keyCounts[i]);
+ dataOutputStream.writeLong(actualKeyCounts[i]);
// Write slot count
long slots = Math.round(keyCounts[i] / loadFactor);
@@ -289,15 +304,15 @@ private MappedByteBuffer[] initIndexBuffers(FileChannel channel, long indexSize)
private File buildIndex(int keyLength, BloomFilter bloomFilter) throws IOException {
long count = keyCounts[keyLength];
+ long duplicateCount = 0L;
long numSlots = Math.round(count / loadFactor);
int offsetLength = maxOffsetLengths[keyLength];
int slotSize = keyLength + offsetLength;
-
// Init index
var indexFile = new File(tempFolder, "index" + keyLength + ".dat");
- try (RandomAccessFile indexAccessFile = new RandomAccessFile(indexFile, "rw")) {
+ try (var indexAccessFile = new RandomAccessFile(indexFile, "rw")) {
indexAccessFile.setLength(numSlots * slotSize);
- try (FileChannel indexChannel = indexAccessFile.getChannel()) {
+ try (var indexChannel = indexAccessFile.getChannel()) {
var indexBuffers = initIndexBuffers(indexChannel, indexAccessFile.length());
// Init reading stream
File tempIndexFile = indexFiles[keyLength];
@@ -313,19 +328,24 @@ private File buildIndex(int keyLength, BloomFilter bloomFilter) throws IOExcepti
// Read offset
long offsetData = LongPacker.unpackLong(tempIndexStream);
-
+ int head = LongPacker.unpackInt(tempIndexStream);
+ boolean isRemoved = head == REMOVED_ID;
// Hash
long hash = Murmur3.hash(keyBuffer);
if (bloomFilter != null) {
bloomFilter.add(keyBuffer);
}
-
boolean collision = false;
for (long probe = 0; probe < count; probe++) {
long slot = ((hash + probe) % numSlots);
getFromBuffers(indexBuffers, slot * slotSize, slotBuffer, slotSize, segmentSize);
long found = LongPacker.unpackLong(slotBuffer, keyLength);
- if (found == 0) {
+ if (found == 0L) {
+
+ if (isRemoved) {
+ duplicateCount++;
+ break;
+ }
// The spot is empty use it
putIntoBuffers(indexBuffers, slot * slotSize, keyBuffer, keyBuffer.length, segmentSize);
int pos = LongPacker.packLong(offsetBuffer, offsetData);
@@ -335,10 +355,13 @@ private File buildIndex(int keyLength, BloomFilter bloomFilter) throws IOExcepti
collision = true;
// Check for duplicates
if (isKey(slotBuffer, keyBuffer)) {
- if (duplicatesEnabled) {
+ if (isRemoved || duplicatesEnabled) {
putIntoBuffers(indexBuffers, slot * slotSize, keyBuffer, keyBuffer.length, segmentSize);
int pos = LongPacker.packLong(offsetBuffer, offsetData);
putIntoBuffers(indexBuffers, (slot * slotSize) + keyBuffer.length, offsetBuffer, pos, segmentSize);
+ duplicateCount++;
+ if (isRemoved) duplicateCount++;
+ break;
} else {
throw new DuplicateKeyException(
String.format("A duplicate key has been found for key bytes %s", Arrays.toString(keyBuffer)));
@@ -359,7 +382,7 @@ private File buildIndex(int keyLength, BloomFilter bloomFilter) throws IOExcepti
} finally {
unmap(indexBuffers);
- if (tempIndexFile.delete()) {
+ if (tempIndexFile.delete()) {
log.info("Temporary index file {} has been deleted", tempIndexFile.getName());
}
}
@@ -368,6 +391,8 @@ private File buildIndex(int keyLength, BloomFilter bloomFilter) throws IOExcepti
Files.deleteIfExists(indexFile.toPath());
throw e;
}
+ keyCount -= duplicateCount;
+ actualKeyCounts[keyLength] -= duplicateCount;
return indexFile;
}
@@ -463,6 +488,7 @@ private DataOutputStream getIndexStream(int keyLength) throws IOException {
indexStreams = copyOfIndexStreams;
indexFiles = Arrays.copyOf(indexFiles, keyLength + 1);
keyCounts = Arrays.copyOf(keyCounts, keyLength + 1);
+ actualKeyCounts = Arrays.copyOf(actualKeyCounts, keyLength + 1);
maxOffsetLengths = Arrays.copyOf(maxOffsetLengths, keyLength + 1);
lastValues = Arrays.copyOf(lastValues, keyLength + 1);
lastValuesLength = Arrays.copyOf(lastValuesLength, keyLength + 1);
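
A sketch of the per-key-length metadata block after this change, matching the writeMetadata lines above and the StorageReader lines earlier in the diff; the interpretation of the two counts (total records written vs. live keys left after the duplicate/tombstone accounting in buildIndex) is an assumption drawn from that bookkeeping:

```java
import java.io.*;

class MetadataBlockSketch {
    public static void main(String[] args) throws IOException {
        var bytes = new ByteArrayOutputStream();
        try (var out = new DataOutputStream(bytes)) {
            out.writeInt(4);    // key length
            out.writeLong(10L); // keyCounts[4]: all records, tombstones included
            out.writeLong(8L);  // actualKeyCounts[4]: live keys (new field in this diff)
            out.writeLong(13L); // slots[4]
            out.writeInt(9);    // slotSizes[4]
            out.writeLong(0L);  // indexOffsets[4]
        }
        // Read it back in the same order StorageReader does.
        try (var in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            System.out.printf("keyLength=%d total=%d live=%d%n",
                    in.readInt(), in.readLong(), in.readLong());
        }
    }
}
```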
diff --git a/src/main/java/com/linkedin/paldb/impl/StoreImpl.java b/src/main/java/com/linkedin/paldb/impl/StoreImpl.java
index 7a411a0..2a1b4ec 100644
--- a/src/main/java/com/linkedin/paldb/impl/StoreImpl.java
+++ b/src/main/java/com/linkedin/paldb/impl/StoreImpl.java
@@ -61,14 +61,7 @@ public static <K, V> StoreWriter<K, V> createWriter(File file, Configuration
}
try {
log.info("Initialize writer from file {}", file.getName());
- File parent = file.getParentFile();
- if (parent != null && !parent.exists()) {
- if (parent.mkdirs()) {
- log.info("Creating directories for path {}", file.getName());
- } else {
- throw new IOException(String.format("Couldn't create directory %s", parent));
- }
- }
+ Files.createDirectories(file.isDirectory() ? file.toPath() : file.toPath().toAbsolutePath().getParent());
return new WriterImpl<>(config, file);
} catch (IOException ex) {
throw new UncheckedIOException(ex);
@@ -89,7 +82,7 @@ public static <K, V> StoreRW<K, V> createRW(File file, Configuration<K, V> config
}
log.info("Initialize RW from file {}", file.getName());
try {
- Files.createDirectories(file.isDirectory() ? file.toPath() : file.toPath().getParent());
+ Files.createDirectories(file.isDirectory() ? file.toPath() : file.toPath().toAbsolutePath().getParent());
return new StoreRWImpl<>(config, file);
} catch (IOException e) {
throw new UncheckedIOException(e);
diff --git a/src/main/java/com/linkedin/paldb/impl/StoreRWImpl.java b/src/main/java/com/linkedin/paldb/impl/StoreRWImpl.java
index c1eadac..d9e2f8a 100644
--- a/src/main/java/com/linkedin/paldb/impl/StoreRWImpl.java
+++ b/src/main/java/com/linkedin/paldb/impl/StoreRWImpl.java
@@ -13,6 +13,8 @@
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.*;
+import static java.util.function.Predicate.not;
+
public class StoreRWImpl<K, V> implements StoreRW<K, V> {
private static final Logger log = LoggerFactory.getLogger(StoreRWImpl.class);
@@ -88,6 +90,11 @@ public void put(K key, V value) {
writer.put(key, value);
}
+ @Override
+ public void remove(K key) {
+ writer.remove(key);
+ }
+
@Override
public void close() {
writer.close();
@@ -253,9 +260,12 @@ public synchronized CompletableFuture<StoreRW<K, V>> flushAsync() {
reader.set(new ReaderImpl<>(config, file));
entries.forEach((k, v) -> buffer.computeIfPresent(k, (key, oldValue) -> {
- if (oldValue.equals(v)) return null;
+ if (oldValue.equals(v)) {
+ return null;
+ }
return oldValue;
}));
+
} finally {
rwLock.writeLock().unlock();
}
@@ -280,7 +290,11 @@ public long size() {
checkOpen();
rwLock.readLock().lock();
try {
- return reader.get().size() + buffer.size();
+ return Math.max(0L,
+ reader.get().size() +
+ buffer.values().stream()
+ .filter(not(v -> v == REMOVED))
+ .count());
} finally {
rwLock.readLock().unlock();
}
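
A sketch of the corrected size() arithmetic with plain collections (REMOVED stands in for the class's internal tombstone sentinel; as the test comment below notes, the result stays approximate until flush, since a buffered tombstone may still shadow an already-flushed key):

```java
import java.util.Map;

import static java.util.function.Predicate.not;

class SizeSketch {
    static final Object REMOVED = new Object(); // stand-in for the internal sentinel

    public static void main(String[] args) {
        long readerSize = 2; // keys in the flushed store, e.g. "any" and "other"
        Map<String, Object> buffer = Map.of("new", "element", "any", REMOVED);
        long size = Math.max(0L,
                readerSize + buffer.values().stream()
                        .filter(not(v -> v == REMOVED))
                        .count());
        System.out.println(size); // 3: the tombstone for "any" is not counted as an entry
    }
}
```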
diff --git a/src/main/java/com/linkedin/paldb/impl/WriterImpl.java b/src/main/java/com/linkedin/paldb/impl/WriterImpl.java
index ba95889..692cc12 100644
--- a/src/main/java/com/linkedin/paldb/impl/WriterImpl.java
+++ b/src/main/java/com/linkedin/paldb/impl/WriterImpl.java
@@ -128,6 +128,20 @@ public void put(byte[] key, byte[] value) {
}
}
+ @Override
+ public void remove(K key) {
+ checkOpen();
+ if (key == null) {
+ throw new NullPointerException();
+ }
+ try {
+ byte[] keyBytes = serialization.serializeKey(key);
+ storage.put(keyBytes, null);
+ } catch (IOException ex) {
+ throw new UncheckedIOException(ex);
+ }
+ }
+
// UTILITIES
private void checkOpen() {
diff --git a/src/main/java/com/linkedin/paldb/utils/LongPacker.java b/src/main/java/com/linkedin/paldb/utils/LongPacker.java
index fd798c1..4fab680 100644
--- a/src/main/java/com/linkedin/paldb/utils/LongPacker.java
+++ b/src/main/java/com/linkedin/paldb/utils/LongPacker.java
@@ -90,9 +90,7 @@ public static int packLong(byte[] ba, long value) {
* @return the long value
* @throws IOException if an error occurs with the stream
*/
- public static long unpackLong(DataInput is)
- throws IOException {
-
+ public static long unpackLong(DataInput is) throws IOException {
long result = 0;
for (int offset = 0; offset < 64; offset += 7) {
long b = is.readUnsignedByte();
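
For context, a worked example of the base-128 varint decode that unpackLong performs (the loop above is only reformatted by this diff; the LSB-first byte order in this re-implementation is an assumption consistent with the visible `offset += 7` accumulation):

```java
import java.io.*;

class VarintDemo {
    // Illustrative re-implementation of the 7-bits-per-byte decode.
    static long unpackLong(DataInput is) throws IOException {
        long result = 0;
        for (int offset = 0; offset < 64; offset += 7) {
            long b = is.readUnsignedByte();
            result |= (b & 0x7F) << offset; // low 7 bits, placed LSB-first
            if ((b & 0x80) == 0) break;     // high bit clear: final byte
        }
        return result;
    }

    public static void main(String[] args) throws IOException {
        // 300 = 0b1_0010_1100 -> 0xAC (0x2C with continuation bit), then 0x02
        byte[] packed = {(byte) 0xAC, 0x02};
        var in = new DataInputStream(new ByteArrayInputStream(packed));
        System.out.println(unpackLong(in)); // 300
    }
}
```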
diff --git a/src/test/java/com/linkedin/paldb/impl/StoreRWImplTest.java b/src/test/java/com/linkedin/paldb/impl/StoreRWImplTest.java
index 4552321..8d94b83 100644
--- a/src/test/java/com/linkedin/paldb/impl/StoreRWImplTest.java
+++ b/src/test/java/com/linkedin/paldb/impl/StoreRWImplTest.java
@@ -71,6 +71,62 @@ void should_test_basic_operations(@TempDir Path tempDir) throws IOException, Int
}
}
+ @Test
+ void should_put_and_remove_on_init(@TempDir Path tempDir) {
+ var file = tempDir.resolve("test.paldb");
+ try (var sut = PalDB.createRW(file.toFile(), PalDBConfigBuilder.<String, String>create().build())) {
+
+ try (var init = sut.init()) {
+ init.remove("any");
+ init.put("any", "value");
+ init.put("other", "value2");
+ init.put("other", "value3");
+ init.remove("other");
+ }
+
+ assertEquals(1, sut.size());
+ sut.put("new", "element");
+ assertEquals(2, sut.size());
+
+ assertEquals("value", sut.get("any"));
+ assertEquals("element", sut.get("new"));
+ assertNull(sut.get("other"));
+
+ sut.remove("any");
+ assertEquals(2, sut.size()); //approximate count before flush
+ sut.flush();
+ assertEquals(1, sut.size());
+ assertNull(sut.get("any"));
+ }
+ }
+
+ @Test
+ void should_clear_from_buffer_removed_after_flush(@TempDir Path tempDir) {
+ var file = tempDir.resolve("test.paldb");
+ try (var sut = PalDB.createRW(file.toFile(), PalDBConfigBuilder.<String, String>create().build())) {
+
+ try (var init = sut.init()) {
+ init.put("any", "value");
+ init.put("other", "value2");
+ init.put("other", "value3");
+ }
+
+ assertEquals(2, sut.size());
+ sut.put("new", "element");
+ assertEquals(3, sut.size());
+
+ sut.remove("any");
+ sut.remove("other");
+
+ sut.flush();
+
+ assertEquals(1, sut.size());
+ assertNull(sut.get("any"));
+ assertNull(sut.get("other"));
+ assertEquals("element", sut.get("new"));
+ }
+ }
+
@Test
void should_put_trigger_compaction_in_background(@TempDir Path tempDir) throws IOException, InterruptedException {
var file = tempDir.resolve("test.paldb");
diff --git a/src/test/java/com/linkedin/paldb/impl/TestStorageSerialization.java b/src/test/java/com/linkedin/paldb/impl/TestStorageSerialization.java
index 5a1335e..2a8db47 100644
--- a/src/test/java/com/linkedin/paldb/impl/TestStorageSerialization.java
+++ b/src/test/java/com/linkedin/paldb/impl/TestStorageSerialization.java
@@ -25,7 +25,7 @@
import java.nio.*;
import static org.junit.jupiter.api.Assertions.*;
-
+@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class)
class TestStorageSerialization {
private StorageSerialization serialization;
@@ -593,6 +593,38 @@ void testMultiDimensionalLongArray() throws IOException {
assertArrayEquals(d, res);
}
+ @Test
+ void should_throw_when_deserializing_and_serializer_is_not_registered() throws IOException {
+ var config = PalDBConfigBuilder.create()
+ .withValueSerializer(new TestStoreReader.PointSerializer())
+ .build();
+ var serialization = new StorageSerialization<>(config);
+ var bytes = serialization.serializeValue(new Point(5, 4));
+ var deserialization = new StorageSerialization<>(new Configuration());
+ assertThrows(MissingSerializer.class, () -> deserialization.deserializeValue(bytes));
+ }
+
+ @Test
+ void should_throw_when_deserializing_and_serializer_is_not_registered_compressed() throws IOException {
+ var config = PalDBConfigBuilder.create()
+ .withEnableCompression(true)
+ .withValueSerializer(new TestStoreReader.PointSerializer())
+ .build();
+ var serialization = new StorageSerialization<>(config);
+ var bytes = serialization.serializeValue(new Point(5, 4));
+ var deserialization = new StorageSerialization<>(PalDBConfigBuilder.create()
+ .withEnableCompression(true)
+ .build());
+ assertThrows(MissingSerializer.class, () -> deserialization.deserializeValue(bytes));
+ }
+
+ @Test
+ void should_serialize_removed_instance() throws IOException {
+ var serialization = new StorageSerialization<>(new Configuration<>());
+ var bytes = serialization.serializeValue(StorageSerialization.RemovedValue.INSTANCE);
+ assertSame(StorageSerialization.RemovedValue.INSTANCE, serialization.deserializeValue(bytes));
+ }
+
// UTILITY
private static int[] generateIntArray(int size) {
diff --git a/src/test/java/com/linkedin/paldb/impl/TestStore.java b/src/test/java/com/linkedin/paldb/impl/TestStore.java
index 4792a06..0edf053 100644
--- a/src/test/java/com/linkedin/paldb/impl/TestStore.java
+++ b/src/test/java/com/linkedin/paldb/impl/TestStore.java
@@ -208,6 +208,70 @@ void testPutSerializedKey() throws IOException {
}
}
+ @Test
+ void testPutNullValue() {
+ try (StoreWriter<Integer, String> writer = PalDB.createWriter(storeFile)) {
+ writer.put(1, null);
+ }
+
+ try (StoreReader<Integer, String> reader = PalDB.createReader(storeFile)) {
+ assertEquals(1, reader.size());
+ assertNull(reader.get(1));
+ }
+ }
+
+ @Test
+ void testRemoveAfterPut() {
+ try (StoreWriter<Integer, String> writer = PalDB.createWriter(storeFile)) {
+ writer.put(1, "foo");
+ writer.remove(1);
+ }
+
+ try (StoreReader<Integer, String> reader = PalDB.createReader(storeFile)) {
+ assertEquals(0, reader.size());
+ assertNull(reader.get(1));
+ }
+ }
+
+ @Test
+ void testRemoveArrayAfterPut() {
+ try (StoreWriter<Integer, String[]> writer = PalDB.createWriter(storeFile)) {
+ writer.put(1, new String[] {"foo"});
+ writer.remove(1);
+ }
+
+ try (StoreReader<Integer, String[]> reader = PalDB.createReader(storeFile)) {
+ assertEquals(0, reader.size());
+ assertNull(reader.get(1));
+ }
+ }
+
+ @Test
+ void testRemoveBeforePut() {
+ try (StoreWriter<Integer, String> writer = PalDB.createWriter(storeFile)) {
+ writer.remove(1);
+ writer.put(1, "foo");
+ }
+
+ try (StoreReader<Integer, String> reader = PalDB.createReader(storeFile)) {
+ assertEquals(1, reader.size());
+ assertEquals("foo", reader.get(1));
+ }
+ }
+
+ @Test
+ void testRemoveArrayBeforePut() {
+ try (StoreWriter<Integer, String[]> writer = PalDB.createWriter(storeFile)) {
+ writer.remove(1);
+ writer.put(1, new String[] {"foo"});
+ }
+
+ try (StoreReader<Integer, String[]> reader = PalDB.createReader(storeFile)) {
+ assertEquals(1, reader.size());
+ assertArrayEquals(new String[] {"foo"}, reader.get(1));
+ }
+ }
+
@Test
void testByteMarkOneKey() throws IOException {
try (FileOutputStream fos = new FileOutputStream(storeFile);
@@ -530,6 +594,7 @@ void should_allow_duplicates() {
try (StoreReader<String, byte[]> reader = PalDB.createReader(storeFile, config)) {
assertArrayEquals("test".getBytes(), reader.get("foobar"));
assertArrayEquals("test2".getBytes(), reader.get("any data"));
+ assertEquals(2, reader.size());
}
}