Update to java 16

Andrea Cavalli 2021-05-21 00:18:39 +02:00
parent bce04a20a4
commit f39070f2f3
16 changed files with 48 additions and 67 deletions

pom.xml

@@ -30,8 +30,6 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.release>11</maven.compiler.release>
<maven.compiler.>11</maven.compiler.>
</properties>
<repositories>
@@ -55,9 +53,10 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.20</version>
<groupId>io.soabase.record-builder</groupId>
<artifactId>record-builder-core</artifactId>
<version>1.19</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.jetbrains</groupId>
@@ -88,21 +87,24 @@
</dependencies>
<build>
<sourceDirectory>src/main/lombok</sourceDirectory>
<testSourceDirectory>src/test/lombok</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<version>3.8.1</version>
<configuration>
<source>16</source>
<target>16</target>
<annotationProcessorPaths>
<annotationProcessorPath>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.20</version>
<groupId>io.soabase.record-builder</groupId>
<artifactId>record-builder-processor</artifactId>
<version>1.19</version>
</annotationProcessorPath>
</annotationProcessorPaths>
<annotationProcessors>
<annotationProcessor>io.soabase.recordbuilder.processor.RecordBuilderProcessor</annotationProcessor>
</annotationProcessors>
</configuration>
</plugin>
</plugins>
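
The record-builder annotation processor takes over the builder generation that Lombok used to provide. A minimal sketch of how it is typically wired to a record (the Example record and its fields are hypothetical, not part of this repository; the @RecordBuilder annotation and the generated ExampleBuilder name follow the library's documented convention):

import io.soabase.recordbuilder.core.RecordBuilder;

// Annotating a record makes RecordBuilderProcessor emit an ExampleBuilder class at compile time.
@RecordBuilder
public record Example(String url, int size) {}

// The generated builder is then used much like a Lombok @Builder:
// Example example = ExampleBuilder.builder().url("https://example.org/a.bin").size(1024).build();
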
@@ -118,10 +120,6 @@
<artifactId>maven-resources-plugin</artifactId>
<version>3.0.2</version>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.22.1</version>

DiskCache.java

@@ -32,14 +32,11 @@ import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode;
import it.cavallium.dbengine.database.disk.LLLocalDictionary.ReleasableSlice;
import it.unimi.dsi.fastutil.booleans.BooleanArrayList;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import org.jetbrains.annotations.Nullable;
import org.warp.filesponge.DiskMetadata.DiskMetadataSerializer;
import reactor.core.publisher.Flux;
@@ -86,8 +83,8 @@ public class DiskCache implements URLsDiskHandler, URLsWriter {
return oldValue;
} else {
return diskMetadataSerializer.serialize(new DiskMetadata(
metadata.getSize(),
BooleanArrayList.wrap(new boolean[DiskMetadata.getBlocksCount(metadata.getSize(), BLOCK_SIZE)])
metadata.size(),
BooleanArrayList.wrap(new boolean[DiskMetadata.getBlocksCount(metadata.size(), BLOCK_SIZE)])
));
}
}, UpdateReturnMode.NOTHING),
@@ -118,10 +115,10 @@ public class DiskCache implements URLsDiskHandler, URLsWriter {
@Nullable DiskMetadata result;
if (prevBytes != null) {
DiskMetadata prevMeta = diskMetadataSerializer.deserialize(prevBytes);
if (!prevMeta.getDownloadedBlocks().getBoolean(dataBlock.getId())) {
BooleanArrayList bal = prevMeta.getDownloadedBlocks().clone();
if (!prevMeta.downloadedBlocks().getBoolean(dataBlock.getId())) {
BooleanArrayList bal = prevMeta.downloadedBlocks().clone();
bal.set(dataBlock.getId(), true);
result = new DiskMetadata(prevMeta.getSize(), bal);
result = new DiskMetadata(prevMeta.size(), bal);
} else {
result = prevMeta;
}
@@ -144,7 +141,7 @@ public class DiskCache implements URLsDiskHandler, URLsWriter {
public Flux<DataBlock> requestContent(URL url) {
return requestDiskMetadata(url)
.filter(DiskMetadata::isDownloadedFully)
.flatMapMany(meta -> Flux.fromIterable(meta.getDownloadedBlocks())
.flatMapMany(meta -> Flux.fromIterable(meta.downloadedBlocks())
.index()
// Get only downloaded blocks
.filter(Tuple2::getT2)
@@ -164,8 +161,8 @@ public class DiskCache implements URLsDiskHandler, URLsWriter {
try {
int blockOffset = getBlockOffset(blockId);
int blockLength = data.readableBytes();
if (blockOffset + blockLength >= meta.getSize()) {
if (blockOffset + blockLength > meta.getSize()) {
if (blockOffset + blockLength >= meta.size()) {
if (blockOffset + blockLength > meta.size()) {
throw new IllegalStateException("Overflowed data size");
}
} else {

DiskMetadata.java

@@ -29,39 +29,30 @@ import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import lombok.Data;
import lombok.SneakyThrows;
import java.io.IOException;
import org.apache.commons.lang3.SerializationException;
import org.jetbrains.annotations.NotNull;
import org.warp.filesponge.DiskMetadata.DiskMetadataSerializer;
@Data
public class DiskMetadata {
/**
* -1 = unknown size
*/
private final int size;
private final BooleanArrayList downloadedBlocks;
private Boolean downloadedFully;
/**
* size -1 = unknown size
*/
public record DiskMetadata(int size, BooleanArrayList downloadedBlocks) {
public boolean isDownloadedFully() {
if (downloadedFully == null) {
// Ensure blocks count is valid by calling getBlocksCount()
getBlocksCount();
// It's fully downloaded if every block is true
downloadedFully = !this.getDownloadedBlocks().contains(false);
}
return downloadedFully;
boolean downloadedFullyVal;
// Ensure blocks count is valid by calling getBlocksCount()
getBlocksCount();
// It's fully downloaded if every block is true
downloadedFullyVal = !this.downloadedBlocks().contains(false);
return downloadedFullyVal;
}
private int getBlocksCount() {
var expectedBlocksCount = getBlocksCount(size, FileSponge.BLOCK_SIZE);
if (this.getDownloadedBlocks().size() != expectedBlocksCount) {
if (this.downloadedBlocks.size() != expectedBlocksCount) {
throw new IllegalStateException(
"Blocks array length (" + this.getDownloadedBlocks().size()
+ ") != expected blocks count (" + expectedBlocksCount + ")");
"Blocks array length (" + this.downloadedBlocks().size() + ") != expected blocks count ("
+ expectedBlocksCount + ")");
}
return expectedBlocksCount;
}
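
A record cannot declare instance fields beyond its components, so the lazily cached downloadedFully flag from the Lombok version is gone and isDownloadedFully() now rescans the block list on every call. If that ever became a hot path, one hypothetical alternative (not what this commit does) would be to derive the flag once in a static factory and carry it as an extra component:

// Hypothetical variant, not part of this commit: precompute the flag as a component.
public record DiskMetadata(int size, BooleanArrayList downloadedBlocks, boolean downloadedFully) {

    public static DiskMetadata of(int size, BooleanArrayList downloadedBlocks) {
        return new DiskMetadata(size, downloadedBlocks, !downloadedBlocks.contains(false));
    }
}
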
@@ -81,8 +72,7 @@ public class DiskMetadata {
public DiskMetadataSerializer(ByteBufAllocator allocator) {
this.allocator = allocator;
}
@SneakyThrows
@Override
public @NotNull DiskMetadata deserialize(@NotNull ByteBuf serialized) {
try {
@@ -95,24 +85,27 @@ public class DiskMetadata {
downloadedBlocks.add(dis.readBoolean());
}
return new DiskMetadata(size, downloadedBlocks);
} catch (IOException e) {
throw new SerializationException(e);
} finally {
serialized.release();
}
}
@SneakyThrows
@Override
public @NotNull ByteBuf serialize(@NotNull DiskMetadata deserialized) {
ByteBuf buffer = allocator.buffer();
try (var bos = new ByteBufOutputStream(buffer)) {
try (var dos = new DataOutputStream(bos)) {
dos.writeInt(deserialized.getSize());
dos.writeInt(deserialized.size());
deserialized.getBlocksCount();
for (boolean downloadedBlock : deserialized.getDownloadedBlocks()) {
for (boolean downloadedBlock : deserialized.downloadedBlocks) {
dos.writeBoolean(downloadedBlock);
}
}
return buffer;
} catch (IOException e) {
throw new SerializationException(e);
}
}
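
With @SneakyThrows gone, IOException is now wrapped explicitly in Apache Commons' SerializationException. A usage sketch of the serializer, assumed to run from code in the org.warp.filesponge package (the 1 KiB size and ByteBufAllocator.DEFAULT are illustrative; it relies on FileSponge.BLOCK_SIZE and the two-argument DiskMetadata.getBlocksCount being reachable there, as they are from DiskCache above):

var serializer = new DiskMetadata.DiskMetadataSerializer(ByteBufAllocator.DEFAULT);

// Metadata for an illustrative 1 KiB file with no blocks downloaded yet.
int size = 1024;
int blocks = DiskMetadata.getBlocksCount(size, FileSponge.BLOCK_SIZE);
DiskMetadata meta = new DiskMetadata(size, BooleanArrayList.wrap(new boolean[blocks]));

ByteBuf serialized = serializer.serialize(meta);            // caller owns the returned buffer
DiskMetadata restored = serializer.deserialize(serialized); // deserialize() releases it
assert restored.size() == meta.size();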


@@ -18,7 +18,6 @@
package org.warp.filesponge;
import it.cavallium.dbengine.database.disk.LLLocalDictionary.ReleasableSlice;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;

Metadata.java

@@ -18,14 +18,8 @@
package org.warp.filesponge;
import lombok.Value;
@Value
public class Metadata {
/**
* -1 = unknown size
*/
int size;
}
/**
* size -1 = unknown size
*/
public record Metadata(int size) {}
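
Call sites only change in the accessor name: Lombok's @Value generated getSize(), while the record exposes size(); equals, hashCode and toString are still generated, now by the record itself. A minimal illustration:

Metadata metadata = new Metadata(-1); // -1 = unknown size, as documented above
int size = metadata.size();           // was metadata.getSize() with Lombok's @Value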