Merge pull request #68 from netty/modules
Split the repo into multiple modules and make building with Java 11 possible
commit aaf8e294cc
.github/workflows/ci-workflow.yml (vendored), 42 lines changed
@@ -1,4 +1,4 @@
-name: CI Build
+name: Build

 # Controls when the action will run.
 on:
@@ -13,14 +13,9 @@ on:
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:

-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
 jobs:
-  # This workflow contains a single job called "build"
-  build:
-    # The type of runner that the job will run on
+  java17:
     runs-on: ubuntu-latest

-    # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
       # http://man7.org/linux/man-pages/man1/date.1.html
       - name: Create Cache Key
@@ -50,7 +45,7 @@ jobs:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           report_paths: '**/target/surefire-reports/TEST-*.xml'
           commit: ${{ github.event.workflow_run.head_commit.id }}
-          check_name: Build test reports
+          check_name: java17 test reports
       - name: Upload build artefacts
         uses: actions/upload-artifact@v2
         if: ${{ failure() }}
@@ -60,3 +55,34 @@ jobs:
       # Make room for the docker layer caching to package up layers
       - name: Cleanup
         run: rm -fr *
+  java11:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Java 11
+        uses: actions/setup-java@v2
+        with:
+          distribution: 'adopt' # See 'Supported distributions' for available options
+          java-version: '11'
+      - uses: actions/cache@v2
+        with:
+          path: ~/.m2/repository
+          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
+          restore-keys: |
+            ${{ runner.os }}-maven-
+      - name: Maven build
+        run: mvn verify -B -C -T1C -fae
+      - name: Publish Test Report
+        uses: scacap/action-surefire-report@v1.0.9
+        if: ${{ always() }}
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          report_paths: '**/target/surefire-reports/TEST-*.xml'
+          commit: ${{ github.event.workflow_run.head_commit.id }}
+          check_name: java11 test reports
+      - name: Upload build artefacts
+        uses: actions/upload-artifact@v2
+        if: ${{ failure() }}
+        with:
+          name: artifacts
+          path: target/
@@ -29,8 +29,12 @@ RUN git clone --depth 1 -b master https://github.com/netty/netty.git netty \
  && rm -fr netty

 # Prepare our own build
+RUN mkdir buffer-api && mkdir buffer-memseg && mkdir buffer-tests
 COPY pom.xml pom.xml
-RUN mvn dependency:go-offline surefire:test checkstyle:check -ntp
+COPY buffer-api/pom.xml buffer-api/pom.xml
+COPY buffer-memseg/pom.xml buffer-memseg/pom.xml
+COPY buffer-tests/pom.xml buffer-tests/pom.xml
+RUN mvn install dependency:go-offline surefire:test checkstyle:check -ntp

 # Copy over the project code and run our build
 COPY . .
Makefile, 3 lines changed
@@ -25,6 +25,9 @@ build: image
 	docker start -a build-container || (docker cp build-container:/home/build target/container-output && false)
 	docker wait build-container || (docker cp build-container:/home/build target/container-output && false)
 	docker cp build-container:/home/build/target .
+	docker cp build-container:/home/build/buffer-api/target .
+	docker cp build-container:/home/build/buffer-memseg/target .
+	docker cp build-container:/home/build/buffer-tests/target .
 	docker rm build-container

 rebuild: clean clean-layer-cache build
@@ -6,9 +6,9 @@ See the xref:RATIONALE.adoc[RATIONALE] document for more background.

 == Building and Testing

-Short version: just run `make`.
+Short version: just run `make` if you want to build on Java 17, otherwise run `mvn install` if you want to build with Java 11, and without support for the `jdk.incubator.foreign` APIs.

-The project currently relies on snapshot versions of the https://github.com/openjdk/panama-foreign[Panama Foreign] fork of OpenJDK.
+The project (specifically, the `buffer-memseg` module) currently relies on snapshot versions of the https://github.com/openjdk/panama-foreign[Panama Foreign] fork of OpenJDK.
 This allows us to test out the most recent version of the `jdk.incubator.foreign` APIs, but also make building, and local development more involved.
 To simplify things, we have a Docker based build, controlled via a Makefile with the following commands:

buffer-api/pom.xml (new file, 43 lines)
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Copyright 2021 The Netty Project
+  ~
+  ~ The Netty Project licenses this file to you under the Apache License,
+  ~ version 2.0 (the "License"); you may not use this file except in compliance
+  ~ with the License. You may obtain a copy of the License at:
+  ~
+  ~   https://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  ~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  ~ License for the specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>io.netty.incubator</groupId>
+    <artifactId>netty-incubator-buffer-parent</artifactId>
+    <version>0.0.1.Final-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>netty-incubator-buffer-api</artifactId>
+  <version>0.0.1.Final-SNAPSHOT</version>
+  <name>Netty/Incubator/Buffer</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-buffer</artifactId>
+    </dependency>
+  </dependencies>
+</project>
@@ -15,10 +15,11 @@
  */
 package io.netty.buffer.api;

+import io.netty.buffer.api.internal.Statics;
+
 import java.lang.invoke.VarHandle;
 import java.util.Objects;

-import static io.netty.buffer.api.internal.Statics.findVarHandle;
 import static java.lang.invoke.MethodHandles.lookup;

 /**
@@ -36,7 +37,7 @@ import static java.lang.invoke.MethodHandles.lookup;
  * @param <T> The concrete {@link BufferHolder} type.
  */
 public abstract class BufferHolder<T extends BufferHolder<T>> implements Rc<T> {
-    private static final VarHandle BUF = findVarHandle(lookup(), BufferHolder.class, "buf", Buffer.class);
+    private static final VarHandle BUF = Statics.findVarHandle(lookup(), BufferHolder.class, "buf", Buffer.class);
     private Buffer buf;

     /**
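Note: the BUF constant above is created through Statics.findVarHandle(lookup(), ...), which simply wraps MethodHandles.Lookup.findVarHandle (the new Statics interface appears later in this diff). As a rough, hypothetical sketch of the same pattern outside Netty, a VarHandle obtained from a private lookup can read and compare-and-set a field atomically:

    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.VarHandle;

    // Hypothetical example, not part of this commit.
    final class Holder {
        private static final VarHandle VALUE;
        static {
            try {
                VALUE = MethodHandles.lookup().findVarHandle(Holder.class, "value", String.class);
            } catch (ReflectiveOperationException e) {
                throw new ExceptionInInitializerError(e);
            }
        }

        private volatile String value = "initial";

        boolean replace(String expected, String update) {
            // Atomic compare-and-set on the private field through the VarHandle.
            return VALUE.compareAndSet(this, expected, update);
        }
    }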
@@ -15,7 +15,6 @@
  */
 package io.netty.buffer.api;

-import java.io.Serial;
 import java.util.ArrayDeque;
 import java.util.Set;
 import java.util.function.Function;
@@ -176,7 +175,6 @@ abstract class LifecycleTracer {
     }

     private static final class Traceback extends Throwable {
-        @Serial
         private static final long serialVersionUID = 941453986194634605L;

         Traceback(String message) {
@@ -15,7 +15,7 @@
  */
 package io.netty.buffer.api;

-import io.netty.buffer.api.memseg.SegmentMemoryManagers;
+import io.netty.buffer.api.bytebuffer.ByteBufferMemoryManagers;

 import java.util.Collections;
 import java.util.IdentityHashMap;
@@ -24,7 +24,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Supplier;

 final class MemoryManagersOverride {
-    private static final MemoryManagers DEFAULT = new SegmentMemoryManagers();
+    private static final MemoryManagers DEFAULT = new ByteBufferMemoryManagers();
     private static final AtomicInteger OVERRIDES_AVAILABLE = new AtomicInteger();
     private static final Map<Thread, MemoryManagers> OVERRIDES = Collections.synchronizedMap(new IdentityHashMap<>());

@@ -15,13 +15,14 @@
  */
 package io.netty.buffer.api;

+import io.netty.buffer.api.internal.Statics;
+
 import java.lang.invoke.VarHandle;

-import static io.netty.buffer.api.internal.Statics.findVarHandle;
 import static java.lang.invoke.MethodHandles.lookup;

 class TransferSend<I extends Rc<I>, T extends Rc<I>> implements Send<I> {
-    private static final VarHandle RECEIVED = findVarHandle(lookup(), TransferSend.class, "received", boolean.class);
+    private static final VarHandle RECEIVED = Statics.findVarHandle(lookup(), TransferSend.class, "received", boolean.class);
     private final Owned<T> outgoing;
     private final Drop<T> drop;
     private final Class<?> concreteType;
@@ -22,8 +22,9 @@ import io.netty.buffer.ByteBufUtil;
 import io.netty.buffer.DuplicatedByteBuf;
 import io.netty.buffer.SlicedByteBuf;
 import io.netty.buffer.Unpooled;
-import io.netty.buffer.api.Buffer;
 import io.netty.buffer.api.BufferAllocator;
+import io.netty.buffer.api.internal.Statics;
+import io.netty.buffer.api.Buffer;
 import io.netty.buffer.api.RcSupport;
 import io.netty.util.ByteProcessor;
 import io.netty.util.IllegalReferenceCountException;
@@ -1514,7 +1515,7 @@ public final class ByteBufAdaptor extends ByteBuf {
             });
             ByteBuffer buffer = bufRef.get();
             if (index != readerIndex() || length != readableBytes()) {
-                buffer = buffer.slice(index - readerIndex(), length);
+                buffer = Statics.bbslice(buffer, index - readerIndex(), length);
             }
             return buffer;
         } else if (writerIndex() <= index && length <= writableBytes()) {
@@ -1530,7 +1531,7 @@ public final class ByteBufAdaptor extends ByteBuf {
             });
             ByteBuffer buffer = bufRef.get();
             if (index != writerIndex() || length != writableBytes()) {
-                buffer = buffer.slice(index - writerIndex(), length);
+                buffer = Statics.bbslice(buffer, index - writerIndex(), length);
             }
             return buffer;
         } else {
@@ -25,6 +25,7 @@ import java.lang.ref.Cleaner;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;

+import static io.netty.buffer.api.internal.Statics.bbslice;
 import static io.netty.buffer.api.internal.Statics.convert;

 public class ByteBufferMemoryManager implements MemoryManager {
@@ -84,6 +85,6 @@ public class ByteBufferMemoryManager implements MemoryManager {
     @Override
     public Object sliceMemory(Object memory, int offset, int length) {
         var buffer = (ByteBuffer) memory;
-        return buffer.slice(offset, length);
+        return bbslice(buffer, offset, length);
     }
 }
@@ -34,6 +34,8 @@ import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.nio.ReadOnlyBufferException;

+import static io.netty.buffer.api.internal.Statics.bbput;
+import static io.netty.buffer.api.internal.Statics.bbslice;
 import static io.netty.buffer.api.internal.Statics.bufferIsClosed;
 import static io.netty.buffer.api.internal.Statics.bufferIsReadOnly;

@@ -64,7 +66,7 @@ class NioBuffer extends RcSupport<Buffer, NioBuffer> implements Buffer, Readable
         super(new MakeInaccisbleOnDrop(new ArcDrop<>(ArcDrop.acquire(parent.unsafeGetDrop()))));
         control = parent.control;
         base = parent.base;
-        rmem = parent.rmem.slice(0, parent.rmem.capacity()); // Need to slice to get independent byte orders.
+        rmem = bbslice(parent.rmem, 0, parent.rmem.capacity()); // Need to slice to get independent byte orders.
         assert parent.wmem == CLOSED_BUFFER;
         wmem = CLOSED_BUFFER;
         roff = parent.roff;
@@ -193,7 +195,7 @@ class NioBuffer extends RcSupport<Buffer, NioBuffer> implements Buffer, Readable
         if (!isAccessible()) {
             throw new IllegalStateException("This buffer is closed: " + this + '.');
         }
-        ByteBuffer slice = rmem.slice(offset, length);
+        ByteBuffer slice = bbslice(rmem, offset, length);
         ArcDrop<NioBuffer> drop = (ArcDrop<NioBuffer>) unsafeGetDrop();
         drop.increment();
         Buffer sliceBuffer = new NioBuffer(base, slice, control, drop)
@@ -226,7 +228,7 @@ class NioBuffer extends RcSupport<Buffer, NioBuffer> implements Buffer, Readable
                     "srcPos = " + srcPos + ", length = " + length + '.');
         }
         dest = dest.duplicate().clear();
-        dest.put(destPos, rmem, srcPos, length);
+        bbput(dest, destPos, rmem, srcPos, length);
     }

     @Override
@@ -453,12 +455,13 @@ class NioBuffer extends RcSupport<Buffer, NioBuffer> implements Buffer, Readable
         }
         var drop = (ArcDrop<NioBuffer>) unsafeGetDrop();
         unsafeSetDrop(new ArcDrop<>(drop));
-        var splitByteBuffer = rmem.slice(0, splitOffset);
+        var splitByteBuffer = bbslice(rmem, 0, splitOffset);
         // TODO maybe incrementing the existing ArcDrop is enough; maybe we don't need to wrap it in another ArcDrop.
         var splitBuffer = new NioBuffer(base, splitByteBuffer, control, new ArcDrop<>(drop.increment()));
         splitBuffer.woff = Math.min(woff, splitOffset);
         splitBuffer.roff = Math.min(roff, splitOffset);
-        splitBuffer.order(order());
+        ByteOrder order = order();
+        splitBuffer.order(order);
         boolean readOnly = readOnly();
         if (readOnly) {
             splitBuffer.makeReadOnly();
@@ -466,12 +469,13 @@ class NioBuffer extends RcSupport<Buffer, NioBuffer> implements Buffer, Readable
         // Note that split, unlike slice, does not deconstify, because data changes in either buffer are not visible
         // in the other. The split buffers can later deconstify independently if needed.
         splitBuffer.constBuffer = constBuffer;
-        rmem = rmem.slice(splitOffset, rmem.capacity() - splitOffset);
+        rmem = bbslice(rmem, splitOffset, rmem.capacity() - splitOffset);
         if (!readOnly) {
             wmem = rmem;
         }
         woff = Math.max(woff, splitOffset) - splitOffset;
         roff = Math.max(roff, splitOffset) - splitOffset;
+        order(order);
         return splitBuffer;
     }

@@ -534,7 +538,7 @@ class NioBuffer extends RcSupport<Buffer, NioBuffer> implements Buffer, Readable

     @Override
     public ByteBuffer readableBuffer() {
-        return rmem.asReadOnlyBuffer().slice(readerOffset(), readableBytes()).order(order());
+        return bbslice(rmem.asReadOnlyBuffer(), readerOffset(), readableBytes()).order(order());
     }

     @Override
@@ -564,7 +568,7 @@ class NioBuffer extends RcSupport<Buffer, NioBuffer> implements Buffer, Readable

     @Override
     public ByteBuffer writableBuffer() {
-        return wmem.slice(writerOffset(), writableBytes()).order(order());
+        return bbslice(wmem, writerOffset(), writableBytes()).order(order());
     }
     // </editor-fold>

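Note: the captured ByteOrder that split() re-applies above, like the "independent byte orders" comment in the constructor, exists because a freshly created ByteBuffer slice always starts out big-endian regardless of the parent's order. A small standalone illustration of that JDK behaviour (not part of this commit):

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    public class SliceOrderDemo {
        public static void main(String[] args) {
            ByteBuffer parent = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
            ByteBuffer slice = parent.slice();
            // The slice does not inherit the parent's byte order; it is BIG_ENDIAN again,
            // which is why NioBuffer captures order() before re-slicing and restores it afterwards.
            System.out.println(parent.order()); // LITTLE_ENDIAN
            System.out.println(slice.order());  // BIG_ENDIAN
        }
    }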
@@ -17,7 +17,6 @@ package io.netty.buffer.api.internal;

 import io.netty.buffer.api.Drop;

-import java.io.Serial;
 import java.lang.ref.Cleaner;
 import java.util.concurrent.atomic.AtomicReference;

@@ -62,7 +61,6 @@ public final class CleanerDrop<T> implements Drop<T> {
     }

     private static final class GatedRunner<T> extends AtomicReference<T> implements Runnable {
-        @Serial
         private static final long serialVersionUID = 2685535951915798850L;
         final Drop<T> drop;

@@ -0,0 +1,170 @@
+/*
+ * Copyright 2020 The Netty Project
+ *
+ * The Netty Project licenses this file to you under the Apache License,
+ * version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at:
+ *
+ *   https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package io.netty.buffer.api.internal;
+
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.Drop;
+
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandles;
+import java.lang.invoke.MethodHandles.Lookup;
+import java.lang.invoke.MethodType;
+import java.lang.invoke.VarHandle;
+import java.lang.ref.Cleaner;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.concurrent.atomic.LongAdder;
+
+public interface Statics {
+    LongAdder MEM_USAGE_NATIVE = new LongAdder();
+    Cleaner CLEANER = Cleaner.create();
+    Drop<Buffer> NO_OP_DROP = new Drop<Buffer>() {
+        @Override
+        public void drop(Buffer obj) {
+        }
+
+        @Override
+        public String toString() {
+            return "NO_OP_DROP";
+        }
+    };
+    MethodHandle BB_SLICE_OFFSETS = getByteBufferSliceOffsetsMethodHandle();
+    MethodHandle BB_PUT_OFFSETS = getByteBufferPutOffsetsMethodHandle();
+
+    static MethodHandle getByteBufferSliceOffsetsMethodHandle() {
+        try {
+            Lookup lookup = MethodHandles.lookup();
+            MethodType type = MethodType.methodType(ByteBuffer.class, int.class, int.class);
+            return lookup.findVirtual(ByteBuffer.class, "slice", type);
+        } catch (Exception ignore) {
+            return null;
+        }
+    }
+
+    static MethodHandle getByteBufferPutOffsetsMethodHandle() {
+        try {
+            Lookup lookup = MethodHandles.lookup();
+            MethodType type = MethodType.methodType(ByteBuffer.class, int.class, ByteBuffer.class, int.class, int.class);
+            return lookup.findVirtual(ByteBuffer.class, "put", type);
+        } catch (Exception ignore) {
+            return null;
+        }
+    }
+
+    static VarHandle findVarHandle(Lookup lookup, Class<?> recv, String name, Class<?> type) {
+        try {
+            return lookup.findVarHandle(recv, name, type);
+        } catch (Exception e) {
+            throw new ExceptionInInitializerError(e);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    static <T, R> Drop<R> convert(Drop<T> drop) {
+        return (Drop<R>) drop;
+    }
+
+    static void copyToViaReverseCursor(Buffer src, int srcPos, Buffer dest, int destPos, int length) {
+        // Iterate in reverse to account for src and dest buffer overlap.
+        var itr = src.openReverseCursor(srcPos + length - 1, length);
+        ByteOrder prevOrder = dest.order();
+        // We read longs in BE, in reverse, so they need to be flipped for writing.
+        dest.order(ByteOrder.LITTLE_ENDIAN);
+        try {
+            while (itr.readLong()) {
+                long val = itr.getLong();
+                length -= Long.BYTES;
+                dest.setLong(destPos + length, val);
+            }
+            while (itr.readByte()) {
+                dest.setByte(destPos + --length, itr.getByte());
+            }
+        } finally {
+            dest.order(prevOrder);
+        }
+    }
+
+    /**
+     * The ByteBuffer slice-with-offset-and-length method is only available from Java 13 and onwards, but we need to
+     * support Java 11.
+     */
+    static ByteBuffer bbslice(ByteBuffer buffer, int fromOffset, int length) {
+        if (BB_SLICE_OFFSETS != null) {
+            return bbsliceJdk13(buffer, fromOffset, length);
+        }
+        return bbsliceFallback(buffer, fromOffset, length);
+    }
+
+    private static ByteBuffer bbsliceJdk13(ByteBuffer buffer, int fromOffset, int length) {
+        try {
+            return (ByteBuffer) BB_SLICE_OFFSETS.invokeExact(buffer, fromOffset, length);
+        } catch (RuntimeException re) {
+            throw re;
+        } catch (Throwable throwable) {
+            throw new LinkageError("Unexpected exception from ByteBuffer.slice(int,int).", throwable);
+        }
+    }
+
+    private static ByteBuffer bbsliceFallback(ByteBuffer buffer, int fromOffset, int length) {
+        if (fromOffset < 0) {
+            throw new IndexOutOfBoundsException("The fromOffset must be positive: " + fromOffset + '.');
+        }
+        int newLimit = fromOffset + length;
+        if (newLimit > buffer.capacity()) {
+            throw new IndexOutOfBoundsException(
+                    "The limit of " + newLimit + " would be greater than capacity: " + buffer.capacity() + '.');
+        }
+        try {
+            return buffer.position(fromOffset).limit(newLimit).slice();
+        } finally {
+            buffer.clear();
+        }
+    }
+
+    /**
+     * The ByteBuffer put-buffer-with-offset-and-length method is not available in Java 11.
+     */
+    static void bbput(ByteBuffer dest, int destPos, ByteBuffer src, int srcPos, int length) {
+        if (BB_PUT_OFFSETS != null) {
+            bbputJdk16(dest, destPos, src, srcPos, length);
+        } else {
+            bbputFallback(dest, destPos, src, srcPos, length);
+        }
+    }
+
+    private static void bbputJdk16(ByteBuffer dest, int destPos, ByteBuffer src, int srcPos, int length) {
+        try {
+            @SuppressWarnings("unused") // We need to cast the return type in order to invokeExact.
+            ByteBuffer ignore = (ByteBuffer) BB_PUT_OFFSETS.invokeExact(dest, destPos, src, srcPos, length);
+        } catch (RuntimeException re) {
+            throw re;
+        } catch (Throwable throwable) {
+            throw new LinkageError("Unexpected exception from ByteBuffer.put(int,ByteBuffer,int,int).", throwable);
+        }
+    }
+
+    private static void bbputFallback(ByteBuffer dest, int destPos, ByteBuffer src, int srcPos, int length) {
+        dest.position(destPos).put(bbslice(src, srcPos, length));
+    }
+
+    static IllegalStateException bufferIsClosed() {
+        return new IllegalStateException("This buffer is closed.");
+    }
+
+    static IllegalStateException bufferIsReadOnly() {
+        return new IllegalStateException("This buffer is read-only.");
+    }
+}
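Note: bbslice and bbput above are pure Java 11 compatibility shims. As the Javadoc in the file says, ByteBuffer.slice(int, int) only exists from Java 13 onwards (and the put overload targeted by bbputJdk16 from Java 16), so on Java 11 the method handles resolve to null and the position/limit fallbacks take over. A rough usage sketch, hypothetical and not part of the commit:

    import java.nio.ByteBuffer;
    import io.netty.buffer.api.internal.Statics;

    public class BbsliceDemo {
        public static void main(String[] args) {
            ByteBuffer whole = ByteBuffer.allocate(16);
            // On Java 13+ this delegates to whole.slice(4, 8) via a MethodHandle;
            // on Java 11 it falls back to position/limit/slice and then resets the source's position and limit.
            ByteBuffer part = Statics.bbslice(whole, 4, 8);
            System.out.println(part.capacity()); // 8

            ByteBuffer dest = ByteBuffer.allocate(16);
            // Copies 8 bytes of `whole`, starting at offset 4, into `dest` at offset 2;
            // on Java 16+ this leaves both buffers' positions untouched.
            Statics.bbput(dest, 2, whole, 4, 8);
        }
    }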
@@ -20,6 +20,8 @@ import io.netty.buffer.api.Buffer;
 import io.netty.buffer.api.MemoryManager;
 import io.netty.util.internal.StringUtil;

+import java.lang.invoke.MethodHandles;
+import java.lang.invoke.VarHandle;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -29,6 +31,7 @@ import static io.netty.buffer.api.pool.PoolChunk.isSubpage;
 import static java.lang.Math.max;

 class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl {
+    private static final VarHandle SUBPAGE_ARRAY = MethodHandles.arrayElementVarHandle(PoolSubpage[].class);
     enum SizeClass {
         Small,
         Normal
@@ -76,9 +79,6 @@ class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl

         numSmallSubpagePools = nSubpages;
         smallSubpagePools = newSubpagePoolArray(numSmallSubpagePools);
-        for (int i = 0; i < smallSubpagePools.length; i ++) {
-            smallSubpagePools[i] = newSubpagePoolHead();
-        }

         q100 = new PoolChunkList(this, null, 100, Integer.MAX_VALUE, chunkSize);
         q075 = new PoolChunkList(this, q100, 75, 100, chunkSize);
@@ -135,7 +135,7 @@ class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl
          * Synchronize on the head. This is needed as {@link PoolChunk#allocateSubpage(int)} and
          * {@link PoolChunk#free(long)} may modify the doubly linked list as well.
          */
-        final PoolSubpage head = smallSubpagePools[sizeIdx];
+        PoolSubpage head = findSubpagePoolHead(sizeIdx);
         final boolean needsNormalAllocation;
         synchronized (head) {
             final PoolSubpage s = head.next;
@@ -246,7 +246,15 @@ class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl
     }

     PoolSubpage findSubpagePoolHead(int sizeIdx) {
-        return smallSubpagePools[sizeIdx];
+        PoolSubpage head = (PoolSubpage) SUBPAGE_ARRAY.getVolatile(smallSubpagePools, sizeIdx);
+        if (head == null) {
+            head = newSubpagePoolHead();
+            if (!SUBPAGE_ARRAY.compareAndSet(smallSubpagePools, sizeIdx, null, head)) {
+                // We lost the race. Read the winning value.
+                head = (PoolSubpage) SUBPAGE_ARRAY.getVolatile(smallSubpagePools, sizeIdx);
+            }
+        }
+        return head;
     }

     @Override
@@ -288,8 +296,9 @@ class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl

     private static List<PoolSubpageMetric> subPageMetricList(PoolSubpage[] pages) {
         List<PoolSubpageMetric> metrics = new ArrayList<>();
-        for (PoolSubpage head : pages) {
-            if (head.next == head) {
+        for (int i = 0, len = pages.length; i < len; i++) {
+            PoolSubpage head = (PoolSubpage) SUBPAGE_ARRAY.getVolatile(pages, i);
+            if (head == null || head.next == head) {
                 continue;
             }
             PoolSubpage s = head.next;
@@ -311,11 +320,6 @@ class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl
         return allocationsSmall.longValue() + allocsNormal + allocationsHuge.longValue();
     }

-    @Override
-    public long numTinyAllocations() {
-        return 0;
-    }
-
     @Override
     public long numSmallAllocations() {
         return allocationsSmall.longValue();
@@ -335,11 +339,6 @@ class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl
         return deallocs + deallocationsHuge.longValue();
     }

-    @Override
-    public long numTinyDeallocations() {
-        return 0;
-    }
-
     @Override
     public synchronized long numSmallDeallocations() {
         return deallocationsSmall;
@@ -442,8 +441,8 @@ class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl

     private static void appendPoolSubPages(StringBuilder buf, PoolSubpage[] subpages) {
         for (int i = 0; i < subpages.length; i ++) {
-            PoolSubpage head = subpages[i];
-            if (head.next == head) {
+            PoolSubpage head = (PoolSubpage) SUBPAGE_ARRAY.getVolatile(subpages, i);
+            if (head == null || head.next == head) {
                 continue;
             }

@@ -459,9 +458,12 @@ class PoolArena extends SizeClasses implements PoolArenaMetric, AllocatorControl
     }

     public void close() {
-        for (PoolSubpage page : smallSubpagePools) {
-            page.destroy();
+        for (int i = 0, len = smallSubpagePools.length; i < len; i++) {
+            PoolSubpage page = (PoolSubpage) SUBPAGE_ARRAY.getVolatile(smallSubpagePools, i);
+            if (page != null) {
+                page.destroy();
+            }
         }
         for (PoolChunkList list : new PoolChunkList[] {qInit, q000, q025, q050, q100}) {
             list.destroy();
         }
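Note: findSubpagePoolHead above switches from eagerly filling the subpage-head array to publishing heads lazily, using an array-element VarHandle so that two threads racing to create the same head agree on a single winner. A standalone sketch of that publication pattern, with hypothetical names and not part of the commit:

    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.VarHandle;
    import java.util.function.Supplier;

    // Lazily initialise one slot of a shared array; the CAS loser adopts the winner's value.
    final class LazySlots<T> {
        private static final VarHandle SLOT = MethodHandles.arrayElementVarHandle(Object[].class);
        private final Object[] slots;

        LazySlots(int size) {
            slots = new Object[size];
        }

        @SuppressWarnings("unchecked")
        T get(int index, Supplier<T> factory) {
            Object value = SLOT.getVolatile(slots, index);
            if (value == null) {
                Object created = factory.get();
                if (SLOT.compareAndSet(slots, index, null, created)) {
                    value = created;
                } else {
                    // We lost the race; read the value the winning thread installed.
                    value = SLOT.getVolatile(slots, index);
                }
            }
            return (T) value;
        }
    }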
@@ -52,14 +52,6 @@ public interface PoolArenaMetric extends SizeClassesMetric {
      */
     long numAllocations();

-    /**
-     * Return the number of tiny allocations done via the arena.
-     *
-     * @deprecated Tiny allocations have been merged into small allocations.
-     */
-    @Deprecated
-    long numTinyAllocations();
-
     /**
      * Return the number of small allocations done via the arena.
      */
@@ -80,14 +72,6 @@ public interface PoolArenaMetric extends SizeClassesMetric {
      */
     long numDeallocations();

-    /**
-     * Return the number of tiny deallocations done via the arena.
-     *
-     * @deprecated Tiny deallocations have been merged into small deallocations.
-     */
-    @Deprecated
-    long numTinyDeallocations();
-
     /**
      * Return the number of small deallocations done via the arena.
      */
@@ -15,12 +15,12 @@
  */
 package io.netty.buffer.api.pool;

+import io.netty.buffer.api.internal.CleanerDrop;
 import io.netty.buffer.api.AllocatorControl.UntetheredMemory;
 import io.netty.buffer.api.Buffer;
 import io.netty.buffer.api.Drop;
 import io.netty.buffer.api.MemoryManager;
 import io.netty.buffer.api.internal.ArcDrop;
-import io.netty.buffer.api.internal.CleanerDrop;
 import io.netty.buffer.api.internal.Statics;

 import java.util.PriorityQueue;
@@ -431,8 +431,9 @@ public class PooledBufferAllocator implements BufferAllocator, BufferAllocatorMe
         protected void onRemoval(PoolThreadCache threadCache) {
             threadCache.free();
         }
+    }

-    private static PoolArena leastUsedArena(PoolArena[] arenas) {
+    static PoolArena leastUsedArena(PoolArena[] arenas) {
         if (arenas == null || arenas.length == 0) {
             return null;
         }
@@ -447,7 +448,6 @@ public class PooledBufferAllocator implements BufferAllocator, BufferAllocatorMe

         return minArena;
     }
-    }

     @Override
     public PooledBufferAllocatorMetric metric() {
@ -15,7 +15,7 @@
|
|||||||
*/
|
*/
|
||||||
package io.netty.buffer.api.pool;
|
package io.netty.buffer.api.pool;
|
||||||
|
|
||||||
import static io.netty.buffer.api.pool.PoolThreadCache.*;
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* SizeClasses requires {@code pageShifts} to be defined prior to inclusion,
|
* SizeClasses requires {@code pageShifts} to be defined prior to inclusion,
|
||||||
@ -79,13 +79,14 @@ import static io.netty.buffer.api.pool.PoolThreadCache.*;
|
|||||||
* ( 76, 24, 22, 1, yes, no, no)
|
* ( 76, 24, 22, 1, yes, no, no)
|
||||||
*/
|
*/
|
||||||
abstract class SizeClasses implements SizeClassesMetric {
|
abstract class SizeClasses implements SizeClassesMetric {
|
||||||
|
private static final ConcurrentHashMap<SizeClassKey, SizeClassValue> CACHE =
|
||||||
|
new ConcurrentHashMap<SizeClassKey, SizeClassValue>();
|
||||||
|
|
||||||
static final int LOG2_QUANTUM = 4;
|
static final int LOG2_QUANTUM = 4;
|
||||||
|
|
||||||
private static final int LOG2_SIZE_CLASS_GROUP = 2;
|
private static final int LOG2_SIZE_CLASS_GROUP = 2;
|
||||||
private static final int LOG2_MAX_LOOKUP_SIZE = 12;
|
private static final int LOG2_MAX_LOOKUP_SIZE = 12;
|
||||||
|
|
||||||
private static final int INDEX_IDX = 0;
|
|
||||||
private static final int LOG2GROUP_IDX = 1;
|
private static final int LOG2GROUP_IDX = 1;
|
||||||
private static final int LOG2DELTA_IDX = 2;
|
private static final int LOG2DELTA_IDX = 2;
|
||||||
private static final int NDELTA_IDX = 3;
|
private static final int NDELTA_IDX = 3;
|
||||||
@ -101,20 +102,17 @@ abstract class SizeClasses implements SizeClassesMetric {
|
|||||||
this.chunkSize = chunkSize;
|
this.chunkSize = chunkSize;
|
||||||
this.directMemoryCacheAlignment = directMemoryCacheAlignment;
|
this.directMemoryCacheAlignment = directMemoryCacheAlignment;
|
||||||
|
|
||||||
int group = log2(chunkSize) + 1 - LOG2_QUANTUM;
|
SizeClassValue value = CACHE.computeIfAbsent(
|
||||||
|
new SizeClassKey(pageSize, pageShifts, chunkSize, directMemoryCacheAlignment),
|
||||||
//generate size classes
|
SizeClassValue::new);
|
||||||
//[index, log2Group, log2Delta, nDelta, isMultiPageSize, isSubPage, log2DeltaLookup]
|
nSizes = value.nSizes;
|
||||||
sizeClasses = new short[group << LOG2_SIZE_CLASS_GROUP][7];
|
nSubpages = value.nSubpages;
|
||||||
nSizes = sizeClasses();
|
nPSizes = value.nPSizes;
|
||||||
|
smallMaxSizeIdx = value.smallMaxSizeIdx;
|
||||||
//generate lookup table
|
lookupMaxSize = value.lookupMaxSize;
|
||||||
sizeIdx2sizeTab = new int[nSizes];
|
pageIdx2sizeTab = value.pageIdx2sizeTab;
|
||||||
pageIdx2sizeTab = new int[nPSizes];
|
sizeIdx2sizeTab = value.sizeIdx2sizeTab;
|
||||||
idx2SizeTab(sizeIdx2sizeTab, pageIdx2sizeTab);
|
size2idxTab = value.size2idxTab;
|
||||||
|
|
||||||
size2idxTab = new int[lookupMaxSize >> LOG2_QUANTUM];
|
|
||||||
size2idxTab(size2idxTab);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected final int pageSize;
|
protected final int pageSize;
|
||||||
@ -123,15 +121,11 @@ abstract class SizeClasses implements SizeClassesMetric {
|
|||||||
protected final int directMemoryCacheAlignment;
|
protected final int directMemoryCacheAlignment;
|
||||||
|
|
||||||
final int nSizes;
|
final int nSizes;
|
||||||
int nSubpages;
|
final int nSubpages;
|
||||||
int nPSizes;
|
final int nPSizes;
|
||||||
|
final int smallMaxSizeIdx;
|
||||||
int smallMaxSizeIdx;
|
|
||||||
|
|
||||||
private int lookupMaxSize;
|
|
||||||
|
|
||||||
private final short[][] sizeClasses;
|
|
||||||
|
|
||||||
|
private final int lookupMaxSize;
|
||||||
private final int[] pageIdx2sizeTab;
|
private final int[] pageIdx2sizeTab;
|
||||||
|
|
||||||
// lookup table for sizeIdx <= smallMaxSizeIdx
|
// lookup table for sizeIdx <= smallMaxSizeIdx
|
||||||
@ -141,125 +135,6 @@ abstract class SizeClasses implements SizeClassesMetric {
|
|||||||
// spacing is 1 << LOG2_QUANTUM, so the size of array is lookupMaxclass >> LOG2_QUANTUM
|
// spacing is 1 << LOG2_QUANTUM, so the size of array is lookupMaxclass >> LOG2_QUANTUM
|
||||||
private final int[] size2idxTab;
|
private final int[] size2idxTab;
|
||||||
|
|
||||||
private int sizeClasses() {
|
|
||||||
int normalMaxSize = -1;
|
|
||||||
|
|
||||||
int index = 0;
|
|
||||||
int size = 0;
|
|
||||||
|
|
||||||
int log2Group = LOG2_QUANTUM;
|
|
||||||
int log2Delta = LOG2_QUANTUM;
|
|
||||||
int ndeltaLimit = 1 << LOG2_SIZE_CLASS_GROUP;
|
|
||||||
|
|
||||||
//First small group, nDelta start at 0.
|
|
||||||
//first size class is 1 << LOG2_QUANTUM
|
|
||||||
int nDelta = 0;
|
|
||||||
while (nDelta < ndeltaLimit) {
|
|
||||||
size = sizeClass(index++, log2Group, log2Delta, nDelta++);
|
|
||||||
}
|
|
||||||
log2Group += LOG2_SIZE_CLASS_GROUP;
|
|
||||||
|
|
||||||
//All remaining groups, nDelta start at 1.
|
|
||||||
while (size < chunkSize) {
|
|
||||||
nDelta = 1;
|
|
||||||
|
|
||||||
while (nDelta <= ndeltaLimit && size < chunkSize) {
|
|
||||||
size = sizeClass(index++, log2Group, log2Delta, nDelta++);
|
|
||||||
normalMaxSize = size;
|
|
||||||
}
|
|
||||||
|
|
||||||
log2Group++;
|
|
||||||
log2Delta++;
|
|
||||||
}
|
|
||||||
|
|
||||||
//chunkSize must be normalMaxSize
|
|
||||||
assert chunkSize == normalMaxSize;
|
|
||||||
|
|
||||||
//return number of size index
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
|
|
||||||
//calculate size class
|
|
||||||
private int sizeClass(int index, int log2Group, int log2Delta, int nDelta) {
|
|
||||||
short isMultiPageSize;
|
|
||||||
if (log2Delta >= pageShifts) {
|
|
||||||
isMultiPageSize = yes;
|
|
||||||
} else {
|
|
||||||
int pageSize = 1 << pageShifts;
|
|
||||||
int size = (1 << log2Group) + (1 << log2Delta) * nDelta;
|
|
||||||
|
|
||||||
isMultiPageSize = size == size / pageSize * pageSize? yes : no;
|
|
||||||
}
|
|
||||||
|
|
||||||
int log2Ndelta = nDelta == 0? 0 : log2(nDelta);
|
|
||||||
|
|
||||||
byte remove = 1 << log2Ndelta < nDelta? yes : no;
|
|
||||||
|
|
||||||
int log2Size = log2Delta + log2Ndelta == log2Group? log2Group + 1 : log2Group;
|
|
||||||
if (log2Size == log2Group) {
|
|
||||||
remove = yes;
|
|
||||||
}
|
|
||||||
|
|
||||||
short isSubpage = log2Size < pageShifts + LOG2_SIZE_CLASS_GROUP? yes : no;
|
|
||||||
|
|
||||||
int log2DeltaLookup = log2Size < LOG2_MAX_LOOKUP_SIZE ||
|
|
||||||
log2Size == LOG2_MAX_LOOKUP_SIZE && remove == no
|
|
||||||
? log2Delta : no;
|
|
||||||
|
|
||||||
short[] sz = {
|
|
||||||
(short) index, (short) log2Group, (short) log2Delta,
|
|
||||||
(short) nDelta, isMultiPageSize, isSubpage, (short) log2DeltaLookup
|
|
||||||
};
|
|
||||||
|
|
||||||
sizeClasses[index] = sz;
|
|
||||||
int size = (1 << log2Group) + (nDelta << log2Delta);
|
|
||||||
|
|
||||||
if (sz[PAGESIZE_IDX] == yes) {
|
|
||||||
nPSizes++;
|
|
||||||
}
|
|
||||||
if (sz[SUBPAGE_IDX] == yes) {
|
|
||||||
nSubpages++;
|
|
||||||
smallMaxSizeIdx = index;
|
|
||||||
}
|
|
||||||
if (sz[LOG2_DELTA_LOOKUP_IDX] != no) {
|
|
||||||
lookupMaxSize = size;
|
|
||||||
}
|
|
||||||
return size;
|
|
||||||
}
|
|
||||||
|
|
||||||
private void idx2SizeTab(int[] sizeIdx2sizeTab, int[] pageIdx2sizeTab) {
|
|
||||||
int pageIdx = 0;
|
|
||||||
|
|
||||||
for (int i = 0; i < nSizes; i++) {
|
|
||||||
short[] sizeClass = sizeClasses[i];
|
|
||||||
int log2Group = sizeClass[LOG2GROUP_IDX];
|
|
||||||
int log2Delta = sizeClass[LOG2DELTA_IDX];
|
|
||||||
int nDelta = sizeClass[NDELTA_IDX];
|
|
||||||
|
|
||||||
int size = (1 << log2Group) + (nDelta << log2Delta);
|
|
||||||
sizeIdx2sizeTab[i] = size;
|
|
||||||
|
|
||||||
if (sizeClass[PAGESIZE_IDX] == yes) {
|
|
||||||
pageIdx2sizeTab[pageIdx++] = size;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void size2idxTab(int[] size2idxTab) {
|
|
||||||
int idx = 0;
|
|
||||||
int size = 0;
|
|
||||||
|
|
||||||
for (int i = 0; size <= lookupMaxSize; i++) {
|
|
||||||
int log2Delta = sizeClasses[i][LOG2DELTA_IDX];
|
|
||||||
int times = 1 << log2Delta - LOG2_QUANTUM;
|
|
||||||
|
|
||||||
while (size <= lookupMaxSize && times-- > 0) {
|
|
||||||
size2idxTab[idx++] = i;
|
|
||||||
size = idx + 1 << LOG2_QUANTUM;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int sizeIdx2size(int sizeIdx) {
|
public int sizeIdx2size(int sizeIdx) {
|
||||||
return sizeIdx2sizeTab[sizeIdx];
|
return sizeIdx2sizeTab[sizeIdx];
|
||||||
@ -318,7 +193,7 @@ abstract class SizeClasses implements SizeClassesMetric {
|
|||||||
return size2idxTab[size - 1 >> LOG2_QUANTUM];
|
return size2idxTab[size - 1 >> LOG2_QUANTUM];
|
||||||
}
|
}
|
||||||
|
|
||||||
int x = log2((size << 1) - 1);
|
int x = PoolThreadCache.log2((size << 1) - 1);
|
||||||
int shift = x < LOG2_SIZE_CLASS_GROUP + LOG2_QUANTUM + 1
|
int shift = x < LOG2_SIZE_CLASS_GROUP + LOG2_QUANTUM + 1
|
||||||
? 0 : x - (LOG2_SIZE_CLASS_GROUP + LOG2_QUANTUM);
|
? 0 : x - (LOG2_SIZE_CLASS_GROUP + LOG2_QUANTUM);
|
||||||
|
|
||||||
@ -350,7 +225,7 @@ abstract class SizeClasses implements SizeClassesMetric {
|
|||||||
return nPSizes;
|
return nPSizes;
|
||||||
}
|
}
|
||||||
|
|
||||||
int x = log2((pageSize << 1) - 1);
|
int x = PoolThreadCache.log2((pageSize << 1) - 1);
|
||||||
|
|
||||||
int shift = x < LOG2_SIZE_CLASS_GROUP + pageShifts
|
int shift = x < LOG2_SIZE_CLASS_GROUP + pageShifts
|
||||||
? 0 : x - (LOG2_SIZE_CLASS_GROUP + pageShifts);
|
? 0 : x - (LOG2_SIZE_CLASS_GROUP + pageShifts);
|
||||||
@ -397,11 +272,207 @@ abstract class SizeClasses implements SizeClassesMetric {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private static int normalizeSizeCompute(int size) {
|
private static int normalizeSizeCompute(int size) {
|
||||||
int x = log2((size << 1) - 1);
|
int x = PoolThreadCache.log2((size << 1) - 1);
|
||||||
int log2Delta = x < LOG2_SIZE_CLASS_GROUP + LOG2_QUANTUM + 1
|
int log2Delta = x < LOG2_SIZE_CLASS_GROUP + LOG2_QUANTUM + 1
|
||||||
? LOG2_QUANTUM : x - LOG2_SIZE_CLASS_GROUP - 1;
|
? LOG2_QUANTUM : x - LOG2_SIZE_CLASS_GROUP - 1;
|
||||||
int delta = 1 << log2Delta;
|
int delta = 1 << log2Delta;
|
||||||
int delta_mask = delta - 1;
|
int delta_mask = delta - 1;
|
||||||
return size + delta_mask & ~delta_mask;
|
return size + delta_mask & ~delta_mask;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static final class SizeClassKey {
|
||||||
|
final int pageSize;
|
||||||
|
final int pageShifts;
|
||||||
|
final int chunkSize;
|
||||||
|
final int directMemoryCacheAlignment;
|
||||||
|
|
||||||
|
private SizeClassKey(int pageSize, int pageShifts, int chunkSize, int directMemoryCacheAlignment) {
|
||||||
|
this.pageSize = pageSize;
|
||||||
|
this.pageShifts = pageShifts;
|
||||||
|
this.chunkSize = chunkSize;
|
||||||
|
this.directMemoryCacheAlignment = directMemoryCacheAlignment;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(Object o) {
|
||||||
|
if (this == o) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (o == null || getClass() != o.getClass()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
SizeClassKey that = (SizeClassKey) o;
|
||||||
|
|
||||||
|
if (pageSize != that.pageSize) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (pageShifts != that.pageShifts) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (chunkSize != that.chunkSize) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return directMemoryCacheAlignment == that.directMemoryCacheAlignment;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int hashCode() {
|
||||||
|
int result = pageSize;
|
||||||
|
result = 31 * result + pageShifts;
|
||||||
|
result = 31 * result + chunkSize;
|
||||||
|
result = 31 * result + directMemoryCacheAlignment;
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static final class SizeClassValue {
|
||||||
|
final SizeClassKey key;
|
||||||
|
final int nSizes;
|
||||||
|
int nSubpages;
|
||||||
|
int nPSizes;
|
||||||
|
int smallMaxSizeIdx;
|
||||||
|
int lookupMaxSize;
|
||||||
|
final short[][] sizeClasses;
|
||||||
|
final int[] pageIdx2sizeTab;
|
||||||
|
final int[] sizeIdx2sizeTab;
|
||||||
|
final int[] size2idxTab;
|
||||||
|
|
||||||
|
SizeClassValue(SizeClassKey key) {
|
||||||
|
this.key = key;
|
||||||
|
int group = PoolThreadCache.log2(key.chunkSize) + 1 - LOG2_QUANTUM;
|
||||||
|
|
||||||
|
//generate size classes
|
||||||
|
//[index, log2Group, log2Delta, nDelta, isMultiPageSize, isSubPage, log2DeltaLookup]
|
||||||
|
sizeClasses = new short[group << LOG2_SIZE_CLASS_GROUP][7];
|
||||||
|
nSizes = sizeClasses();
|
||||||
|
|
||||||
|
//generate lookup table
|
||||||
|
sizeIdx2sizeTab = new int[nSizes];
|
||||||
|
pageIdx2sizeTab = new int[nPSizes];
|
||||||
|
idx2SizeTab(sizeIdx2sizeTab, pageIdx2sizeTab);
|
||||||
|
|
||||||
|
size2idxTab = new int[lookupMaxSize >> LOG2_QUANTUM];
|
||||||
|
size2idxTab(size2idxTab);
|
||||||
|
}
|
||||||
|
|
||||||
|
private int sizeClasses() {
|
||||||
|
int normalMaxSize = -1;
|
||||||
|
|
||||||
|
int index = 0;
|
||||||
|
int size = 0;
|
||||||
|
|
||||||
|
int log2Group = LOG2_QUANTUM;
|
||||||
|
int log2Delta = LOG2_QUANTUM;
|
||||||
|
int ndeltaLimit = 1 << LOG2_SIZE_CLASS_GROUP;
|
||||||
|
            //First small group, nDelta start at 0.
            //first size class is 1 << LOG2_QUANTUM
            int nDelta = 0;
            while (nDelta < ndeltaLimit) {
                size = sizeClass(index++, log2Group, log2Delta, nDelta++);
            }
            log2Group += LOG2_SIZE_CLASS_GROUP;

            //All remaining groups, nDelta start at 1.
            while (size < key.chunkSize) {
                nDelta = 1;

                while (nDelta <= ndeltaLimit && size < key.chunkSize) {
                    size = sizeClass(index++, log2Group, log2Delta, nDelta++);
                    normalMaxSize = size;
                }

                log2Group++;
                log2Delta++;
            }

            //chunkSize must be normalMaxSize
            assert key.chunkSize == normalMaxSize;

            //return number of size index
            return index;
        }

        //calculate size class
        private int sizeClass(int index, int log2Group, int log2Delta, int nDelta) {
            short isMultiPageSize;
            if (log2Delta >= key.pageShifts) {
                isMultiPageSize = yes;
            } else {
                int pageSize = 1 << key.pageShifts;
                int size = (1 << log2Group) + (1 << log2Delta) * nDelta;

                isMultiPageSize = size == size / pageSize * pageSize? yes : no;
            }

            int log2Ndelta = nDelta == 0? 0 : PoolThreadCache.log2(nDelta);

            byte remove = 1 << log2Ndelta < nDelta? yes : no;

            int log2Size = log2Delta + log2Ndelta == log2Group? log2Group + 1 : log2Group;
            if (log2Size == log2Group) {
                remove = yes;
            }

            short isSubpage = log2Size < key.pageShifts + LOG2_SIZE_CLASS_GROUP? yes : no;

            int log2DeltaLookup = log2Size < LOG2_MAX_LOOKUP_SIZE ||
                                  log2Size == LOG2_MAX_LOOKUP_SIZE && remove == no
                    ? log2Delta : no;

            short[] sz = {
                    (short) index, (short) log2Group, (short) log2Delta,
                    (short) nDelta, isMultiPageSize, isSubpage, (short) log2DeltaLookup
            };

            sizeClasses[index] = sz;
            int size = (1 << log2Group) + (nDelta << log2Delta);

            if (sz[PAGESIZE_IDX] == yes) {
                nPSizes++;
            }
            if (sz[SUBPAGE_IDX] == yes) {
                nSubpages++;
                smallMaxSizeIdx = index;
            }
            if (sz[LOG2_DELTA_LOOKUP_IDX] != no) {
                lookupMaxSize = size;
            }
            return size;
        }

        private void idx2SizeTab(int[] sizeIdx2sizeTab, int[] pageIdx2sizeTab) {
            int pageIdx = 0;

            for (int i = 0; i < nSizes; i++) {
                short[] sizeClass = sizeClasses[i];
                int log2Group = sizeClass[LOG2GROUP_IDX];
                int log2Delta = sizeClass[LOG2DELTA_IDX];
                int nDelta = sizeClass[NDELTA_IDX];

                int size = (1 << log2Group) + (nDelta << log2Delta);
                sizeIdx2sizeTab[i] = size;

                if (sizeClass[PAGESIZE_IDX] == yes) {
                    pageIdx2sizeTab[pageIdx++] = size;
                }
            }
        }

        private void size2idxTab(int[] size2idxTab) {
            int idx = 0;
            int size = 0;

            for (int i = 0; size <= lookupMaxSize; i++) {
                int log2Delta = sizeClasses[i][LOG2DELTA_IDX];
                int times = 1 << log2Delta - LOG2_QUANTUM;

                while (size <= lookupMaxSize && times-- > 0) {
                    size2idxTab[idx++] = i;
                    size = idx + 1 << LOG2_QUANTUM;
                }
            }
        }
    }
}
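Editorial aside: to make the size-class arithmetic above easier to follow, here is a small, self-contained sketch. It is an illustration only, not part of this PR; it assumes the constants Netty's size-class code is conventionally built around (LOG2_QUANTUM = 4, LOG2_SIZE_CLASS_GROUP = 2) and prints the first two groups produced by the size = (1 << log2Group) + (nDelta << log2Delta) formula used by sizeClass(...).

// Hypothetical standalone sketch: enumerate the first size-class groups the loops above produce.
// The constant values are assumptions for illustration, not taken from this diff.
public final class SizeClassSketch {
    static final int LOG2_QUANTUM = 4;          // first size class is 1 << LOG2_QUANTUM = 16 bytes
    static final int LOG2_SIZE_CLASS_GROUP = 2; // 4 size classes per group

    public static void main(String[] args) {
        int ndeltaLimit = 1 << LOG2_SIZE_CLASS_GROUP;
        int log2Group = LOG2_QUANTUM;
        int log2Delta = LOG2_QUANTUM;

        // First small group: nDelta starts at 0, yielding 16, 32, 48, 64.
        for (int nDelta = 0; nDelta < ndeltaLimit; nDelta++) {
            System.out.println(size(log2Group, log2Delta, nDelta));
        }
        log2Group += LOG2_SIZE_CLASS_GROUP;

        // Next group: nDelta starts at 1, yielding 80, 96, 112, 128.
        for (int nDelta = 1; nDelta <= ndeltaLimit; nDelta++) {
            System.out.println(size(log2Group, log2Delta, nDelta));
        }
    }

    static int size(int log2Group, int log2Delta, int nDelta) {
        // Same formula as sizeClass(...) above.
        return (1 << log2Group) + (nDelta << log2Delta);
    }
}

After the second group, log2Group and log2Delta both increase, so the spacing between consecutive size classes doubles from group to group while each group keeps four entries.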
@@ -15,9 +15,9 @@
  */
 package io.netty.buffer.api.unsafe;
 
-import io.netty.buffer.api.BufferAllocator;
 import io.netty.buffer.api.AllocatorControl;
 import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import io.netty.buffer.api.ByteCursor;
 import io.netty.buffer.api.Drop;
 import io.netty.buffer.api.Owned;
@@ -34,6 +34,7 @@ import java.lang.ref.Reference;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 
+import static io.netty.buffer.api.internal.Statics.bbslice;
 import static io.netty.buffer.api.internal.Statics.bufferIsClosed;
 import static io.netty.buffer.api.internal.Statics.bufferIsReadOnly;
 import static io.netty.util.internal.PlatformDependent.BIG_ENDIAN_NATIVE_ORDER;
@@ -596,7 +597,7 @@ class UnsafeBuffer extends RcSupport<Buffer, UnsafeBuffer> implements Buffer, Re
     public ByteBuffer readableBuffer() {
         final ByteBuffer buf;
         if (hasReadableArray()) {
-            buf = ByteBuffer.wrap(readableArray()).slice(readableArrayOffset(), readableArrayLength());
+            buf = bbslice(ByteBuffer.wrap(readableArray()), readableArrayOffset(), readableArrayLength());
         } else {
             buf = PlatformDependent.directBuffer(address + roff, readableBytes());
         }
@@ -640,7 +641,7 @@ class UnsafeBuffer extends RcSupport<Buffer, UnsafeBuffer> implements Buffer, Re
     public ByteBuffer writableBuffer() {
         final ByteBuffer buf;
         if (hasWritableArray()) {
-            buf = ByteBuffer.wrap(writableArray()).slice(writableArrayOffset(), writableArrayLength());
+            buf = bbslice(ByteBuffer.wrap(writableArray()), writableArrayOffset(), writableArrayLength());
         } else {
             buf = PlatformDependent.directBuffer(address + woff, writableBytes());
         }
43  buffer-api/src/main/java/module-info.java  Normal file
@@ -0,0 +1,43 @@
/*
 * Copyright 2021 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
import io.netty.buffer.api.MemoryManagers;
import io.netty.buffer.api.bytebuffer.ByteBufferMemoryManagers;
import io.netty.buffer.api.unsafe.UnsafeMemoryManagers;

module netty.incubator.buffer {
    requires io.netty.common;
    requires io.netty.buffer;

    // Optional dependencies, needed for some examples.
    requires static java.logging; //todo remove

    exports io.netty.buffer.api;
    exports io.netty.buffer.api.adaptor;

    exports io.netty.buffer.api.internal to
            netty.incubator.buffer.memseg,
            netty.incubator.buffer.tests;

    uses MemoryManagers;

    // Permit reflective access to non-public members.
    // Also means we don't have to make all test methods etc. public for JUnit to access them.
    opens io.netty.buffer.api; //todo remove

    provides MemoryManagers with
            ByteBufferMemoryManagers,
            UnsafeMemoryManagers;
}
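Editorial aside: the `uses MemoryManagers;` and `provides MemoryManagers with ...` clauses above wire the memory-manager implementations up through the JDK's service-loader mechanism. A minimal sketch of how such a provider can be discovered at runtime follows; the probe class is hypothetical and not part of this PR, and it relies only on java.util.ServiceLoader plus the MemoryManagers interface named above.

import io.netty.buffer.api.MemoryManagers;

import java.util.ServiceLoader;

// Hypothetical probe, for illustration only: lists every MemoryManagers implementation
// that some module on the module path declares with `provides MemoryManagers with ...`.
public final class MemoryManagersProbe {
    public static void main(String[] args) {
        for (MemoryManagers managers : ServiceLoader.load(MemoryManagers.class)) {
            System.out.println("Found: " + managers.getClass().getName());
        }
    }
}

If the probe itself runs as a named module, its own module-info would also need a `uses io.netty.buffer.api.MemoryManagers;` clause, which is exactly what the declaration above provides for this module.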
30  buffer-memseg-dummy/pom.xml  Normal file
@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Copyright 2021 The Netty Project
  ~
  ~ The Netty Project licenses this file to you under the Apache License,
  ~ version 2.0 (the "License"); you may not use this file except in compliance
  ~ with the License. You may obtain a copy of the License at:
  ~
  ~   https://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  ~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
  ~ License for the specific language governing permissions and limitations
  ~ under the License.
  -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>io.netty.incubator</groupId>
        <artifactId>netty-incubator-buffer-parent</artifactId>
        <version>0.0.1.Final-SNAPSHOT</version>
    </parent>

    <artifactId>netty-incubator-buffer-memseg-dummy</artifactId>
    <version>0.0.1.Final-SNAPSHOT</version>
</project>
19  buffer-memseg-dummy/src/main/java/module-info.java  Normal file
@@ -0,0 +1,19 @@
/*
 * Copyright 2021 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
module netty.incubator.buffer.memseg {
    // Java 11 compatible stand-in module for the memory segment implementation.
    // We need this module in order for the tests module to pull in the memseg module.
}
72  buffer-memseg/pom.xml  Normal file
@@ -0,0 +1,72 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>io.netty.incubator</groupId>
        <artifactId>netty-incubator-buffer-parent</artifactId>
        <version>0.0.1.Final-SNAPSHOT</version>
    </parent>

    <artifactId>netty-incubator-buffer-memseg</artifactId>
    <version>0.0.1.Final-SNAPSHOT</version>
    <name>Netty/Incubator/Buffer MemorySegment</name>
    <packaging>jar</packaging>

    <dependencies>
        <dependency>
            <groupId>io.netty.incubator</groupId>
            <artifactId>netty-incubator-buffer-api</artifactId>
        </dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-common</artifactId>
        </dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-buffer</artifactId>
        </dependency>
        <dependency>
            <groupId>org.openjdk.jmh</groupId>
            <artifactId>jmh-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.openjdk.jmh</groupId>
            <artifactId>jmh-generator-annprocess</artifactId>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <compilerVersion>${java.version}</compilerVersion>
                    <fork>true</fork>
                    <source>${java.compatibility}</source>
                    <target>${java.compatibility}</target>
                    <release>${java.version}</release>
                    <debug>true</debug>
                    <optimize>true</optimize>
                    <showDeprecation>true</showDeprecation>
                    <showWarnings>true</showWarnings>
                    <compilerArgument>-Xlint:-options</compilerArgument>
                    <meminitial>256m</meminitial>
                    <maxmem>1024m</maxmem>
                    <compilerArgs>
                        <arg>--add-modules</arg>
                        <arg>jdk.incubator.foreign</arg>
                    </compilerArgs>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
@@ -15,23 +15,22 @@
  */
 package io.netty.buffer.api.memseg;
 
+import io.netty.buffer.api.internal.ArcDrop;
+import io.netty.buffer.api.internal.Statics;
 import io.netty.buffer.api.AllocatorControl;
 import io.netty.buffer.api.Buffer;
 import io.netty.buffer.api.Drop;
 import io.netty.buffer.api.MemoryManager;
-import io.netty.buffer.api.internal.ArcDrop;
 import jdk.incubator.foreign.MemorySegment;
 import jdk.incubator.foreign.ResourceScope;
 
 import java.lang.ref.Cleaner;
 
-import static io.netty.buffer.api.internal.Statics.convert;
-
 public abstract class AbstractMemorySegmentManager implements MemoryManager {
     @Override
     public Buffer allocateShared(AllocatorControl allocatorControl, long size, Drop<Buffer> drop, Cleaner cleaner) {
         var segment = createSegment(size, cleaner);
-        return new MemSegBuffer(segment, segment, convert(drop), allocatorControl);
+        return new MemSegBuffer(segment, segment, Statics.convert(drop), allocatorControl);
     }
 
     @Override
@@ -45,7 +44,7 @@ public abstract class AbstractMemorySegmentManager implements MemoryManager {
 
     @Override
     public Drop<Buffer> drop() {
-        return convert(MemSegBuffer.SEGMENT_CLOSE);
+        return Statics.convert(MemSegBuffer.SEGMENT_CLOSE);
     }
 
     @Override
@@ -71,7 +70,7 @@ public abstract class AbstractMemorySegmentManager implements MemoryManager {
     @Override
     public Buffer recoverMemory(AllocatorControl allocatorControl, Object recoverableMemory, Drop<Buffer> drop) {
         var segment = (MemorySegment) recoverableMemory;
-        return new MemSegBuffer(segment, segment, convert(ArcDrop.acquire(drop)), allocatorControl);
+        return new MemSegBuffer(segment, segment, Statics.convert(ArcDrop.acquire(drop)), allocatorControl);
     }
 
     @Override
@@ -17,6 +17,11 @@ package io.netty.buffer.api.memseg;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.api.BufferAllocator;
+import io.netty.buffer.api.adaptor.BufferIntegratable;
+import io.netty.buffer.api.adaptor.ByteBufAdaptor;
+import io.netty.buffer.api.adaptor.ByteBufAllocatorAdaptor;
+import io.netty.buffer.api.internal.ArcDrop;
+import io.netty.buffer.api.internal.Statics;
 import io.netty.buffer.api.AllocatorControl;
 import io.netty.buffer.api.Buffer;
 import io.netty.buffer.api.ByteCursor;
@@ -27,19 +32,12 @@ import io.netty.buffer.api.WritableComponentProcessor;
 import io.netty.buffer.api.Drop;
 import io.netty.buffer.api.Owned;
 import io.netty.buffer.api.RcSupport;
-import io.netty.buffer.api.adaptor.BufferIntegratable;
-import io.netty.buffer.api.adaptor.ByteBufAdaptor;
-import io.netty.buffer.api.adaptor.ByteBufAllocatorAdaptor;
-import io.netty.buffer.api.internal.ArcDrop;
-import io.netty.buffer.api.internal.Statics;
 import jdk.incubator.foreign.MemorySegment;
 import jdk.incubator.foreign.ResourceScope;
 
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 
-import static io.netty.buffer.api.internal.Statics.bufferIsClosed;
-import static io.netty.buffer.api.internal.Statics.bufferIsReadOnly;
 import static jdk.incubator.foreign.MemoryAccess.getByteAtOffset;
 import static jdk.incubator.foreign.MemoryAccess.getCharAtOffset;
 import static jdk.incubator.foreign.MemoryAccess.getDoubleAtOffset;
@@ -325,7 +323,7 @@ class MemSegBuffer extends RcSupport<Buffer, MemSegBuffer> implements Buffer, Re
 
     private void copyInto(int srcPos, MemorySegment dest, int destPos, int length) {
         if (seg == CLOSED_SEGMENT) {
-            throw bufferIsClosed();
+            throw Statics.bufferIsClosed();
         }
         if (srcPos < 0) {
             throw new IllegalArgumentException("The srcPos cannot be negative: " + srcPos + '.');
@@ -360,7 +358,7 @@ class MemSegBuffer extends RcSupport<Buffer, MemSegBuffer> implements Buffer, Re
     @Override
     public ByteCursor openCursor(int fromOffset, int length) {
         if (seg == CLOSED_SEGMENT) {
-            throw bufferIsClosed();
+            throw Statics.bufferIsClosed();
         }
         if (fromOffset < 0) {
             throw new IllegalArgumentException("The fromOffset cannot be negative: " + fromOffset + '.');
@@ -430,7 +428,7 @@ class MemSegBuffer extends RcSupport<Buffer, MemSegBuffer> implements Buffer, Re
     @Override
     public ByteCursor openReverseCursor(int fromOffset, int length) {
         if (seg == CLOSED_SEGMENT) {
-            throw bufferIsClosed();
+            throw Statics.bufferIsClosed();
        }
         if (fromOffset < 0) {
             throw new IllegalArgumentException("The fromOffset cannot be negative: " + fromOffset + '.');
@@ -508,7 +506,7 @@ class MemSegBuffer extends RcSupport<Buffer, MemSegBuffer> implements Buffer, Re
             throw new IllegalArgumentException("The minimum growth cannot be negative: " + minimumGrowth + '.');
         }
         if (seg != wseg) {
-            throw bufferIsReadOnly();
+            throw Statics.bufferIsReadOnly();
         }
         if (writableBytes() >= size) {
             // We already have enough space.
@@ -1172,27 +1170,27 @@ class MemSegBuffer extends RcSupport<Buffer, MemSegBuffer> implements Buffer, Re
 
     private RuntimeException checkWriteState(IndexOutOfBoundsException ioobe) {
         if (seg == CLOSED_SEGMENT) {
-            return bufferIsClosed();
+            return Statics.bufferIsClosed();
         }
         if (wseg != seg) {
-            return bufferIsReadOnly();
+            return Statics.bufferIsReadOnly();
         }
         return ioobe;
     }
 
     private RuntimeException readAccessCheckException(int index) {
         if (seg == CLOSED_SEGMENT) {
-            throw bufferIsClosed();
+            throw Statics.bufferIsClosed();
         }
         return outOfBounds(index);
     }
 
     private RuntimeException writeAccessCheckException(int index) {
         if (seg == CLOSED_SEGMENT) {
-            throw bufferIsClosed();
+            throw Statics.bufferIsClosed();
         }
         if (wseg != seg) {
-            return bufferIsReadOnly();
+            return Statics.bufferIsReadOnly();
         }
         return outOfBounds(index);
     }
@@ -15,18 +15,17 @@
  */
 package io.netty.buffer.api.memseg;
 
+import io.netty.buffer.api.internal.Statics;
 import jdk.incubator.foreign.MemorySegment;
 import jdk.incubator.foreign.ResourceScope;
 
 import java.lang.ref.Cleaner;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.LongAdder;
 import java.util.function.Function;
 
 import static jdk.incubator.foreign.ResourceScope.newSharedScope;
 
 public class NativeMemorySegmentManager extends AbstractMemorySegmentManager {
-    public static final LongAdder MEM_USAGE_NATIVE = new LongAdder();
     private static final ConcurrentHashMap<Long, Runnable> CLEANUP_ACTIONS = new ConcurrentHashMap<>();
     private static final Function<Long, Runnable> CLEANUP_ACTION_MAKER = s -> new ReduceNativeMemoryUsage(s);
 
@@ -43,7 +42,7 @@ public class NativeMemorySegmentManager extends AbstractMemorySegmentManager {
 
         @Override
         public void run() {
-            MEM_USAGE_NATIVE.add(-size);
+            Statics.MEM_USAGE_NATIVE.add(-size);
         }
 
         @Override
@@ -62,7 +61,7 @@ public class NativeMemorySegmentManager extends AbstractMemorySegmentManager {
         final ResourceScope scope = cleaner == null ? newSharedScope() : newSharedScope(cleaner);
         scope.addCloseAction(getCleanupAction(size));
         var segment = MemorySegment.allocateNative(size, scope);
-        MEM_USAGE_NATIVE.add(size);
+        Statics.MEM_USAGE_NATIVE.add(size);
         return segment;
     }
 }
@@ -13,25 +13,22 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-module netty.incubator.buffer {
+import io.netty.buffer.api.MemoryManagers;
+import io.netty.buffer.api.memseg.SegmentMemoryManagers;
+
+module netty.incubator.buffer.memseg {
     requires jdk.incubator.foreign;
     requires io.netty.common;
     requires io.netty.buffer;
+    requires netty.incubator.buffer;
 
     // Optional dependencies, needed for some examples.
     requires static java.logging;
 
-    exports io.netty.buffer.api;
-    exports io.netty.buffer.api.adaptor;
-
-    uses io.netty.buffer.api.MemoryManagers;
-
     // Permit reflective access to non-public members.
     // Also means we don't have to make all test methods etc. public for JUnit to access them.
-    opens io.netty.buffer.api;
+    opens io.netty.buffer.api.memseg;
 
-    provides io.netty.buffer.api.MemoryManagers with
-            io.netty.buffer.api.memseg.SegmentMemoryManagers,
-            io.netty.buffer.api.bytebuffer.ByteBufferMemoryManagers,
-            io.netty.buffer.api.unsafe.UnsafeMemoryManagers;
+    provides MemoryManagers with
+            SegmentMemoryManagers;
 }
@@ -13,7 +13,7 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api.benchmarks;
+package io.netty.buffer.api.memseg.benchmarks;
 
 import io.netty.buffer.api.BufferAllocator;
 import io.netty.buffer.api.Buffer;
@@ -13,7 +13,7 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api.benchmarks;
+package io.netty.buffer.api.memseg.benchmarks;
 
 import jdk.incubator.foreign.MemorySegment;
 import jdk.incubator.foreign.ResourceScope;
@@ -13,10 +13,12 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api.benchmarks;
+package io.netty.buffer.api.memseg.benchmarks;
 
 import io.netty.buffer.api.BufferAllocator;
+import io.netty.buffer.api.memseg.SegmentMemoryManagers;
 import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.MemoryManagers;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -41,10 +43,37 @@ import static java.util.concurrent.CompletableFuture.completedFuture;
 @OutputTimeUnit(TimeUnit.MICROSECONDS)
 @State(Scope.Benchmark)
 public class MemorySegmentClosedByCleanerBenchmark {
-    private static final BufferAllocator heap = BufferAllocator.heap();
-    private static final BufferAllocator heapPooled = BufferAllocator.pooledHeap();
-    private static final BufferAllocator direct = BufferAllocator.direct();
-    private static final BufferAllocator directPooled = BufferAllocator.pooledDirect();
+    private static final BufferAllocator heap;
+    private static final BufferAllocator heapPooled;
+    private static final BufferAllocator direct;
+    private static final BufferAllocator directPooled;
+
+    static {
+        class Allocators {
+            final BufferAllocator heap;
+            final BufferAllocator pooledHeap;
+            final BufferAllocator direct;
+            final BufferAllocator pooledDirect;
+
+            Allocators(BufferAllocator heap, BufferAllocator pooledHeap,
+                       BufferAllocator direct, BufferAllocator pooledDirect) {
+                this.heap = heap;
+                this.pooledHeap = pooledHeap;
+                this.direct = direct;
+                this.pooledDirect = pooledDirect;
+            }
+        }
+
+        var allocs = MemoryManagers.using(new SegmentMemoryManagers(), () -> {
+            return new Allocators(BufferAllocator.heap(), BufferAllocator.pooledHeap(),
+                    BufferAllocator.direct(), BufferAllocator.pooledDirect());
+        });
+
+        heap = allocs.heap;
+        heapPooled = allocs.pooledHeap;
+        direct = allocs.direct;
+        directPooled = allocs.pooledDirect;
+    }
 
     @Param({"heavy", "light"})
     public String workload;
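Editorial aside: the static initializer added above routes all four allocators through MemoryManagers.using(...), which runs the given supplier with a specific MemoryManagers implementation installed and returns the supplier's result. Below is a hedged sketch of the same pattern outside JMH. It is an illustration only; the allocate, writeInt, and readInt calls are assumptions about the buffer API rather than lines taken from this diff.

import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.MemoryManagers;
import io.netty.buffer.api.memseg.SegmentMemoryManagers;

// Illustration only: obtain a direct allocator backed by the MemorySegment implementation,
// mirroring what the benchmark's static initializer does for its four allocators.
public final class UsingSegmentManagersSketch {
    public static void main(String[] args) {
        BufferAllocator allocator =
                MemoryManagers.using(new SegmentMemoryManagers(), BufferAllocator::direct);
        try (Buffer buffer = allocator.allocate(64)) { // assumed allocator method
            buffer.writeInt(42);                       // assumed accessor method
            System.out.println(buffer.readInt());      // expected to print 42
        }
    }
}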
123  buffer-tests/pom.xml  Normal file
@@ -0,0 +1,123 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Copyright 2021 The Netty Project
  ~
  ~ The Netty Project licenses this file to you under the Apache License,
  ~ version 2.0 (the "License"); you may not use this file except in compliance
  ~ with the License. You may obtain a copy of the License at:
  ~
  ~   https://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  ~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
  ~ License for the specific language governing permissions and limitations
  ~ under the License.
  -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>io.netty.incubator</groupId>
        <artifactId>netty-incubator-buffer-parent</artifactId>
        <version>0.0.1.Final-SNAPSHOT</version>
    </parent>

    <artifactId>netty-incubator-buffer-tests</artifactId>
    <version>0.0.1.Final-SNAPSHOT</version>
    <name>Netty/Incubator/Buffer Tests</name>
    <packaging>jar</packaging>

    <dependencies>
        <dependency>
            <groupId>io.netty.incubator</groupId>
            <artifactId>netty-incubator-buffer-api</artifactId>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-params</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.vintage</groupId>
            <artifactId>junit-vintage-engine</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.assertj</groupId>
            <artifactId>assertj-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.mockito</groupId>
            <artifactId>mockito-core</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-build-common</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-handler</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-codec-http</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.openjdk.jmh</groupId>
            <artifactId>jmh-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.openjdk.jmh</groupId>
            <artifactId>jmh-generator-annprocess</artifactId>
        </dependency>
    </dependencies>

    <profiles>
        <profile>
            <id>Java 17 support</id>
            <activation>
                <jdk>17</jdk>
            </activation>
            <dependencies>
                <dependency>
                    <groupId>io.netty.incubator</groupId>
                    <artifactId>netty-incubator-buffer-memseg</artifactId>
                </dependency>
            </dependencies>
        </profile>
        <profile>
            <id>Java 11 support for tests</id>
            <activation>
                <jdk>!17</jdk>
            </activation>
            <dependencies>
                <dependency>
                    <groupId>io.netty.incubator</groupId>
                    <artifactId>netty-incubator-buffer-memseg-dummy</artifactId>
                </dependency>
            </dependencies>
        </profile>
    </profiles>
</project>
27  buffer-tests/src/main/java/module-info.java  Normal file
@@ -0,0 +1,27 @@
/*
 * Copyright 2021 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
open module netty.incubator.buffer.tests {
    requires io.netty.common;
    requires io.netty.buffer;

    // Optional dependencies, needed for some examples.
    requires static java.logging;

    requires netty.incubator.buffer;
    // We need to require memseg in order for its implementation to be service loaded.
    // Just having it on the module path is not enough.
    requires netty.incubator.buffer.memseg;
}
@@ -13,8 +13,12 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
+import io.netty.buffer.api.CompositeBuffer;
+import io.netty.buffer.api.Scope;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,13 +13,17 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
-import io.netty.buffer.api.memseg.NativeMemorySegmentManager;
+import io.netty.buffer.api.MemoryManagers;
+import io.netty.buffer.api.internal.Statics;
+import org.junit.jupiter.api.condition.DisabledForJreRange;
+import org.junit.jupiter.api.condition.JRE;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
 import java.util.List;
+import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import static io.netty.buffer.api.MemoryManagers.using;
@@ -36,21 +40,23 @@ public class BufferCleanerTest extends BufferTestSupport {
             Stream.Builder<Fixture> builder = Stream.builder();
             builder.add(new Fixture(f + "/" + managers, () -> using(managers, f), f.getProperties()));
             return builder.build();
-        }).toList();
+        }).collect(Collectors.toList());
         return fixtureCombinations(initFixtures).filter(f -> f.isDirect()).toArray(Fixture[]::new);
     }
 
+    // Only run this one on JDK 17.
+    @DisabledForJreRange(min = JRE.JAVA_11, max = JRE.JAVA_16)
     @ParameterizedTest
     @MethodSource("memorySegmentAllocators")
     public void bufferMustBeClosedByCleaner(Fixture fixture) throws InterruptedException {
-        var initial = NativeMemorySegmentManager.MEM_USAGE_NATIVE.sum();
+        var initial = Statics.MEM_USAGE_NATIVE.sum();
         int allocationSize = 1024;
         allocateAndForget(fixture, allocationSize);
         long sum = 0;
         for (int i = 0; i < 15; i++) {
             System.gc();
             System.runFinalization();
-            sum = NativeMemorySegmentManager.MEM_USAGE_NATIVE.sum() - initial;
+            sum = Statics.MEM_USAGE_NATIVE.sum() - initial;
             if (sum < allocationSize) {
                 // The memory must have been cleaned.
                 return;
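Editorial aside on the .toList() change above: Stream.toList() only exists since Java 16, so a build that must also compile on Java 11 has to fall back to collect(Collectors.toList()). A minimal, plain-JDK illustration (not part of this PR):

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Both expressions build an equivalent List; only the second compiles on Java 11.
public final class ToListCompat {
    public static void main(String[] args) {
        // List<Integer> viaToList = Stream.of(1, 2, 3).toList(); // requires Java 16+
        List<Integer> viaCollect = Stream.of(1, 2, 3).collect(Collectors.toList()); // works on Java 8+
        System.out.println(viaCollect);
    }
}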
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,8 +13,12 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
+import io.netty.buffer.api.ByteCursor;
+import io.netty.buffer.api.CompositeBuffer;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
@@ -13,8 +13,11 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
+import io.netty.buffer.api.CompositeBuffer;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,8 +13,11 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
+import io.netty.buffer.api.CompositeBuffer;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
@@ -13,8 +13,10 @@
  * License for the specific language governing permissions and limitations
  * under the License.
  */
-package io.netty.buffer.api;
+package io.netty.buffer.api.tests;
 
+import io.netty.buffer.api.Buffer;
+import io.netty.buffer.api.BufferAllocator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
Some files were not shown because too many files have changed in this diff.