Merge pull request #464 from netty/buffer_always_dynamic
Make buffers always dynamic
Commit 961fc40b29
@@ -32,18 +32,25 @@ public abstract class AbstractByteBuf implements ByteBuf {

    private final SwappedByteBuf swappedBuf;
    private final ByteOrder order;
    private final int maxCapacity;

    private int readerIndex;
    private int writerIndex;
    private int markedReaderIndex;
    private int markedWriterIndex;

    protected AbstractByteBuf(ByteOrder endianness) {
    int refCnt = 1;

    protected AbstractByteBuf(ByteOrder endianness, int maxCapacity) {
        if (endianness == null) {
            throw new NullPointerException("endianness");
        }
        if (maxCapacity < 0) {
            throw new IllegalArgumentException("maxCapacity: " + maxCapacity + " (expected: >= 0)");
        }
        order = endianness;
        swappedBuf = new SwappedByteBuf(this);
        this.maxCapacity = maxCapacity;
    }

    @Override
@@ -56,6 +63,11 @@ public abstract class AbstractByteBuf implements ByteBuf {
        return ChannelBufType.BYTE;
    }

    @Override
    public int maxCapacity() {
        return maxCapacity;
    }

    @Override
    public int readerIndex() {
        return readerIndex;
@@ -149,22 +161,69 @@ public abstract class AbstractByteBuf implements ByteBuf {
        if (readerIndex != writerIndex) {
            setBytes(0, this, readerIndex, writerIndex - readerIndex);
            writerIndex -= readerIndex;
            markedReaderIndex = Math.max(markedReaderIndex - readerIndex, 0);
            markedWriterIndex = Math.max(markedWriterIndex - readerIndex, 0);
            adjustMarkers(readerIndex);
            readerIndex = 0;
        } else {
            markedReaderIndex = Math.max(markedReaderIndex - readerIndex, 0);
            markedWriterIndex = Math.max(markedWriterIndex - readerIndex, 0);
            adjustMarkers(readerIndex);
            writerIndex = readerIndex = 0;
        }
    }

    protected void adjustMarkers(int decrement) {
        markedReaderIndex = Math.max(markedReaderIndex - decrement, 0);
        markedWriterIndex = Math.max(markedWriterIndex - decrement, 0);
    }

    @Override
    public void ensureWritableBytes(int writableBytes) {
        if (writableBytes > writableBytes()) {
            throw new IndexOutOfBoundsException("Writable bytes exceeded: Got "
                    + writableBytes + ", maximum is " + writableBytes());
    public void ensureWritableBytes(int minWritableBytes) {
        if (minWritableBytes <= writableBytes()) {
            return;
        }

        if (minWritableBytes > maxCapacity - writerIndex) {
            throw new IllegalArgumentException(String.format(
                    "minWritableBytes(%d) + writerIndex(%d) > maxCapacity(%d)",
                    minWritableBytes, writerIndex, maxCapacity));
        }

        int minNewCapacity = writerIndex + minWritableBytes;

        if (minNewCapacity > maxCapacity) {
            throw new IllegalArgumentException(String.format(
                    "minWritableBytes: %d (exceeds maxCapacity(%d))", minWritableBytes, maxCapacity));
        }

        // Normalize the current capacity to the power of 2.
        int newCapacity = calculateNewCapacity(minNewCapacity);

        // Adjust to the new capacity.
        capacity(newCapacity);
    }

    private int calculateNewCapacity(int minNewCapacity) {
        final int threshold = 1048576 * 4; // 4 MiB page

        if (minNewCapacity == threshold) {
            return minNewCapacity;
        }

        // If over threshold, do not double but just increase by threshold.
        if (minNewCapacity > threshold) {
            int newCapacity = minNewCapacity / threshold * threshold;
            if (newCapacity > maxCapacity - threshold) {
                newCapacity = maxCapacity;
            } else {
                newCapacity += threshold;
            }
            return newCapacity;
        }

        // Not over threshold. Double up to 4 MiB, starting from 64.
        int newCapacity = 64;
        while (newCapacity < minNewCapacity) {
            newCapacity <<= 1;
        }
        return newCapacity;
    }

    @Override
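The ensureWritableBytes()/calculateNewCapacity() pair above is the core of "make buffers always dynamic": when a write needs more room than is left, the buffer picks a new capacity (doubling from 64 bytes up to a 4 MiB threshold, then growing in 4 MiB steps, never past maxCapacity) and resizes itself via capacity(int). A minimal standalone sketch of that growth arithmetic; GrowthPolicyDemo and its main() are hypothetical, only the numbers mirror the diff:

// Illustrative re-statement of the growth policy in calculateNewCapacity() above.
// GrowthPolicyDemo and main() are hypothetical; only the arithmetic mirrors the diff.
public final class GrowthPolicyDemo {

    static int newCapacity(int minNewCapacity, int maxCapacity) {
        final int threshold = 1048576 * 4; // 4 MiB page
        if (minNewCapacity == threshold) {
            return threshold;
        }
        if (minNewCapacity > threshold) {
            // Over the threshold: round down to a 4 MiB multiple, add one more
            // 4 MiB step, and never exceed maxCapacity.
            int cap = minNewCapacity / threshold * threshold;
            return cap > maxCapacity - threshold ? maxCapacity : cap + threshold;
        }
        // Under the threshold: double from 64 bytes until the requirement is met.
        int cap = 64;
        while (cap < minNewCapacity) {
            cap <<= 1;
        }
        return cap;
    }

    public static void main(String[] args) {
        System.out.println(newCapacity(100, Integer.MAX_VALUE));         // 128
        System.out.println(newCapacity(5000, Integer.MAX_VALUE));        // 8192
        System.out.println(newCapacity(5 * 1048576, Integer.MAX_VALUE)); // 8388608 (8 MiB)
    }
}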
@@ -411,7 +470,7 @@ public abstract class AbstractByteBuf implements ByteBuf {
        if (length == 0) {
            return Unpooled.EMPTY_BUFFER;
        }
        ByteBuf buf = factory().getBuffer(order(), length);
        ByteBuf buf = unsafe().newBuffer(length);
        buf.writeBytes(this, readerIndex, length);
        readerIndex += length;
        return buf;
@@ -499,29 +558,34 @@ public abstract class AbstractByteBuf implements ByteBuf {

    @Override
    public void writeByte(int value) {
        ensureWritableBytes(1);
        setByte(writerIndex ++, value);
    }

    @Override
    public void writeShort(int value) {
        ensureWritableBytes(2);
        setShort(writerIndex, value);
        writerIndex += 2;
    }

    @Override
    public void writeMedium(int value) {
        ensureWritableBytes(3);
        setMedium(writerIndex, value);
        writerIndex += 3;
    }

    @Override
    public void writeInt(int value) {
        ensureWritableBytes(4);
        setInt(writerIndex, value);
        writerIndex += 4;
    }

    @Override
    public void writeLong(long value) {
        ensureWritableBytes(8);
        setLong(writerIndex, value);
        writerIndex += 8;
    }

@@ -543,6 +607,7 @@ public abstract class AbstractByteBuf implements ByteBuf {

    @Override
    public void writeBytes(byte[] src, int srcIndex, int length) {
        ensureWritableBytes(length);
        setBytes(writerIndex, src, srcIndex, length);
        writerIndex += length;
    }

@@ -569,6 +634,7 @@ public abstract class AbstractByteBuf implements ByteBuf {

    @Override
    public void writeBytes(ByteBuf src, int srcIndex, int length) {
        ensureWritableBytes(length);
        setBytes(writerIndex, src, srcIndex, length);
        writerIndex += length;
    }

@@ -576,6 +642,7 @@ public abstract class AbstractByteBuf implements ByteBuf {
    @Override
    public void writeBytes(ByteBuffer src) {
        int length = src.remaining();
        ensureWritableBytes(length);
        setBytes(writerIndex, src);
        writerIndex += length;
    }

@@ -583,6 +650,7 @@ public abstract class AbstractByteBuf implements ByteBuf {
    @Override
    public int writeBytes(InputStream in, int length)
            throws IOException {
        ensureWritableBytes(length);
        int writtenBytes = setBytes(writerIndex, in, length);
        if (writtenBytes > 0) {
            writerIndex += writtenBytes;

@@ -593,6 +661,7 @@ public abstract class AbstractByteBuf implements ByteBuf {
    @Override
    public int writeBytes(ScatteringByteChannel in, int length)
            throws IOException {
        ensureWritableBytes(length);
        int writtenBytes = setBytes(writerIndex, in, length);
        if (writtenBytes > 0) {
            writerIndex += writtenBytes;

@@ -633,11 +702,25 @@ public abstract class AbstractByteBuf implements ByteBuf {
        return copy(readerIndex, readableBytes());
    }

    @Override
    public ByteBuf duplicate() {
        return new DuplicatedByteBuf(this);
    }

    @Override
    public ByteBuf slice() {
        return slice(readerIndex, readableBytes());
    }

    @Override
    public ByteBuf slice(int index, int length) {
        if (length == 0) {
            return Unpooled.EMPTY_BUFFER;
        }

        return new SlicedByteBuf(this, index, length);
    }

    @Override
    public ByteBuffer nioBuffer() {
        return nioBuffer(readerIndex, readableBytes());
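Taken together, the write*() changes above mean a buffer no longer fails with IndexOutOfBoundsException when its writable space runs out (as the old ensureWritableBytes() did); it grows on demand and only rejects writes that would exceed maxCapacity. A minimal usage sketch, assuming a DirectByteBuf is constructed directly with the (initialCapacity, maxCapacity) constructor added in this PR; the class javadoc recommends the Unpooled factory methods instead:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.DirectByteBuf;

public final class DynamicWriteDemo {
    public static void main(String[] args) {
        // Small initial capacity, generous maximum (both values are arbitrary).
        ByteBuf buf = new DirectByteBuf(16, 1024);

        // Writing past the initial 16 bytes no longer throws; the buffer grows
        // itself through ensureWritableBytes() and capacity(int).
        for (int i = 0; i < 100; i ++) {
            buf.writeInt(i);                    // 400 bytes in total
        }
        System.out.println(buf.capacity());     // 512 under the growth policy above
        System.out.println(buf.maxCapacity());  // 1024

        // Growing beyond maxCapacity is still rejected.
        try {
            buf.ensureWritableBytes(2048);
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}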
@ -1,61 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
/**
|
||||
* A skeletal implementation of {@link ByteBufFactory}.
|
||||
*/
|
||||
public abstract class AbstractByteBufFactory implements ByteBufFactory {
|
||||
|
||||
private final ByteOrder defaultOrder;
|
||||
|
||||
/**
|
||||
* Creates a new factory whose default {@link ByteOrder} is
|
||||
* {@link ByteOrder#BIG_ENDIAN}.
|
||||
*/
|
||||
protected AbstractByteBufFactory() {
|
||||
this(ByteOrder.BIG_ENDIAN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new factory with the specified default {@link ByteOrder}.
|
||||
*
|
||||
* @param defaultOrder the default {@link ByteOrder} of this factory
|
||||
*/
|
||||
protected AbstractByteBufFactory(ByteOrder defaultOrder) {
|
||||
if (defaultOrder == null) {
|
||||
throw new NullPointerException("defaultOrder");
|
||||
}
|
||||
this.defaultOrder = defaultOrder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf getBuffer(int capacity) {
|
||||
return getBuffer(getDefaultOrder(), capacity);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf getBuffer(byte[] array, int offset, int length) {
|
||||
return getBuffer(getDefaultOrder(), array, offset, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteOrder getDefaultOrder() {
|
||||
return defaultOrder;
|
||||
}
|
||||
}
|
@@ -230,17 +230,27 @@ import java.nio.charset.UnsupportedCharsetException;
 */
public interface ByteBuf extends ChannelBuf, Comparable<ByteBuf> {

    /**
     * Returns the factory which creates a {@link ByteBuf} whose
     * type and default {@link ByteOrder} are same with this buffer.
     */
    ByteBufFactory factory();

    /**
     * Returns the number of bytes (octets) this buffer can contain.
     */
    int capacity();

    /**
     * Adjusts the capacity of this buffer. If the {@code newCapacity} is less than the current
     * capacity, the content of this buffer is truncated. If the {@code newCapacity} is greater
     * than the current capacity, the buffer is appended with unspecified data whose length is
     * {@code (newCapacity - currentCapacity)}.
     */
    void capacity(int newCapacity);

    /**
     * Returns the maximum allowed capacity of this buffer. If a user attempts to increase the
     * capacity of this buffer beyond the maximum capacity using {@link #capacity(int)} or
     * {@link #ensureWritableBytes(int)}, those methods will raise an
     * {@link IllegalArgumentException}.
     */
    int maxCapacity();

    /**
     * Returns the <a href="http://en.wikipedia.org/wiki/Endianness">endianness</a>
     * of this buffer.
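The capacity(int)/maxCapacity() contract above replaces the old fixed-capacity model: a buffer may be shrunk (truncating its content) or grown (appending unspecified data) at any time, as long as the result stays within maxCapacity. A small sketch of the documented semantics, again constructing DirectByteBuf directly for illustration only:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.DirectByteBuf;

public final class CapacityDemo {
    public static void main(String[] args) {
        ByteBuf buf = new DirectByteBuf(32, 128);
        buf.writeLong(0x1122334455667788L);

        buf.capacity(64);                    // grow: existing bytes are kept, the new
        System.out.println(buf.capacity());  // tail is unspecified data -> 64

        buf.capacity(4);                     // shrink: content and indexes are truncated
        System.out.println(buf.capacity());  // 4

        try {
            buf.capacity(256);               // beyond maxCapacity(128)
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}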
@@ -1776,4 +1786,38 @@ public interface ByteBuf extends ChannelBuf, Comparable<ByteBuf> {
     */
    @Override
    String toString();

    /**
     * Returns an object that exposes unsafe expert-only operations which can lead to unspecified
     * behavior.
     */
    Unsafe unsafe();

    interface Unsafe {
        /**
         * Returns the internal NIO buffer that is reused for I/O.
         *
         * @throws UnsupportedOperationException if the buffer has no internal NIO buffer
         */
        ByteBuffer nioBuffer();

        /**
         * Returns a new buffer whose type is identical to the callee.
         *
         * @param initialCapacity the initial capacity of the new buffer
         */
        ByteBuf newBuffer(int initialCapacity);

        /**
         * Increases the reference count of the buffer.
         */
        void acquire();

        /**
         * Decreases the reference count of the buffer. If decreased to 0, the internal memory
         * block of the buffer will be deallocated. The result of accessing a freed buffer is
         * unspecified and can even cause JVM crash.
         */
        void release();
    }
}
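The new Unsafe sub-interface above also introduces explicit reference counting: acquire() increments the count, release() decrements it, and when it reaches zero the backing memory is deallocated (DirectByteBuf below frees its direct ByteBuffer through its Cleaner). A hedged usage sketch; the hand-off pattern is illustrative, not an idiom documented by this commit:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.DirectByteBuf;

public final class RefCountDemo {

    // Hypothetical hand-off: the callee temporarily co-owns the buffer.
    static void handOff(ByteBuf buf) {
        buf.unsafe().acquire();        // refCnt: 1 -> 2
        try {
            // ... use buf on another code path ...
        } finally {
            buf.unsafe().release();    // refCnt: 2 -> 1
        }
    }

    public static void main(String[] args) {
        ByteBuf buf = new DirectByteBuf(64, 64);  // starts with refCnt == 1
        handOff(buf);
        buf.unsafe().release();        // refCnt: 1 -> 0, the direct memory is freed

        // Further access is undefined; releasing once more throws IllegalStateException
        // in DirectByteBuf's Unsafe implementation.
    }
}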
@ -1,104 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
/**
|
||||
* A factory that creates or pools {@link ByteBuf}s.
|
||||
*/
|
||||
public interface ByteBufFactory {
|
||||
|
||||
/**
|
||||
* Returns a {@link ByteBuf} with the specified {@code capacity}.
|
||||
* This method is identical to {@code getBuffer(getDefaultOrder(), capacity)}.
|
||||
*
|
||||
* @param capacity the capacity of the returned {@link ByteBuf}
|
||||
* @return a {@link ByteBuf} with the specified {@code capacity},
|
||||
* whose {@code readerIndex} and {@code writerIndex} are {@code 0}
|
||||
*/
|
||||
ByteBuf getBuffer(int capacity);
|
||||
|
||||
/**
|
||||
* Returns a {@link ByteBuf} with the specified {@code endianness}
|
||||
* and {@code capacity}.
|
||||
*
|
||||
* @param endianness the endianness of the returned {@link ByteBuf}
|
||||
* @param capacity the capacity of the returned {@link ByteBuf}
|
||||
* @return a {@link ByteBuf} with the specified {@code endianness} and
|
||||
* {@code capacity}, whose {@code readerIndex} and {@code writerIndex}
|
||||
* are {@code 0}
|
||||
*/
|
||||
ByteBuf getBuffer(ByteOrder endianness, int capacity);
|
||||
|
||||
/**
|
||||
* Returns a {@link ByteBuf} whose content is equal to the sub-region
|
||||
* of the specified {@code array}. Depending on the factory implementation,
|
||||
* the returned buffer could wrap the {@code array} or create a new copy of
|
||||
* the {@code array}.
|
||||
* This method is identical to {@code getBuffer(getDefaultOrder(), array, offset, length)}.
|
||||
*
|
||||
* @param array the byte array
|
||||
* @param offset the offset of the byte array
|
||||
* @param length the length of the byte array
|
||||
*
|
||||
* @return a {@link ByteBuf} with the specified content,
|
||||
* whose {@code readerIndex} and {@code writerIndex}
|
||||
* are {@code 0} and {@code (length - offset)} respectively
|
||||
*/
|
||||
ByteBuf getBuffer(byte[] array, int offset, int length);
|
||||
|
||||
/**
|
||||
* Returns a {@link ByteBuf} whose content is equal to the sub-region
|
||||
* of the specified {@code array}. Depending on the factory implementation,
|
||||
* the returned buffer could wrap the {@code array} or create a new copy of
|
||||
* the {@code array}.
|
||||
*
|
||||
* @param endianness the endianness of the returned {@link ByteBuf}
|
||||
* @param array the byte array
|
||||
* @param offset the offset of the byte array
|
||||
* @param length the length of the byte array
|
||||
*
|
||||
* @return a {@link ByteBuf} with the specified content,
|
||||
* whose {@code readerIndex} and {@code writerIndex}
|
||||
* are {@code 0} and {@code (length - offset)} respectively
|
||||
*/
|
||||
ByteBuf getBuffer(ByteOrder endianness, byte[] array, int offset, int length);
|
||||
|
||||
/**
|
||||
* Returns a {@link ByteBuf} whose content is equal to the sub-region
|
||||
* of the specified {@code nioBuffer}. Depending on the factory
|
||||
* implementation, the returned buffer could wrap the {@code nioBuffer} or
|
||||
* create a new copy of the {@code nioBuffer}.
|
||||
*
|
||||
* @param nioBuffer the NIO {@link ByteBuffer}
|
||||
*
|
||||
* @return a {@link ByteBuf} with the specified content,
|
||||
* whose {@code readerIndex} and {@code writerIndex}
|
||||
* are {@code 0} and {@code nioBuffer.remaining()} respectively
|
||||
*/
|
||||
ByteBuf getBuffer(ByteBuffer nioBuffer);
|
||||
|
||||
/**
|
||||
* Returns the default endianness of the {@link ByteBuf} which is
|
||||
* returned by {@link #getBuffer(int)}.
|
||||
*
|
||||
* @return the default endianness of the {@link ByteBuf} which is
|
||||
* returned by {@link #getBuffer(int)}
|
||||
*/
|
||||
ByteOrder getDefaultOrder();
|
||||
}
|
@ -15,764 +15,35 @@
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import io.netty.util.internal.DetectionUtil;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.channels.GatheringByteChannel;
|
||||
import java.nio.channels.ScatteringByteChannel;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
public interface CompositeByteBuf extends ByteBuf, Iterable<ByteBuf> {
|
||||
|
||||
/**
|
||||
* A virtual buffer which shows multiple buffers as a single merged buffer. It
|
||||
* is recommended to use {@link Unpooled#wrappedBuffer(ByteBuf...)}
|
||||
* instead of calling the constructor explicitly.
|
||||
*/
|
||||
public class CompositeByteBuf extends AbstractByteBuf {
|
||||
void addComponent(ByteBuf buffer);
|
||||
void addComponent(int cIndex, ByteBuf buffer);
|
||||
|
||||
private ByteBuf[] components;
|
||||
private int[] indices;
|
||||
private int lastAccessedComponentId;
|
||||
void addComponents(ByteBuf... buffers);
|
||||
void addComponents(Iterable<ByteBuf> buffers);
|
||||
void addComponents(int cIndex, ByteBuf... buffers);
|
||||
void addComponents(int cIndex, Iterable<ByteBuf> buffers);
|
||||
|
||||
public CompositeByteBuf(ByteOrder endianness, List<ByteBuf> buffers) {
|
||||
super(endianness);
|
||||
setComponents(buffers);
|
||||
}
|
||||
void removeComponent(int cIndex);
|
||||
void removeComponents(int cIndex, int numComponents);
|
||||
|
||||
int numComponents();
|
||||
int maxNumComponents();
|
||||
|
||||
ByteBuf component(int cIndex);
|
||||
ByteBuf componentAtOffset(int offset);
|
||||
|
||||
void discardReadComponents();
|
||||
void consolidate(int cIndex, int numComponents);
|
||||
|
||||
int toComponentIndex(int offset);
|
||||
int toByteIndex(int cIndex);
|
||||
|
||||
/**
|
||||
* Same with {@link #slice(int, int)} except that this method returns a list.
|
||||
*/
|
||||
public List<ByteBuf> decompose(int index, int length) {
|
||||
if (length == 0) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
if (index + length > capacity()) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to decompose - Need "
|
||||
+ (index + length) + ", capacity is " + capacity());
|
||||
}
|
||||
|
||||
int componentId = componentId(index);
|
||||
List<ByteBuf> slice = new ArrayList<ByteBuf>(components.length);
|
||||
|
||||
// The first component
|
||||
ByteBuf first = components[componentId].duplicate();
|
||||
first.readerIndex(index - indices[componentId]);
|
||||
|
||||
ByteBuf buf = first;
|
||||
int bytesToSlice = length;
|
||||
do {
|
||||
int readableBytes = buf.readableBytes();
|
||||
if (bytesToSlice <= readableBytes) {
|
||||
// Last component
|
||||
buf.writerIndex(buf.readerIndex() + bytesToSlice);
|
||||
slice.add(buf);
|
||||
break;
|
||||
} else {
|
||||
// Not the last component
|
||||
slice.add(buf);
|
||||
bytesToSlice -= readableBytes;
|
||||
componentId ++;
|
||||
|
||||
// Fetch the next component.
|
||||
buf = components[componentId].duplicate();
|
||||
}
|
||||
} while (bytesToSlice > 0);
|
||||
|
||||
// Slice all components because only readable bytes are interesting.
|
||||
for (int i = 0; i < slice.size(); i ++) {
|
||||
slice.set(i, slice.get(i).slice());
|
||||
}
|
||||
|
||||
return slice;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup this ChannelBuffer from the list
|
||||
*/
|
||||
private void setComponents(List<ByteBuf> newComponents) {
|
||||
assert !newComponents.isEmpty();
|
||||
|
||||
// Clear the cache.
|
||||
lastAccessedComponentId = 0;
|
||||
|
||||
// Build the component array.
|
||||
components = new ByteBuf[newComponents.size()];
|
||||
for (int i = 0; i < components.length; i ++) {
|
||||
ByteBuf c = newComponents.get(i);
|
||||
if (c.order() != order()) {
|
||||
throw new IllegalArgumentException(
|
||||
"All buffers must have the same endianness.");
|
||||
}
|
||||
|
||||
assert c.readerIndex() == 0;
|
||||
assert c.writerIndex() == c.capacity();
|
||||
|
||||
components[i] = c;
|
||||
}
|
||||
|
||||
// Build the component lookup table.
|
||||
indices = new int[components.length + 1];
|
||||
indices[0] = 0;
|
||||
for (int i = 1; i <= components.length; i ++) {
|
||||
indices[i] = indices[i - 1] + components[i - 1].capacity();
|
||||
}
|
||||
|
||||
// Reset the indexes.
|
||||
setIndex(0, capacity());
|
||||
}
|
||||
|
||||
private CompositeByteBuf(CompositeByteBuf buffer) {
|
||||
super(buffer.order());
|
||||
components = buffer.components.clone();
|
||||
indices = buffer.indices.clone();
|
||||
setIndex(buffer.readerIndex(), buffer.writerIndex());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBufFactory factory() {
|
||||
return HeapByteBufFactory.getInstance(order());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDirect() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasArray() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] array() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int arrayOffset() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int capacity() {
|
||||
return indices[components.length];
|
||||
}
|
||||
|
||||
public int numComponents() {
|
||||
return components.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte getByte(int index) {
|
||||
int componentId = componentId(index);
|
||||
return components[componentId].getByte(index - indices[componentId]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short getShort(int index) {
|
||||
int componentId = componentId(index);
|
||||
if (index + 2 <= indices[componentId + 1]) {
|
||||
return components[componentId].getShort(index - indices[componentId]);
|
||||
} else if (order() == ByteOrder.BIG_ENDIAN) {
|
||||
return (short) ((getByte(index) & 0xff) << 8 | getByte(index + 1) & 0xff);
|
||||
} else {
|
||||
return (short) (getByte(index) & 0xff | (getByte(index + 1) & 0xff) << 8);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getUnsignedMedium(int index) {
|
||||
int componentId = componentId(index);
|
||||
if (index + 3 <= indices[componentId + 1]) {
|
||||
return components[componentId].getUnsignedMedium(index - indices[componentId]);
|
||||
} else if (order() == ByteOrder.BIG_ENDIAN) {
|
||||
return (getShort(index) & 0xffff) << 8 | getByte(index + 2) & 0xff;
|
||||
} else {
|
||||
return getShort(index) & 0xFFFF | (getByte(index + 2) & 0xFF) << 16;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getInt(int index) {
|
||||
int componentId = componentId(index);
|
||||
if (index + 4 <= indices[componentId + 1]) {
|
||||
return components[componentId].getInt(index - indices[componentId]);
|
||||
} else if (order() == ByteOrder.BIG_ENDIAN) {
|
||||
return (getShort(index) & 0xffff) << 16 | getShort(index + 2) & 0xffff;
|
||||
} else {
|
||||
return getShort(index) & 0xFFFF | (getShort(index + 2) & 0xFFFF) << 16;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLong(int index) {
|
||||
int componentId = componentId(index);
|
||||
if (index + 8 <= indices[componentId + 1]) {
|
||||
return components[componentId].getLong(index - indices[componentId]);
|
||||
} else if (order() == ByteOrder.BIG_ENDIAN) {
|
||||
return (getInt(index) & 0xffffffffL) << 32 | getInt(index + 4) & 0xffffffffL;
|
||||
} else {
|
||||
return getInt(index) & 0xFFFFFFFFL | (getInt(index + 4) & 0xFFFFFFFFL) << 32;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, byte[] dst, int dstIndex, int length) {
|
||||
int componentId = componentId(index);
|
||||
if (index > capacity() - length || dstIndex > dst.length - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to read - Needs "
|
||||
+ (index + length) + ", maximum is " + capacity() + " or "
|
||||
+ dst.length);
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
while (length > 0) {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
s.getBytes(index - adjustment, dst, dstIndex, localLength);
|
||||
index += localLength;
|
||||
dstIndex += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuffer dst) {
|
||||
int componentId = componentId(index);
|
||||
int limit = dst.limit();
|
||||
int length = dst.remaining();
|
||||
if (index > capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to be read - Needs "
|
||||
+ (index + length) + ", maximum is " + capacity());
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
try {
|
||||
while (length > 0) {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
dst.limit(dst.position() + localLength);
|
||||
s.getBytes(index - adjustment, dst);
|
||||
index += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
} finally {
|
||||
dst.limit(limit);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuf dst, int dstIndex, int length) {
|
||||
int componentId = componentId(index);
|
||||
if (index > capacity() - length || dstIndex > dst.capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to be read - Needs "
|
||||
+ (index + length) + " or " + (dstIndex + length) + ", maximum is "
|
||||
+ capacity() + " or " + dst.capacity());
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
while (length > 0) {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
s.getBytes(index - adjustment, dst, dstIndex, localLength);
|
||||
index += localLength;
|
||||
dstIndex += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getBytes(int index, GatheringByteChannel out, int length)
|
||||
throws IOException {
|
||||
if (DetectionUtil.javaVersion() < 7) {
|
||||
// XXX Gathering write is not supported because of a known issue.
|
||||
// See http://bugs.sun.com/view_bug.do?bug_id=6210541
|
||||
return out.write(copiedNioBuffer(index, length));
|
||||
} else {
|
||||
long writtenBytes = out.write(nioBuffers(index, length));
|
||||
if (writtenBytes > Integer.MAX_VALUE) {
|
||||
return Integer.MAX_VALUE;
|
||||
} else {
|
||||
return (int) writtenBytes;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, OutputStream out, int length)
|
||||
throws IOException {
|
||||
int componentId = componentId(index);
|
||||
if (index > capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to be read - needs "
|
||||
+ (index + length) + ", maximum of " + capacity());
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
while (length > 0) {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
s.getBytes(index - adjustment, out, localLength);
|
||||
index += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setByte(int index, int value) {
|
||||
int componentId = componentId(index);
|
||||
components[componentId].setByte(index - indices[componentId], value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setShort(int index, int value) {
|
||||
int componentId = componentId(index);
|
||||
if (index + 2 <= indices[componentId + 1]) {
|
||||
components[componentId].setShort(index - indices[componentId], value);
|
||||
} else if (order() == ByteOrder.BIG_ENDIAN) {
|
||||
setByte(index, (byte) (value >>> 8));
|
||||
setByte(index + 1, (byte) value);
|
||||
} else {
|
||||
setByte(index , (byte) value);
|
||||
setByte(index + 1, (byte) (value >>> 8));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMedium(int index, int value) {
|
||||
int componentId = componentId(index);
|
||||
if (index + 3 <= indices[componentId + 1]) {
|
||||
components[componentId].setMedium(index - indices[componentId], value);
|
||||
} else if (order() == ByteOrder.BIG_ENDIAN) {
|
||||
setShort(index, (short) (value >> 8));
|
||||
setByte(index + 2, (byte) value);
|
||||
} else {
|
||||
setShort(index , (short) value);
|
||||
setByte(index + 2, (byte) (value >>> 16));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setInt(int index, int value) {
|
||||
int componentId = componentId(index);
|
||||
if (index + 4 <= indices[componentId + 1]) {
|
||||
components[componentId].setInt(index - indices[componentId], value);
|
||||
} else if (order() == ByteOrder.BIG_ENDIAN) {
|
||||
setShort(index, (short) (value >>> 16));
|
||||
setShort(index + 2, (short) value);
|
||||
} else {
|
||||
setShort(index , (short) value);
|
||||
setShort(index + 2, (short) (value >>> 16));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLong(int index, long value) {
|
||||
int componentId = componentId(index);
|
||||
if (index + 8 <= indices[componentId + 1]) {
|
||||
components[componentId].setLong(index - indices[componentId], value);
|
||||
} else if (order() == ByteOrder.BIG_ENDIAN) {
|
||||
setInt(index, (int) (value >>> 32));
|
||||
setInt(index + 4, (int) value);
|
||||
} else {
|
||||
setInt(index , (int) value);
|
||||
setInt(index + 4, (int) (value >>> 32));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, byte[] src, int srcIndex, int length) {
|
||||
int componentId = componentId(index);
|
||||
if (index > capacity() - length || srcIndex > src.length - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to read - needs "
|
||||
+ (index + length) + " or " + (srcIndex + length) + ", maximum is "
|
||||
+ capacity() + " or " + src.length);
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
while (length > 0) {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
s.setBytes(index - adjustment, src, srcIndex, localLength);
|
||||
index += localLength;
|
||||
srcIndex += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, ByteBuffer src) {
|
||||
int componentId = componentId(index);
|
||||
int limit = src.limit();
|
||||
int length = src.remaining();
|
||||
if (index > capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to be written - Needs "
|
||||
+ (index + length) + ", maximum is " + capacity());
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
try {
|
||||
while (length > 0) {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
src.limit(src.position() + localLength);
|
||||
s.setBytes(index - adjustment, src);
|
||||
index += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
} finally {
|
||||
src.limit(limit);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, ByteBuf src, int srcIndex, int length) {
|
||||
int componentId = componentId(index);
|
||||
if (index > capacity() - length || srcIndex > src.capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to be written - Needs "
|
||||
+ (index + length) + " or " + (srcIndex + length) + ", maximum is "
|
||||
+ capacity() + " or " + src.capacity());
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
while (length > 0) {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
s.setBytes(index - adjustment, src, srcIndex, localLength);
|
||||
index += localLength;
|
||||
srcIndex += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setBytes(int index, InputStream in, int length)
|
||||
throws IOException {
|
||||
int componentId = componentId(index);
|
||||
if (index > capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to write - Needs "
|
||||
+ (index + length) + ", maximum is " + capacity());
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
int readBytes = 0;
|
||||
|
||||
do {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
int localReadBytes = s.setBytes(index - adjustment, in, localLength);
|
||||
if (localReadBytes < 0) {
|
||||
if (readBytes == 0) {
|
||||
return -1;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (localReadBytes == localLength) {
|
||||
index += localLength;
|
||||
length -= localLength;
|
||||
readBytes += localLength;
|
||||
i ++;
|
||||
} else {
|
||||
index += localReadBytes;
|
||||
length -= localReadBytes;
|
||||
readBytes += localReadBytes;
|
||||
}
|
||||
} while (length > 0);
|
||||
|
||||
return readBytes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setBytes(int index, ScatteringByteChannel in, int length)
|
||||
throws IOException {
|
||||
int componentId = componentId(index);
|
||||
if (index > capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to write - Needs "
|
||||
+ (index + length) + ", maximum is " + capacity());
|
||||
}
|
||||
|
||||
int i = componentId;
|
||||
int readBytes = 0;
|
||||
do {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
int localReadBytes = s.setBytes(index - adjustment, in, localLength);
|
||||
|
||||
if (localReadBytes == localLength) {
|
||||
index += localLength;
|
||||
length -= localLength;
|
||||
readBytes += localLength;
|
||||
i ++;
|
||||
} else {
|
||||
index += localReadBytes;
|
||||
length -= localReadBytes;
|
||||
readBytes += localReadBytes;
|
||||
}
|
||||
} while (length > 0);
|
||||
|
||||
return readBytes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf duplicate() {
|
||||
ByteBuf duplicate = new CompositeByteBuf(this);
|
||||
duplicate.setIndex(readerIndex(), writerIndex());
|
||||
return duplicate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf copy(int index, int length) {
|
||||
int componentId = componentId(index);
|
||||
if (index > capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to copy - Needs "
|
||||
+ (index + length) + ", maximum is " + capacity());
|
||||
}
|
||||
|
||||
ByteBuf dst = factory().getBuffer(order(), length);
|
||||
copyTo(index, length, componentId, dst);
|
||||
return dst;
|
||||
}
|
||||
|
||||
private void copyTo(int index, int length, int componentId, ByteBuf dst) {
|
||||
int dstIndex = 0;
|
||||
int i = componentId;
|
||||
|
||||
while (length > 0) {
|
||||
ByteBuf s = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, s.capacity() - (index - adjustment));
|
||||
s.getBytes(index - adjustment, dst, dstIndex, localLength);
|
||||
index += localLength;
|
||||
dstIndex += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
|
||||
dst.writerIndex(dst.capacity());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link ByteBuf} portion of this {@link CompositeByteBuf} that
|
||||
* contains the specified {@code index}. <strong>This is an expert method!</strong>
|
||||
*
|
||||
* <p>
|
||||
* Please note that since a {@link CompositeByteBuf} is made up of
|
||||
* multiple {@link ByteBuf}s, this does <em>not</em> return the full buffer.
|
||||
* Instead, it only returns a portion of the composite buffer where the
|
||||
* index is located
|
||||
* </p>
|
||||
*
|
||||
*
|
||||
* @param index the {@code index} to search for and include in the returned {@link ByteBuf}
|
||||
* @return the {@link ByteBuf} that contains the specified {@code index}
|
||||
* @throws IndexOutOfBoundsException when the specified {@code index} is
|
||||
* less than zero, or greater than {@code capacity()}
|
||||
*/
|
||||
public ByteBuf getBuffer(int index) throws IndexOutOfBoundsException {
|
||||
if (index < 0 || index >= capacity()) {
|
||||
throw new IndexOutOfBoundsException("Invalid index: " + index
|
||||
+ " - Bytes needed: " + index + ", maximum is "
|
||||
+ capacity());
|
||||
}
|
||||
|
||||
//Return the component byte buffer
|
||||
return components[componentId(index)];
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf slice(int index, int length) {
|
||||
if (index == 0) {
|
||||
if (length == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
} else if (index < 0 || index > capacity() - length) {
|
||||
throw new IndexOutOfBoundsException("Invalid index: " + index
|
||||
+ " - Bytes needed: " + (index + length) + ", maximum is "
|
||||
+ capacity());
|
||||
} else if (length == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
|
||||
List<ByteBuf> components = decompose(index, length);
|
||||
switch (components.size()) {
|
||||
case 0:
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
case 1:
|
||||
return components.get(0);
|
||||
default:
|
||||
return new CompositeByteBuf(order(), components);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNioBuffer() {
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public ByteBuffer nioBuffer(int index, int length) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
private ByteBuffer copiedNioBuffer(int index, int length) {
|
||||
if (components.length == 1) {
|
||||
return toNioBuffer(components[0], index, length);
|
||||
}
|
||||
|
||||
ByteBuffer[] buffers = nioBuffers(index, length);
|
||||
ByteBuffer merged = ByteBuffer.allocate(length).order(order());
|
||||
for (ByteBuffer b: buffers) {
|
||||
merged.put(b);
|
||||
}
|
||||
merged.flip();
|
||||
return merged;
|
||||
}
|
||||
|
||||
private ByteBuffer[] nioBuffers(int index, int length) {
|
||||
int componentId = componentId(index);
|
||||
if (index + length > capacity()) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to convert - Needs"
|
||||
+ (index + length) + ", maximum is " + capacity());
|
||||
}
|
||||
|
||||
List<ByteBuffer> buffers = new ArrayList<ByteBuffer>(components.length);
|
||||
|
||||
int i = componentId;
|
||||
while (length > 0) {
|
||||
ByteBuf c = components[i];
|
||||
int adjustment = indices[i];
|
||||
int localLength = Math.min(length, c.capacity() - (index - adjustment));
|
||||
buffers.add(toNioBuffer(c, index - adjustment, localLength));
|
||||
index += localLength;
|
||||
length -= localLength;
|
||||
i ++;
|
||||
}
|
||||
|
||||
return buffers.toArray(new ByteBuffer[buffers.size()]);
|
||||
}
|
||||
|
||||
private static ByteBuffer toNioBuffer(ByteBuf buf, int index, int length) {
|
||||
if (buf.hasNioBuffer()) {
|
||||
return buf.nioBuffer(index, length);
|
||||
} else {
|
||||
return buf.copy(index, length).nioBuffer(0, length);
|
||||
}
|
||||
}
|
||||
|
||||
private int componentId(int index) {
|
||||
int lastComponentId = lastAccessedComponentId;
|
||||
if (index >= indices[lastComponentId]) {
|
||||
if (index < indices[lastComponentId + 1]) {
|
||||
return lastComponentId;
|
||||
}
|
||||
|
||||
// Search right
|
||||
for (int i = lastComponentId + 1; i < components.length; i ++) {
|
||||
if (index < indices[i + 1]) {
|
||||
lastAccessedComponentId = i;
|
||||
return i;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Search left
|
||||
for (int i = lastComponentId - 1; i >= 0; i --) {
|
||||
if (index >= indices[i]) {
|
||||
lastAccessedComponentId = i;
|
||||
return i;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw new IndexOutOfBoundsException("Invalid index: " + index + ", maximum: " + indices.length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void discardReadBytes() {
|
||||
// Only the bytes between readerIndex and writerIndex will be kept.
|
||||
// New readerIndex and writerIndex will become 0 and
|
||||
// (previous writerIndex - previous readerIndex) respectively.
|
||||
|
||||
final int localReaderIndex = readerIndex();
|
||||
if (localReaderIndex == 0) {
|
||||
return;
|
||||
}
|
||||
int localWriterIndex = writerIndex();
|
||||
|
||||
final int bytesToMove = capacity() - localReaderIndex;
|
||||
List<ByteBuf> list = decompose(localReaderIndex, bytesToMove);
|
||||
|
||||
|
||||
// If the list is empty we need to assign a new one because
|
||||
// we get a List that is immutable.
|
||||
//
|
||||
// See https://github.com/netty/netty/issues/325
|
||||
if (list.isEmpty()) {
|
||||
list = new ArrayList<ByteBuf>(1);
|
||||
}
|
||||
|
||||
// Add a new buffer so that the capacity of this composite buffer does
|
||||
// not decrease due to the discarded components.
|
||||
// XXX Might create too many components if discarded by small amount.
|
||||
final ByteBuf padding = Unpooled.buffer(localReaderIndex).order(order());
|
||||
padding.writerIndex(localReaderIndex);
|
||||
list.add(padding);
|
||||
|
||||
// Reset the index markers to get the index marker values.
|
||||
int localMarkedReaderIndex = localReaderIndex;
|
||||
try {
|
||||
resetReaderIndex();
|
||||
localMarkedReaderIndex = readerIndex();
|
||||
} catch (IndexOutOfBoundsException e) {
|
||||
// ignore
|
||||
}
|
||||
int localMarkedWriterIndex = localWriterIndex;
|
||||
try {
|
||||
resetWriterIndex();
|
||||
localMarkedWriterIndex = writerIndex();
|
||||
} catch (IndexOutOfBoundsException e) {
|
||||
// ignore
|
||||
}
|
||||
|
||||
setComponents(list);
|
||||
|
||||
// reset marked Indexes
|
||||
localMarkedReaderIndex = Math.max(localMarkedReaderIndex - localReaderIndex, 0);
|
||||
localMarkedWriterIndex = Math.max(localMarkedWriterIndex - localReaderIndex, 0);
|
||||
setIndex(localMarkedReaderIndex, localMarkedWriterIndex);
|
||||
markReaderIndex();
|
||||
markWriterIndex();
|
||||
// reset real indexes
|
||||
localWriterIndex = Math.max(localWriterIndex - localReaderIndex, 0);
|
||||
setIndex(0, localWriterIndex);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
String result = super.toString();
|
||||
result = result.substring(0, result.length() - 1);
|
||||
return result + ", components=" + components.length + ")";
|
||||
}
|
||||
List<ByteBuf> decompose(int offset, int length);
|
||||
}
|
||||
|
buffer/src/main/java/io/netty/buffer/DefaultCompositeByteBuf.java (new file, 1208 lines; diff suppressed because it is too large)
buffer/src/main/java/io/netty/buffer/DirectByteBuf.java (new file, 442 lines)
@ -0,0 +1,442 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.lang.reflect.Field;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.channels.ClosedChannelException;
|
||||
import java.nio.channels.GatheringByteChannel;
|
||||
import java.nio.channels.ScatteringByteChannel;
|
||||
|
||||
import sun.misc.Cleaner;
|
||||
|
||||
/**
|
||||
* A NIO {@link ByteBuffer} based buffer. It is recommended to use {@link Unpooled#directBuffer(int)}
|
||||
* and {@link Unpooled#wrappedBuffer(ByteBuffer)} instead of calling the
|
||||
* constructor explicitly.
|
||||
*/
|
||||
@SuppressWarnings("restriction")
|
||||
public class DirectByteBuf extends AbstractByteBuf {
|
||||
|
||||
private static final Field CLEANER_FIELD;
|
||||
|
||||
static {
|
||||
ByteBuffer direct = ByteBuffer.allocateDirect(1);
|
||||
Field cleanerField;
|
||||
try {
|
||||
cleanerField = direct.getClass().getDeclaredField("cleaner");
|
||||
cleanerField.setAccessible(true);
|
||||
Cleaner cleaner = (Cleaner) cleanerField.get(direct);
|
||||
cleaner.clean();
|
||||
} catch (Throwable t) {
|
||||
cleanerField = null;
|
||||
}
|
||||
CLEANER_FIELD = cleanerField;
|
||||
}
|
||||
|
||||
private static void freeDirect(ByteBuffer buffer) {
|
||||
Cleaner cleaner;
|
||||
try {
|
||||
cleaner = (Cleaner) CLEANER_FIELD.get(buffer);
|
||||
cleaner.clean();
|
||||
} catch (Throwable t) {
|
||||
// Nothing we can do here.
|
||||
}
|
||||
}
|
||||
|
||||
private final Unsafe unsafe = new DirectUnsafe();
|
||||
|
||||
private boolean doNotFree;
|
||||
private ByteBuffer buffer;
|
||||
private ByteBuffer tmpBuf;
|
||||
private int capacity;
|
||||
|
||||
/**
|
||||
* Creates a new direct buffer.
|
||||
*
|
||||
* @param initialCapacity the initial capacity of the underlying direct buffer
|
||||
* @param maxCapacity the maximum capacity of the underlying direct buffer
|
||||
*/
|
||||
public DirectByteBuf(int initialCapacity, int maxCapacity) {
|
||||
super(ByteOrder.BIG_ENDIAN, maxCapacity);
|
||||
if (initialCapacity < 0) {
|
||||
throw new IllegalArgumentException("initialCapacity: " + initialCapacity);
|
||||
}
|
||||
if (maxCapacity < 0) {
|
||||
throw new IllegalArgumentException("maxCapacity: " + maxCapacity);
|
||||
}
|
||||
if (initialCapacity > maxCapacity) {
|
||||
throw new IllegalArgumentException(String.format(
|
||||
"initialCapacity(%d) > maxCapacity(%d)", initialCapacity, maxCapacity));
|
||||
}
|
||||
|
||||
setByteBuffer(ByteBuffer.allocateDirect(initialCapacity));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new direct buffer by wrapping the specified initial buffer.
|
||||
*
|
||||
* @param maxCapacity the maximum capacity of the underlying direct buffer
|
||||
*/
|
||||
public DirectByteBuf(ByteBuffer initialBuffer, int maxCapacity) {
|
||||
super(ByteOrder.BIG_ENDIAN, maxCapacity);
|
||||
|
||||
if (initialBuffer == null) {
|
||||
throw new NullPointerException("initialBuffer");
|
||||
}
|
||||
if (!initialBuffer.isDirect()) {
|
||||
throw new IllegalArgumentException("initialBuffer is not a direct buffer.");
|
||||
}
|
||||
if (initialBuffer.isReadOnly()) {
|
||||
throw new IllegalArgumentException("initialBuffer is a read-only buffer.");
|
||||
}
|
||||
|
||||
int initialCapacity = initialBuffer.remaining();
|
||||
if (initialCapacity > maxCapacity) {
|
||||
throw new IllegalArgumentException(String.format(
|
||||
"initialCapacity(%d) > maxCapacity(%d)", initialCapacity, maxCapacity));
|
||||
}
|
||||
|
||||
doNotFree = true;
|
||||
setByteBuffer(initialBuffer.slice().order(ByteOrder.BIG_ENDIAN));
|
||||
writerIndex(initialCapacity);
|
||||
}
|
||||
|
||||
private void setByteBuffer(ByteBuffer buffer) {
|
||||
ByteBuffer oldBuffer = this.buffer;
|
||||
if (oldBuffer != null) {
|
||||
if (doNotFree) {
|
||||
doNotFree = false;
|
||||
} else {
|
||||
freeDirect(oldBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
this.buffer = buffer;
|
||||
tmpBuf = buffer.duplicate();
|
||||
capacity = buffer.remaining();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDirect() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int capacity() {
|
||||
return capacity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void capacity(int newCapacity) {
|
||||
if (newCapacity < 0 || newCapacity > maxCapacity()) {
|
||||
throw new IllegalArgumentException("newCapacity: " + newCapacity);
|
||||
}
|
||||
|
||||
int readerIndex = readerIndex();
|
||||
int writerIndex = writerIndex();
|
||||
|
||||
int oldCapacity = capacity;
|
||||
if (newCapacity > oldCapacity) {
|
||||
ByteBuffer oldBuffer = buffer;
|
||||
ByteBuffer newBuffer = ByteBuffer.allocateDirect(newCapacity);
|
||||
oldBuffer.position(readerIndex).limit(writerIndex);
|
||||
newBuffer.position(readerIndex).limit(writerIndex);
|
||||
newBuffer.put(oldBuffer);
|
||||
newBuffer.clear();
|
||||
setByteBuffer(newBuffer);
|
||||
} else if (newCapacity < oldCapacity) {
|
||||
ByteBuffer oldBuffer = buffer;
|
||||
ByteBuffer newBuffer = ByteBuffer.allocateDirect(newCapacity);
|
||||
if (readerIndex < newCapacity) {
|
||||
if (writerIndex > newCapacity) {
|
||||
writerIndex(writerIndex = newCapacity);
|
||||
}
|
||||
oldBuffer.position(readerIndex).limit(writerIndex);
|
||||
newBuffer.position(readerIndex).limit(writerIndex);
|
||||
newBuffer.put(oldBuffer);
|
||||
newBuffer.clear();
|
||||
} else {
|
||||
setIndex(newCapacity, newCapacity);
|
||||
}
|
||||
setByteBuffer(newBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasArray() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] array() {
|
||||
throw new UnsupportedOperationException("direct buffer");
|
||||
}
|
||||
|
||||
@Override
|
||||
public int arrayOffset() {
|
||||
throw new UnsupportedOperationException("direct buffer");
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte getByte(int index) {
|
||||
return buffer.get(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short getShort(int index) {
|
||||
return buffer.getShort(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getUnsignedMedium(int index) {
|
||||
return (getByte(index) & 0xff) << 16 | (getByte(index + 1) & 0xff) << 8 |
|
||||
(getByte(index + 2) & 0xff) << 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getInt(int index) {
|
||||
return buffer.getInt(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLong(int index) {
|
||||
return buffer.getLong(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuf dst, int dstIndex, int length) {
|
||||
if (dst instanceof DirectByteBuf) {
|
||||
DirectByteBuf bbdst = (DirectByteBuf) dst;
|
||||
ByteBuffer data = bbdst.tmpBuf;
|
||||
data.clear().position(dstIndex).limit(dstIndex + length);
|
||||
getBytes(index, data);
|
||||
} else if (buffer.hasArray()) {
|
||||
dst.setBytes(dstIndex, buffer.array(), index + buffer.arrayOffset(), length);
|
||||
} else {
|
||||
dst.setBytes(dstIndex, this, index, length);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, byte[] dst, int dstIndex, int length) {
|
||||
try {
|
||||
tmpBuf.clear().position(index).limit(index + length);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to read - Need " +
|
||||
(index + length) + ", maximum is " + buffer.limit());
|
||||
}
|
||||
tmpBuf.get(dst, dstIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuffer dst) {
|
||||
int bytesToCopy = Math.min(capacity() - index, dst.remaining());
|
||||
try {
|
||||
tmpBuf.clear().position(index).limit(index + bytesToCopy);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to read - Need " +
|
||||
(index + bytesToCopy) + ", maximum is " + buffer.limit());
|
||||
}
|
||||
dst.put(tmpBuf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setByte(int index, int value) {
|
||||
buffer.put(index, (byte) value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setShort(int index, int value) {
|
||||
buffer.putShort(index, (short) value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMedium(int index, int value) {
|
||||
setByte(index, (byte) (value >>> 16));
|
||||
setByte(index + 1, (byte) (value >>> 8));
|
||||
setByte(index + 2, (byte) (value >>> 0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setInt(int index, int value) {
|
||||
buffer.putInt(index, value);
|
||||
}
|
||||
|
||||
@Override
public void setLong(int index, long value) {
buffer.putLong(index, value);
}

@Override
public void setBytes(int index, ByteBuf src, int srcIndex, int length) {
if (src instanceof DirectByteBuf) {
DirectByteBuf bbsrc = (DirectByteBuf) src;
ByteBuffer data = bbsrc.tmpBuf;

data.clear().position(srcIndex).limit(srcIndex + length);
setBytes(index, data);
} else if (buffer.hasArray()) {
src.getBytes(srcIndex, buffer.array(), index + buffer.arrayOffset(), length);
} else {
src.getBytes(srcIndex, this, index, length);
}
}

@Override
public void setBytes(int index, byte[] src, int srcIndex, int length) {
tmpBuf.clear().position(index).limit(index + length);
tmpBuf.put(src, srcIndex, length);
}

@Override
public void setBytes(int index, ByteBuffer src) {
if (src == tmpBuf) {
src = src.duplicate();
}

tmpBuf.clear().position(index).limit(index + src.remaining());
tmpBuf.put(src);
}

@Override
public void getBytes(int index, OutputStream out, int length) throws IOException {
if (length == 0) {
return;
}

if (buffer.hasArray()) {
out.write(buffer.array(), index + buffer.arrayOffset(), length);
} else {
byte[] tmp = new byte[length];
tmpBuf.clear().position(index);
tmpBuf.get(tmp);
out.write(tmp);
}
}

@Override
public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
if (length == 0) {
return 0;
}

tmpBuf.clear().position(index).limit(index + length);
return out.write(tmpBuf);
}

@Override
public int setBytes(int index, InputStream in, int length) throws IOException {

if (buffer.hasArray()) {
return in.read(buffer.array(), buffer.arrayOffset() + index, length);
} else {
byte[] tmp = new byte[length];
int readBytes = in.read(tmp);
tmpBuf.clear().position(index);
tmpBuf.put(tmp);
return readBytes;
}
}

@Override
public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
tmpBuf.clear().position(index).limit(index + length);
try {
return in.read(tmpBuf);
} catch (ClosedChannelException e) {
return -1;
}
}

@Override
public boolean hasNioBuffer() {
return true;
}

@Override
public ByteBuffer nioBuffer(int index, int length) {
if (index == 0 && length == capacity()) {
return buffer.duplicate();
} else {
return ((ByteBuffer) tmpBuf.clear().position(index).limit(index + length)).slice();
}
}

@Override
public ByteBuf copy(int index, int length) {
ByteBuffer src;
try {
src = (ByteBuffer) tmpBuf.clear().position(index).limit(index + length);
} catch (IllegalArgumentException e) {
throw new IndexOutOfBoundsException("Too many bytes to read - Need " + (index + length));
}

ByteBuffer dst =
src.isDirect()? ByteBuffer.allocateDirect(length) : ByteBuffer.allocate(length);
dst.put(src);
dst.order(order());
dst.clear();
return new DirectByteBuf(dst, maxCapacity());
}

@Override
public Unsafe unsafe() {
return unsafe;
}

private class DirectUnsafe implements Unsafe {
@Override
public ByteBuffer nioBuffer() {
return tmpBuf;
}

@Override
public ByteBuf newBuffer(int initialCapacity) {
return new DirectByteBuf(initialCapacity, Math.max(initialCapacity, maxCapacity()));
}

@Override
public void acquire() {
if (refCnt <= 0) {
throw new IllegalStateException();
}
refCnt ++;
}

@Override
public void release() {
if (refCnt <= 0) {
throw new IllegalStateException();
}
refCnt --;
if (refCnt == 0) {
if (doNotFree) {
doNotFree = false;
} else {
freeDirect(buffer);
}

buffer = null;
tmpBuf = null;
}
}
}
}
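The DirectUnsafe above ties the lifetime of the backing direct ByteBuffer to a plain reference count: acquire() increments it, release() decrements it, and when the count reaches zero the native memory is freed through freeDirect(buffer) unless doNotFree was set for a merely wrapped buffer. A minimal usage sketch under that assumption; it uses only the unsafe() API visible in this commit, and the class name is illustrative:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class RefCountSketch {
    public static void main(String[] args) {
        ByteBuf buf = Unpooled.directBuffer(64, 1024); // reference count starts at 1
        buf.unsafe().acquire();                        // a second logical owner, e.g. a derived buffer
        buf.unsafe().release();                        // back to 1, buffer still usable
        buf.unsafe().release();                        // reaches 0, the backing direct ByteBuffer is freed
        // Any further acquire()/release() call now throws IllegalStateException.
    }
}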
@ -1,202 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.lang.ref.ReferenceQueue;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
/**
|
||||
* A {@link ByteBufFactory} which pre-allocates a large chunk of direct
|
||||
* buffer and returns its slice on demand. Direct buffers are reclaimed via
|
||||
* {@link ReferenceQueue} in most JDK implementations, and therefore they are
|
||||
* deallocated less efficiently than an ordinary heap buffer. Consequently,
|
||||
* a user will get {@link OutOfMemoryError} when one tries to allocate small
|
||||
* direct buffers more often than the GC throughput of direct buffers, which
|
||||
* is much lower than the GC throughput of heap buffers. This factory avoids
|
||||
* this problem by allocating a large chunk of pre-allocated direct buffer and
|
||||
* reducing the number of the garbage collected internal direct buffer objects.
|
||||
*/
|
||||
public class DirectByteBufFactory extends AbstractByteBufFactory {
|
||||
|
||||
private static final DirectByteBufFactory INSTANCE_BE =
|
||||
new DirectByteBufFactory(ByteOrder.BIG_ENDIAN);
|
||||
|
||||
private static final DirectByteBufFactory INSTANCE_LE =
|
||||
new DirectByteBufFactory(ByteOrder.LITTLE_ENDIAN);
|
||||
|
||||
public static ByteBufFactory getInstance() {
|
||||
return INSTANCE_BE;
|
||||
}
|
||||
|
||||
public static ByteBufFactory getInstance(ByteOrder defaultEndianness) {
|
||||
if (defaultEndianness == ByteOrder.BIG_ENDIAN) {
|
||||
return INSTANCE_BE;
|
||||
} else if (defaultEndianness == ByteOrder.LITTLE_ENDIAN) {
|
||||
return INSTANCE_LE;
|
||||
} else if (defaultEndianness == null) {
|
||||
throw new NullPointerException("defaultEndianness");
|
||||
} else {
|
||||
throw new IllegalStateException("Should not reach here");
|
||||
}
|
||||
}
|
||||
|
||||
private final Object bigEndianLock = new Object();
|
||||
private final Object littleEndianLock = new Object();
|
||||
private final int preallocatedBufCapacity;
|
||||
private ByteBuf preallocatedBEBuf;
|
||||
private int preallocatedBEBufPos;
|
||||
private ByteBuf preallocatedLEBuf;
|
||||
private int preallocatedLEBufPos;
|
||||
|
||||
/**
|
||||
* Creates a new factory whose default {@link ByteOrder} is
|
||||
* {@link ByteOrder#BIG_ENDIAN}.
|
||||
*/
|
||||
public DirectByteBufFactory() {
|
||||
this(ByteOrder.BIG_ENDIAN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new factory whose default {@link ByteOrder} is
|
||||
* {@link ByteOrder#BIG_ENDIAN}.
|
||||
*/
|
||||
public DirectByteBufFactory(int preallocatedBufferCapacity) {
|
||||
this(ByteOrder.BIG_ENDIAN, preallocatedBufferCapacity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new factory with the specified default {@link ByteOrder}.
|
||||
*
|
||||
* @param defaultOrder the default {@link ByteOrder} of this factory
|
||||
*/
|
||||
public DirectByteBufFactory(ByteOrder defaultOrder) {
|
||||
this(defaultOrder, 1048576);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new factory with the specified default {@link ByteOrder}.
|
||||
*
|
||||
* @param defaultOrder the default {@link ByteOrder} of this factory
|
||||
*/
|
||||
public DirectByteBufFactory(ByteOrder defaultOrder, int preallocatedBufferCapacity) {
|
||||
super(defaultOrder);
|
||||
if (preallocatedBufferCapacity <= 0) {
|
||||
throw new IllegalArgumentException(
|
||||
"preallocatedBufCapacity must be greater than 0: " + preallocatedBufferCapacity);
|
||||
}
|
||||
|
||||
preallocatedBufCapacity = preallocatedBufferCapacity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf getBuffer(ByteOrder order, int capacity) {
|
||||
if (order == null) {
|
||||
throw new NullPointerException("order");
|
||||
}
|
||||
if (capacity < 0) {
|
||||
throw new IllegalArgumentException("capacity: " + capacity);
|
||||
}
|
||||
if (capacity == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
if (capacity >= preallocatedBufCapacity) {
|
||||
return Unpooled.directBuffer(capacity).order(order);
|
||||
}
|
||||
|
||||
ByteBuf slice;
|
||||
if (order == ByteOrder.BIG_ENDIAN) {
|
||||
slice = allocateBigEndianBuffer(capacity);
|
||||
} else {
|
||||
slice = allocateLittleEndianBuffer(capacity);
|
||||
}
|
||||
slice.clear();
|
||||
return slice;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf getBuffer(ByteOrder order, byte[] array, int offset, int length) {
|
||||
if (array == null) {
|
||||
throw new NullPointerException("array");
|
||||
}
|
||||
if (offset < 0) {
|
||||
throw new IndexOutOfBoundsException("offset: " + offset);
|
||||
}
|
||||
if (length == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
if (offset + length > array.length) {
|
||||
throw new IndexOutOfBoundsException("length: " + length);
|
||||
}
|
||||
|
||||
ByteBuf buf = getBuffer(order, length);
|
||||
buf.writeBytes(array, offset, length);
|
||||
return buf;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf getBuffer(ByteBuffer nioBuffer) {
|
||||
if (!nioBuffer.isReadOnly() && nioBuffer.isDirect()) {
|
||||
return Unpooled.wrappedBuffer(nioBuffer);
|
||||
}
|
||||
|
||||
ByteBuf buf = getBuffer(nioBuffer.order(), nioBuffer.remaining());
|
||||
int pos = nioBuffer.position();
|
||||
buf.writeBytes(nioBuffer);
|
||||
nioBuffer.position(pos);
|
||||
return buf;
|
||||
}
|
||||
|
||||
private ByteBuf allocateBigEndianBuffer(int capacity) {
|
||||
ByteBuf slice;
|
||||
synchronized (bigEndianLock) {
|
||||
if (preallocatedBEBuf == null) {
|
||||
preallocatedBEBuf = Unpooled.directBuffer(preallocatedBufCapacity);
|
||||
slice = preallocatedBEBuf.slice(0, capacity);
|
||||
preallocatedBEBufPos = capacity;
|
||||
} else if (preallocatedBEBuf.capacity() - preallocatedBEBufPos >= capacity) {
|
||||
slice = preallocatedBEBuf.slice(preallocatedBEBufPos, capacity);
|
||||
preallocatedBEBufPos += capacity;
|
||||
} else {
|
||||
preallocatedBEBuf = Unpooled.directBuffer(preallocatedBufCapacity);
|
||||
slice = preallocatedBEBuf.slice(0, capacity);
|
||||
preallocatedBEBufPos = capacity;
|
||||
}
|
||||
}
|
||||
return slice;
|
||||
}
|
||||
|
||||
private ByteBuf allocateLittleEndianBuffer(int capacity) {
|
||||
ByteBuf slice;
|
||||
synchronized (littleEndianLock) {
|
||||
if (preallocatedLEBuf == null) {
|
||||
preallocatedLEBuf = Unpooled.directBuffer(
|
||||
preallocatedBufCapacity).order(ByteOrder.LITTLE_ENDIAN);
|
||||
slice = preallocatedLEBuf.slice(0, capacity);
|
||||
preallocatedLEBufPos = capacity;
|
||||
} else if (preallocatedLEBuf.capacity() - preallocatedLEBufPos >= capacity) {
|
||||
slice = preallocatedLEBuf.slice(preallocatedLEBufPos, capacity);
|
||||
preallocatedLEBufPos += capacity;
|
||||
} else {
|
||||
preallocatedLEBuf = Unpooled.directBuffer(
|
||||
preallocatedBufCapacity).order(ByteOrder.LITTLE_ENDIAN);
|
||||
slice = preallocatedLEBuf.slice(0, capacity);
|
||||
preallocatedLEBufPos = capacity;
|
||||
}
|
||||
}
|
||||
return slice;
|
||||
}
|
||||
}
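The Javadoc of the removed DirectByteBufFactory describes the trick it implemented: allocate one large direct buffer up front and serve small requests as slices of it, so the garbage collector only ever sees a single direct-buffer object. A self-contained sketch of that idea in plain NIO, assuming nothing beyond java.nio; PreallocatingSlicer is a hypothetical name, not Netty API:

import java.nio.ByteBuffer;

public class PreallocatingSlicer {
    private final int chunkSize;
    private ByteBuffer chunk;
    private int pos;

    public PreallocatingSlicer(int chunkSize) {
        this.chunkSize = chunkSize;
        chunk = ByteBuffer.allocateDirect(chunkSize);
    }

    public synchronized ByteBuffer allocate(int capacity) {
        if (capacity >= chunkSize) {
            // Too large to carve out of the shared chunk; allocate directly.
            return ByteBuffer.allocateDirect(capacity);
        }
        if (chunkSize - pos < capacity) {
            // Current chunk is exhausted; start a new one.
            chunk = ByteBuffer.allocateDirect(chunkSize);
            pos = 0;
        }
        chunk.position(pos).limit(pos + capacity);
        ByteBuffer slice = chunk.slice();   // independent view over [pos, pos + capacity)
        pos += capacity;
        chunk.clear();
        return slice;
    }
}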
|
@ -30,18 +30,21 @@ import java.nio.channels.ScatteringByteChannel;
*/
public class DuplicatedByteBuf extends AbstractByteBuf implements WrappedByteBuf {

private final ByteBuf buffer;
private final Unsafe unsafe = new DuplicatedUnsafe();
final ByteBuf buffer;

public DuplicatedByteBuf(ByteBuf buffer) {
super(buffer.order());
this.buffer = buffer;
setIndex(buffer.readerIndex(), buffer.writerIndex());
}
super(buffer.order(), buffer.maxCapacity());

if (buffer instanceof DuplicatedByteBuf) {
this.buffer = ((DuplicatedByteBuf) buffer).buffer;
} else {
this.buffer = buffer;
}

private DuplicatedByteBuf(DuplicatedByteBuf buffer) {
super(buffer.buffer.order());
this.buffer = buffer.buffer;
setIndex(buffer.readerIndex(), buffer.writerIndex());

buffer.unsafe().acquire();
}

@Override
@ -49,11 +52,6 @@ public class DuplicatedByteBuf extends AbstractByteBuf implements WrappedByteBuf
return buffer;
}

@Override
public ByteBufFactory factory() {
return buffer.factory();
}

@Override
public boolean isDirect() {
return buffer.isDirect();
@ -64,6 +62,11 @@ public class DuplicatedByteBuf extends AbstractByteBuf implements WrappedByteBuf
return buffer.capacity();
}

@Override
public void capacity(int newCapacity) {
buffer.capacity(newCapacity);
}

@Override
public boolean hasArray() {
return buffer.hasArray();
@ -207,4 +210,32 @@ public class DuplicatedByteBuf extends AbstractByteBuf implements WrappedByteBuf
public ByteBuffer nioBuffer(int index, int length) {
return buffer.nioBuffer(index, length);
}

@Override
public Unsafe unsafe() {
return unsafe;
}

private final class DuplicatedUnsafe implements Unsafe {

@Override
public ByteBuffer nioBuffer() {
return buffer.unsafe().nioBuffer();
}

@Override
public ByteBuf newBuffer(int initialCapacity) {
return buffer.unsafe().newBuffer(initialCapacity);
}

@Override
public void acquire() {
buffer.unsafe().acquire();
}

@Override
public void release() {
buffer.unsafe().release();
}
}
}
@ -1,320 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.channels.GatheringByteChannel;
|
||||
import java.nio.channels.ScatteringByteChannel;
|
||||
|
||||
|
||||
/**
|
||||
* A dynamic capacity buffer which increases its capacity as needed. It is
|
||||
* recommended to use {@link Unpooled#dynamicBuffer(int)} instead of
|
||||
* calling the constructor explicitly.
|
||||
*/
|
||||
public class DynamicByteBuf extends AbstractByteBuf {
|
||||
|
||||
private final ByteBufFactory factory;
|
||||
private ByteBuf buffer;
|
||||
|
||||
public DynamicByteBuf(int estimatedLength) {
|
||||
this(estimatedLength, HeapByteBufFactory.getInstance(ByteOrder.BIG_ENDIAN));
|
||||
}
|
||||
|
||||
public DynamicByteBuf(int estimatedLength, ByteBufFactory factory) {
|
||||
super(ByteOrder.BIG_ENDIAN);
|
||||
if (estimatedLength < 0) {
|
||||
throw new IllegalArgumentException("estimatedLength: " + estimatedLength);
|
||||
}
|
||||
if (factory == null) {
|
||||
throw new NullPointerException("factory");
|
||||
}
|
||||
this.factory = factory;
|
||||
buffer = factory.getBuffer(ByteOrder.BIG_ENDIAN, estimatedLength);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void ensureWritableBytes(int minWritableBytes) {
|
||||
if (minWritableBytes <= writableBytes()) {
|
||||
return;
|
||||
}
|
||||
|
||||
int newCapacity;
|
||||
if (capacity() == 0) {
|
||||
newCapacity = 1;
|
||||
} else {
|
||||
newCapacity = capacity();
|
||||
}
|
||||
int minNewCapacity = writerIndex() + minWritableBytes;
|
||||
while (newCapacity < minNewCapacity) {
|
||||
newCapacity <<= 1;
|
||||
// Check if we exceeded the maximum size of 2gb if this is the case then
|
||||
// newCapacity == 0
|
||||
//
|
||||
// https://github.com/netty/netty/issues/258
|
||||
if (newCapacity == 0) {
|
||||
throw new IllegalStateException("buffer capacity over 2GiB");
|
||||
}
|
||||
}
|
||||
|
||||
ByteBuf newBuffer = factory().getBuffer(order(), newCapacity);
|
||||
newBuffer.writeBytes(buffer, 0, writerIndex());
|
||||
buffer = newBuffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBufFactory factory() {
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDirect() {
|
||||
return buffer.isDirect();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int capacity() {
|
||||
return buffer.capacity();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasArray() {
|
||||
return buffer.hasArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] array() {
|
||||
return buffer.array();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int arrayOffset() {
|
||||
return buffer.arrayOffset();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte getByte(int index) {
|
||||
return buffer.getByte(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short getShort(int index) {
|
||||
return buffer.getShort(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getUnsignedMedium(int index) {
|
||||
return buffer.getUnsignedMedium(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getInt(int index) {
|
||||
return buffer.getInt(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLong(int index) {
|
||||
return buffer.getLong(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, byte[] dst, int dstIndex, int length) {
|
||||
buffer.getBytes(index, dst, dstIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuf dst, int dstIndex, int length) {
|
||||
buffer.getBytes(index, dst, dstIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuffer dst) {
|
||||
buffer.getBytes(index, dst);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getBytes(int index, GatheringByteChannel out, int length)
|
||||
throws IOException {
|
||||
return buffer.getBytes(index, out, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, OutputStream out, int length)
|
||||
throws IOException {
|
||||
buffer.getBytes(index, out, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setByte(int index, int value) {
|
||||
buffer.setByte(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setShort(int index, int value) {
|
||||
buffer.setShort(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMedium(int index, int value) {
|
||||
buffer.setMedium(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setInt(int index, int value) {
|
||||
buffer.setInt(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLong(int index, long value) {
|
||||
buffer.setLong(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, byte[] src, int srcIndex, int length) {
|
||||
buffer.setBytes(index, src, srcIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, ByteBuf src, int srcIndex, int length) {
|
||||
buffer.setBytes(index, src, srcIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, ByteBuffer src) {
|
||||
buffer.setBytes(index, src);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setBytes(int index, InputStream in, int length)
|
||||
throws IOException {
|
||||
return buffer.setBytes(index, in, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setBytes(int index, ScatteringByteChannel in, int length)
|
||||
throws IOException {
|
||||
return buffer.setBytes(index, in, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeByte(int value) {
|
||||
ensureWritableBytes(1);
|
||||
super.writeByte(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeShort(int value) {
|
||||
ensureWritableBytes(2);
|
||||
super.writeShort(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeMedium(int value) {
|
||||
ensureWritableBytes(3);
|
||||
super.writeMedium(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeInt(int value) {
|
||||
ensureWritableBytes(4);
|
||||
super.writeInt(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeLong(long value) {
|
||||
ensureWritableBytes(8);
|
||||
super.writeLong(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBytes(byte[] src, int srcIndex, int length) {
|
||||
ensureWritableBytes(length);
|
||||
super.writeBytes(src, srcIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBytes(ByteBuf src, int srcIndex, int length) {
|
||||
ensureWritableBytes(length);
|
||||
super.writeBytes(src, srcIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBytes(ByteBuffer src) {
|
||||
ensureWritableBytes(src.remaining());
|
||||
super.writeBytes(src);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int writeBytes(InputStream in, int length) throws IOException {
|
||||
ensureWritableBytes(length);
|
||||
return super.writeBytes(in, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int writeBytes(ScatteringByteChannel in, int length)
|
||||
throws IOException {
|
||||
ensureWritableBytes(length);
|
||||
return super.writeBytes(in, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeZero(int length) {
|
||||
ensureWritableBytes(length);
|
||||
super.writeZero(length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf duplicate() {
|
||||
return new DuplicatedByteBuf(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf copy(int index, int length) {
|
||||
DynamicByteBuf copiedBuffer = new DynamicByteBuf(Math.max(length, 64), factory());
|
||||
copiedBuffer.buffer = buffer.copy(index, length);
|
||||
copiedBuffer.setIndex(0, length);
|
||||
return copiedBuffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf slice(int index, int length) {
|
||||
if (index == 0) {
|
||||
if (length == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
return new TruncatedByteBuf(this, length);
|
||||
} else {
|
||||
if (length == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
return new SlicedByteBuf(this, index, length);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNioBuffer() {
|
||||
return buffer.hasNioBuffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuffer nioBuffer(int index, int length) {
|
||||
return buffer.nioBuffer(index, length);
|
||||
}
|
||||
}
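The removed DynamicByteBuf.ensureWritableBytes() above grows by doubling: it starts from the current capacity (or 1 when empty) and shifts left until the capacity covers writerIndex + minWritableBytes; a shift that wraps around to zero means the 2 GiB limit of an int-indexed buffer was exceeded (netty/netty#258). A standalone sketch of just that calculation, with illustrative names:

public class GrowthRule {
    static int grow(int currentCapacity, int writerIndex, int minWritableBytes) {
        int newCapacity = currentCapacity == 0 ? 1 : currentCapacity;
        int minNewCapacity = writerIndex + minWritableBytes;
        while (newCapacity < minNewCapacity) {
            newCapacity <<= 1;
            if (newCapacity == 0) {
                // Doubling overflowed past Integer.MAX_VALUE.
                throw new IllegalStateException("buffer capacity over 2GiB");
            }
        }
        return newCapacity;
    }

    public static void main(String[] args) {
        // 256 bytes allocated, 100 already written, 300 more requested: 256 doubles to 512.
        System.out.println(grow(256, 100, 300));
    }
}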
|
@ -29,46 +29,48 @@ import java.nio.channels.ScatteringByteChannel;
*/
public class HeapByteBuf extends AbstractByteBuf {

/**
* The underlying heap byte array that this buffer is wrapping.
*/
protected final byte[] array;
private final Unsafe unsafe = new HeapUnsafe();

protected final ByteBuffer nioBuf;
private byte[] array;
private ByteBuffer nioBuf;

/**
* Creates a new heap buffer with a newly allocated byte array.
*
* @param length the length of the new byte array
* @param initialCapacity the initial capacity of the underlying byte array
* @param maxCapacity the max capacity of the underlying byte array
*/
public HeapByteBuf(int length) {
this(new byte[length], 0, 0);
public HeapByteBuf(int initialCapacity, int maxCapacity) {
this(new byte[initialCapacity], 0, 0, maxCapacity);
}

/**
* Creates a new heap buffer with an existing byte array.
*
* @param array the byte array to wrap
* @param initialArray the initial underlying byte array
* @param maxCapacity the max capacity of the underlying byte array
*/
public HeapByteBuf(byte[] array) {
this(array, 0, array.length);
public HeapByteBuf(byte[] initialArray, int maxCapacity) {
this(initialArray, 0, initialArray.length, maxCapacity);
}

/**
* Creates a new heap buffer with an existing byte array.
*
* @param array the byte array to wrap
* @param readerIndex the initial reader index of this buffer
* @param writerIndex the initial writer index of this buffer
*/
protected HeapByteBuf(byte[] array, int readerIndex, int writerIndex) {
super(ByteOrder.BIG_ENDIAN);
if (array == null) {
throw new NullPointerException("array");
private HeapByteBuf(byte[] initialArray, int readerIndex, int writerIndex, int maxCapacity) {
super(ByteOrder.BIG_ENDIAN, maxCapacity);
if (initialArray == null) {
throw new NullPointerException("initialArray");
}
this.array = array;
if (initialArray.length > maxCapacity) {
throw new IllegalArgumentException(String.format(
"initialCapacity(%d) > maxCapacity(%d)", initialArray.length, maxCapacity));
}

setArray(initialArray);
setIndex(readerIndex, writerIndex);
nioBuf = ByteBuffer.wrap(array);
}

private void setArray(byte[] initialArray) {
array = initialArray;
nioBuf = ByteBuffer.wrap(initialArray);
}

@Override
@ -81,6 +83,33 @@ public class HeapByteBuf extends AbstractByteBuf {
return array.length;
}

@Override
public void capacity(int newCapacity) {
if (newCapacity < 0 || newCapacity > maxCapacity()) {
throw new IllegalArgumentException("newCapacity: " + newCapacity);
}

int oldCapacity = array.length;
if (newCapacity > oldCapacity) {
byte[] newArray = new byte[newCapacity];
System.arraycopy(array, readerIndex(), newArray, readerIndex(), readableBytes());
setArray(newArray);
} else if (newCapacity < oldCapacity) {
byte[] newArray = new byte[newCapacity];
int readerIndex = readerIndex();
if (readerIndex < newCapacity) {
int writerIndex = writerIndex();
if (writerIndex > newCapacity) {
writerIndex(writerIndex = newCapacity);
}
System.arraycopy(array, readerIndex, newArray, readerIndex, writerIndex - readerIndex);
} else {
setIndex(newCapacity, newCapacity);
}
setArray(newArray);
}
}

@Override
public boolean hasArray() {
return true;
@ -170,27 +199,6 @@ public class HeapByteBuf extends AbstractByteBuf {
}
}

@Override
public ByteBuf slice(int index, int length) {
if (index == 0) {
if (length == 0) {
return Unpooled.EMPTY_BUFFER;
}
if (length == array.length) {
ByteBuf slice = duplicate();
slice.setIndex(0, length);
return slice;
} else {
return new TruncatedByteBuf(this, length);
}
} else {
if (length == 0) {
return Unpooled.EMPTY_BUFFER;
}
return new SlicedByteBuf(this, index, length);
}
}

@Override
public boolean hasNioBuffer() {
return true;
@ -198,12 +206,7 @@ public class HeapByteBuf extends AbstractByteBuf {

@Override
public ByteBuffer nioBuffer(int index, int length) {
return ByteBuffer.wrap(array, index, length).order(order());
}

@Override
public ByteBufFactory factory() {
return HeapByteBufFactory.getInstance(ByteOrder.BIG_ENDIAN);
return ByteBuffer.wrap(array, index, length);
}

@Override
@ -271,11 +274,6 @@ public class HeapByteBuf extends AbstractByteBuf {
array[index + 7] = (byte) (value >>> 0);
}

@Override
public ByteBuf duplicate() {
return new HeapByteBuf(array, readerIndex(), writerIndex());
}

@Override
public ByteBuf copy(int index, int length) {
if (index < 0 || length < 0 || index + length > array.length) {
@ -285,6 +283,43 @@ public class HeapByteBuf extends AbstractByteBuf {

byte[] copiedArray = new byte[length];
System.arraycopy(array, index, copiedArray, 0, length);
return new HeapByteBuf(copiedArray);
return new HeapByteBuf(copiedArray, maxCapacity());
}

@Override
public Unsafe unsafe() {
return unsafe;
}

private class HeapUnsafe implements Unsafe {
@Override
public ByteBuffer nioBuffer() {
return nioBuf;
}

@Override
public ByteBuf newBuffer(int initialCapacity) {
return new HeapByteBuf(initialCapacity, Math.max(initialCapacity, maxCapacity()));
}

@Override
public void acquire() {
if (refCnt <= 0) {
throw new IllegalStateException();
}
refCnt ++;
}

@Override
public void release() {
if (refCnt <= 0) {
throw new IllegalStateException();
}
refCnt --;
if (refCnt == 0) {
array = null;
nioBuf = null;
}
}
}
}
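With the capacity(int) method above, a heap buffer reallocates its backing array on demand instead of staying fixed, which is what lets every buffer behave like the old DynamicByteBuf. A usage sketch assuming only methods visible in this commit (Unpooled.buffer(int, int), ensureWritableBytes, capacity); the grown value is only bounded below, since ensureWritableBytes normalizes the new capacity upward:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class DynamicHeapSketch {
    public static void main(String[] args) {
        ByteBuf buf = Unpooled.buffer(16, 1024);  // initialCapacity 16, maxCapacity 1024
        System.out.println(buf.capacity());       // 16
        buf.ensureWritableBytes(100);             // triggers capacity(int); the array is reallocated
        System.out.println(buf.capacity());       // at least 100, never more than maxCapacity()
        buf.capacity(8);                          // shrinking copies what still fits and clamps the indices
        System.out.println(buf.capacity());       // 8
    }
}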
@ -1,89 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
/**
|
||||
* A {@link ByteBufFactory} which merely allocates a heap buffer with
|
||||
* the specified capacity. {@link HeapByteBufFactory} should perform
|
||||
* very well in most situations because it relies on the JVM garbage collector,
|
||||
* which is highly optimized for heap allocation.
|
||||
*/
|
||||
public class HeapByteBufFactory extends AbstractByteBufFactory {
|
||||
|
||||
private static final HeapByteBufFactory INSTANCE_BE =
|
||||
new HeapByteBufFactory(ByteOrder.BIG_ENDIAN);
|
||||
|
||||
private static final HeapByteBufFactory INSTANCE_LE =
|
||||
new HeapByteBufFactory(ByteOrder.LITTLE_ENDIAN);
|
||||
|
||||
public static ByteBufFactory getInstance() {
|
||||
return INSTANCE_BE;
|
||||
}
|
||||
|
||||
public static ByteBufFactory getInstance(ByteOrder endianness) {
|
||||
if (endianness == ByteOrder.BIG_ENDIAN) {
|
||||
return INSTANCE_BE;
|
||||
} else if (endianness == ByteOrder.LITTLE_ENDIAN) {
|
||||
return INSTANCE_LE;
|
||||
} else if (endianness == null) {
|
||||
throw new NullPointerException("endianness");
|
||||
} else {
|
||||
throw new IllegalStateException("Should not reach here");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new factory whose default {@link ByteOrder} is
|
||||
* {@link ByteOrder#BIG_ENDIAN}.
|
||||
*/
|
||||
public HeapByteBufFactory() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new factory with the specified default {@link ByteOrder}.
|
||||
*
|
||||
* @param defaultOrder the default {@link ByteOrder} of this factory
|
||||
*/
|
||||
public HeapByteBufFactory(ByteOrder defaultOrder) {
|
||||
super(defaultOrder);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf getBuffer(ByteOrder order, int capacity) {
|
||||
return Unpooled.buffer(capacity).order(order);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf getBuffer(ByteOrder order, byte[] array, int offset, int length) {
|
||||
return Unpooled.wrappedBuffer(array, offset, length).order(order);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf getBuffer(ByteBuffer nioBuffer) {
|
||||
if (nioBuffer.hasArray()) {
|
||||
return Unpooled.wrappedBuffer(nioBuffer);
|
||||
}
|
||||
|
||||
ByteBuf buf = getBuffer(nioBuffer.order(), nioBuffer.remaining());
|
||||
int pos = nioBuffer.position();
|
||||
buf.writeBytes(nioBuffer);
|
||||
nioBuffer.position(pos);
|
||||
return buf;
|
||||
}
|
||||
}
|
@ -1,320 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.channels.ClosedChannelException;
|
||||
import java.nio.channels.GatheringByteChannel;
|
||||
import java.nio.channels.ScatteringByteChannel;
|
||||
|
||||
/**
|
||||
* A NIO {@link ByteBuffer} based buffer. It is recommended to use {@link Unpooled#directBuffer(int)}
|
||||
* and {@link Unpooled#wrappedBuffer(ByteBuffer)} instead of calling the
|
||||
* constructor explicitly.
|
||||
*/
|
||||
public class NioBufferBackedByteBuf extends AbstractByteBuf {
|
||||
|
||||
private final ByteBuffer buffer;
|
||||
private final ByteBuffer tmpBuf;
|
||||
private final int capacity;
|
||||
|
||||
/**
|
||||
* Creates a new buffer which wraps the specified buffer's slice.
|
||||
*/
|
||||
public NioBufferBackedByteBuf(ByteBuffer buffer) {
|
||||
super(buffer.order());
|
||||
this.buffer = buffer.slice().order(order());
|
||||
tmpBuf = this.buffer.duplicate();
|
||||
capacity = buffer.remaining();
|
||||
writerIndex(capacity);
|
||||
}
|
||||
|
||||
private NioBufferBackedByteBuf(NioBufferBackedByteBuf buffer) {
|
||||
super(buffer.order());
|
||||
this.buffer = buffer.buffer;
|
||||
tmpBuf = this.buffer.duplicate();
|
||||
capacity = buffer.capacity;
|
||||
setIndex(buffer.readerIndex(), buffer.writerIndex());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBufFactory factory() {
|
||||
if (buffer.isDirect()) {
|
||||
return DirectByteBufFactory.getInstance(order());
|
||||
} else {
|
||||
return HeapByteBufFactory.getInstance(order());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDirect() {
|
||||
return buffer.isDirect();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int capacity() {
|
||||
return capacity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasArray() {
|
||||
return buffer.hasArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] array() {
|
||||
return buffer.array();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int arrayOffset() {
|
||||
return buffer.arrayOffset();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte getByte(int index) {
|
||||
return buffer.get(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short getShort(int index) {
|
||||
return buffer.getShort(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getUnsignedMedium(int index) {
|
||||
return (getByte(index) & 0xff) << 16 |
|
||||
(getByte(index + 1) & 0xff) << 8 |
|
||||
(getByte(index + 2) & 0xff) << 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getInt(int index) {
|
||||
return buffer.getInt(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLong(int index) {
|
||||
return buffer.getLong(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuf dst, int dstIndex, int length) {
|
||||
if (dst instanceof NioBufferBackedByteBuf) {
|
||||
NioBufferBackedByteBuf bbdst = (NioBufferBackedByteBuf) dst;
|
||||
ByteBuffer data = bbdst.tmpBuf;
|
||||
data.clear().position(dstIndex).limit(dstIndex + length);
|
||||
getBytes(index, data);
|
||||
} else if (buffer.hasArray()) {
|
||||
dst.setBytes(dstIndex, buffer.array(), index + buffer.arrayOffset(), length);
|
||||
} else {
|
||||
dst.setBytes(dstIndex, this, index, length);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, byte[] dst, int dstIndex, int length) {
|
||||
try {
|
||||
tmpBuf.clear().position(index).limit(index + length);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to read - Need "
|
||||
+ (index + length) + ", maximum is " + buffer.limit());
|
||||
}
|
||||
tmpBuf.get(dst, dstIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuffer dst) {
|
||||
int bytesToCopy = Math.min(capacity() - index, dst.remaining());
|
||||
try {
|
||||
tmpBuf.clear().position(index).limit(index + bytesToCopy);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to read - Need "
|
||||
+ (index + bytesToCopy) + ", maximum is " + buffer.limit());
|
||||
}
|
||||
dst.put(tmpBuf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setByte(int index, int value) {
|
||||
buffer.put(index, (byte) value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setShort(int index, int value) {
|
||||
buffer.putShort(index, (short) value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMedium(int index, int value) {
|
||||
setByte(index, (byte) (value >>> 16));
|
||||
setByte(index + 1, (byte) (value >>> 8));
|
||||
setByte(index + 2, (byte) (value >>> 0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setInt(int index, int value) {
|
||||
buffer.putInt(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLong(int index, long value) {
|
||||
buffer.putLong(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, ByteBuf src, int srcIndex, int length) {
|
||||
if (src instanceof NioBufferBackedByteBuf) {
|
||||
NioBufferBackedByteBuf bbsrc = (NioBufferBackedByteBuf) src;
|
||||
ByteBuffer data = bbsrc.tmpBuf;
|
||||
|
||||
data.clear().position(srcIndex).limit(srcIndex + length);
|
||||
setBytes(index, data);
|
||||
} else if (buffer.hasArray()) {
|
||||
src.getBytes(srcIndex, buffer.array(), index + buffer.arrayOffset(), length);
|
||||
} else {
|
||||
src.getBytes(srcIndex, this, index, length);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, byte[] src, int srcIndex, int length) {
|
||||
tmpBuf.clear().position(index).limit(index + length);
|
||||
tmpBuf.put(src, srcIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, ByteBuffer src) {
|
||||
if (src == tmpBuf) {
|
||||
src = src.duplicate();
|
||||
}
|
||||
|
||||
tmpBuf.clear().position(index).limit(index + src.remaining());
|
||||
tmpBuf.put(src);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, OutputStream out, int length) throws IOException {
|
||||
if (length == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (buffer.hasArray()) {
|
||||
out.write(
|
||||
buffer.array(),
|
||||
index + buffer.arrayOffset(),
|
||||
length);
|
||||
} else {
|
||||
byte[] tmp = new byte[length];
|
||||
tmpBuf.clear().position(index);
|
||||
tmpBuf.get(tmp);
|
||||
out.write(tmp);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
|
||||
if (length == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
tmpBuf.clear().position(index).limit(index + length);
|
||||
return out.write(tmpBuf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setBytes(int index, InputStream in, int length)
|
||||
throws IOException {
|
||||
|
||||
if (buffer.hasArray()) {
|
||||
return in.read(buffer.array(), buffer.arrayOffset() + index, length);
|
||||
} else {
|
||||
byte[] tmp = new byte[length];
|
||||
int readBytes = in.read(tmp);
|
||||
tmpBuf.clear().position(index);
|
||||
tmpBuf.put(tmp);
|
||||
return readBytes;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setBytes(int index, ScatteringByteChannel in, int length)
|
||||
throws IOException {
|
||||
|
||||
tmpBuf.clear().position(index).limit(index + length);
|
||||
try {
|
||||
return in.read(tmpBuf);
|
||||
} catch (ClosedChannelException e) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNioBuffer() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuffer nioBuffer(int index, int length) {
|
||||
if (index == 0 && length == capacity()) {
|
||||
return buffer.duplicate().order(order());
|
||||
} else {
|
||||
return ((ByteBuffer) tmpBuf.clear().position(
|
||||
index).limit(index + length)).slice().order(order());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf slice(int index, int length) {
|
||||
if (index == 0 && length == capacity()) {
|
||||
ByteBuf slice = duplicate();
|
||||
slice.setIndex(0, length);
|
||||
return slice;
|
||||
} else {
|
||||
if (index >= 0 && length == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
return new NioBufferBackedByteBuf(
|
||||
((ByteBuffer) tmpBuf.clear().position(
|
||||
index).limit(index + length)).order(order()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf duplicate() {
|
||||
return new NioBufferBackedByteBuf(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf copy(int index, int length) {
|
||||
ByteBuffer src;
|
||||
try {
|
||||
src = (ByteBuffer) tmpBuf.clear().position(index).limit(index + length);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IndexOutOfBoundsException("Too many bytes to read - Need "
|
||||
+ (index + length));
|
||||
}
|
||||
|
||||
ByteBuffer dst = src.isDirect() ? ByteBuffer.allocateDirect(length) : ByteBuffer.allocate(length);
|
||||
dst.put(src);
|
||||
dst.order(order());
|
||||
dst.clear();
|
||||
return new NioBufferBackedByteBuf(dst);
|
||||
}
|
||||
}
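The removed NioBufferBackedByteBuf above assembles its 24-bit medium values by hand, packing three big-endian bytes into the low 24 bits of an int. A self-contained sketch of that encoding; MediumCodec is an illustrative name, not Netty API:

public class MediumCodec {
    static int readMedium(byte b0, byte b1, byte b2) {
        return (b0 & 0xff) << 16 | (b1 & 0xff) << 8 | b2 & 0xff;
    }

    static byte[] writeMedium(int value) {
        return new byte[] { (byte) (value >>> 16), (byte) (value >>> 8), (byte) value };
    }

    public static void main(String[] args) {
        byte[] b = writeMedium(0x123456);
        System.out.println(Integer.toHexString(readMedium(b[0], b[1], b[2]))); // prints 123456
    }
}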
|
@ -33,13 +33,13 @@ public class ReadOnlyByteBuf extends AbstractByteBuf implements WrappedByteBuf {
private final ByteBuf buffer;

public ReadOnlyByteBuf(ByteBuf buffer) {
super(buffer.order());
super(buffer.order(), buffer.maxCapacity());
this.buffer = buffer;
setIndex(buffer.readerIndex(), buffer.writerIndex());
}

private ReadOnlyByteBuf(ReadOnlyByteBuf buffer) {
super(buffer.buffer.order());
super(buffer.buffer.order(), buffer.maxCapacity());
this.buffer = buffer.buffer;
setIndex(buffer.readerIndex(), buffer.writerIndex());
}
@ -49,11 +49,6 @@ public class ReadOnlyByteBuf extends AbstractByteBuf implements WrappedByteBuf {
return buffer;
}

@Override
public ByteBufFactory factory() {
return buffer.factory();
}

@Override
public boolean isDirect() {
return buffer.isDirect();
@ -212,4 +207,14 @@ public class ReadOnlyByteBuf extends AbstractByteBuf implements WrappedByteBuf {
public int capacity() {
return buffer.capacity();
}

@Override
public void capacity(int newCapacity) {
throw new ReadOnlyBufferException();
}

@Override
public Unsafe unsafe() {
return buffer.unsafe();
}
}
@ -31,12 +31,13 @@ import java.nio.channels.ScatteringByteChannel;
*/
public class SlicedByteBuf extends AbstractByteBuf implements WrappedByteBuf {

private final Unsafe unsafe = new SlicedUnsafe();
private final ByteBuf buffer;
private final int adjustment;
private final int length;

public SlicedByteBuf(ByteBuf buffer, int index, int length) {
super(buffer.order());
super(buffer.order(), length);
if (index < 0 || index > buffer.capacity()) {
throw new IndexOutOfBoundsException("Invalid index of " + index
+ ", maximum is " + buffer.capacity());
@ -47,10 +48,21 @@ public class SlicedByteBuf extends AbstractByteBuf implements WrappedByteBuf {
+ (index + length) + ", maximum is " + buffer.capacity());
}

this.buffer = buffer;
adjustment = index;
if (buffer instanceof SlicedByteBuf) {
this.buffer = ((SlicedByteBuf) buffer).buffer;
adjustment = ((SlicedByteBuf) buffer).adjustment + index;
} else if (buffer instanceof DuplicatedByteBuf) {
this.buffer = ((DuplicatedByteBuf) buffer).buffer;
adjustment = index;
} else {
this.buffer = buffer;
adjustment = index;
}
this.length = length;

writerIndex(length);

buffer.unsafe().acquire();
}

@Override
@ -58,11 +70,6 @@ public class SlicedByteBuf extends AbstractByteBuf implements WrappedByteBuf {
return buffer;
}

@Override
public ByteBufFactory factory() {
return buffer.factory();
}

@Override
public boolean isDirect() {
return buffer.isDirect();
@ -73,6 +80,11 @@ public class SlicedByteBuf extends AbstractByteBuf implements WrappedByteBuf {
return length;
}

@Override
public void capacity(int newCapacity) {
throw new UnsupportedOperationException("sliced buffer");
}

@Override
public boolean hasArray() {
return buffer.hasArray();
@ -265,4 +277,32 @@ public class SlicedByteBuf extends AbstractByteBuf implements WrappedByteBuf {
+ (startIndex + length) + ", maximum is " + capacity());
}
}

@Override
public Unsafe unsafe() {
return unsafe;
}

private final class SlicedUnsafe implements Unsafe {

@Override
public ByteBuffer nioBuffer() {
return buffer.nioBuffer(adjustment, length);
}

@Override
public ByteBuf newBuffer(int initialCapacity) {
return buffer.unsafe().newBuffer(initialCapacity);
}

@Override
public void acquire() {
buffer.unsafe().acquire();
}

@Override
public void release() {
buffer.unsafe().release();
}
}
}
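The new constructor above unwraps nested slices and duplicates so that only one wrapper ever sits on top of the real buffer, folding the offsets into a single adjustment. A small sketch of the index arithmetic this implies; the numbers and names are illustrative:

public class SliceAdjustment {
    public static void main(String[] args) {
        int outerOffset = 10;  // first slice starts at index 10 of the backing buffer
        int innerOffset = 5;   // a slice of that slice starts at its index 5
        int adjustment = outerOffset + innerOffset; // what the unwrapping accumulates
        int index = 3;         // index used by the caller on the innermost slice
        System.out.println(adjustment + index);     // 18: the actual index in the backing buffer
    }
}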
@ -73,13 +73,18 @@ public class SwappedByteBuf implements WrappedByteBuf {
}

@Override
public ByteBufFactory factory() {
return buf.factory();
public int capacity() {
return buf.capacity();
}

@Override
public int capacity() {
return buf.capacity();
public void capacity(int newCapacity) {
buf.capacity(newCapacity);
}

@Override
public int maxCapacity() {
return buf.maxCapacity();
}

@Override
@ -677,6 +682,11 @@ public class SwappedByteBuf implements WrappedByteBuf {
return buf.toString(index, length, charset);
}

@Override
public Unsafe unsafe() {
return buf.unsafe();
}

@Override
public int hashCode() {
return buf.hashCode();
@ -1,258 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.channels.GatheringByteChannel;
|
||||
import java.nio.channels.ScatteringByteChannel;
|
||||
|
||||
|
||||
/**
|
||||
* A derived buffer which hides its parent's tail data beyond a certain index.
|
||||
* It is recommended to use {@link ByteBuf#slice()} and
|
||||
* {@link ByteBuf#slice(int, int)} instead of calling the constructor
|
||||
* explicitly.
|
||||
*/
|
||||
public class TruncatedByteBuf extends AbstractByteBuf implements WrappedByteBuf {
|
||||
|
||||
private final ByteBuf buffer;
|
||||
private final int length;
|
||||
|
||||
public TruncatedByteBuf(ByteBuf buffer, int length) {
|
||||
super(buffer.order());
|
||||
if (length > buffer.capacity()) {
|
||||
throw new IndexOutOfBoundsException("Length is too large, got "
|
||||
+ length + " but can't go higher than " + buffer.capacity());
|
||||
}
|
||||
|
||||
this.buffer = buffer;
|
||||
this.length = length;
|
||||
writerIndex(length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf unwrap() {
|
||||
return buffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBufFactory factory() {
|
||||
return buffer.factory();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDirect() {
|
||||
return buffer.isDirect();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int capacity() {
|
||||
return length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasArray() {
|
||||
return buffer.hasArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] array() {
|
||||
return buffer.array();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int arrayOffset() {
|
||||
return buffer.arrayOffset();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte getByte(int index) {
|
||||
checkIndex(index);
|
||||
return buffer.getByte(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short getShort(int index) {
|
||||
checkIndex(index, 2);
|
||||
return buffer.getShort(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getUnsignedMedium(int index) {
|
||||
checkIndex(index, 3);
|
||||
return buffer.getUnsignedMedium(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getInt(int index) {
|
||||
checkIndex(index, 4);
|
||||
return buffer.getInt(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLong(int index) {
|
||||
checkIndex(index, 8);
|
||||
return buffer.getLong(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf duplicate() {
|
||||
ByteBuf duplicate = new TruncatedByteBuf(buffer, length);
|
||||
duplicate.setIndex(readerIndex(), writerIndex());
|
||||
return duplicate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf copy(int index, int length) {
|
||||
checkIndex(index, length);
|
||||
return buffer.copy(index, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf slice(int index, int length) {
|
||||
checkIndex(index, length);
|
||||
if (length == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
return buffer.slice(index, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuf dst, int dstIndex, int length) {
|
||||
checkIndex(index, length);
|
||||
buffer.getBytes(index, dst, dstIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, byte[] dst, int dstIndex, int length) {
|
||||
checkIndex(index, length);
|
||||
buffer.getBytes(index, dst, dstIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, ByteBuffer dst) {
|
||||
checkIndex(index, dst.remaining());
|
||||
buffer.getBytes(index, dst);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setByte(int index, int value) {
|
||||
checkIndex(index);
|
||||
buffer.setByte(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setShort(int index, int value) {
|
||||
checkIndex(index, 2);
|
||||
buffer.setShort(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMedium(int index, int value) {
|
||||
checkIndex(index, 3);
|
||||
buffer.setMedium(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setInt(int index, int value) {
|
||||
checkIndex(index, 4);
|
||||
buffer.setInt(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLong(int index, long value) {
|
||||
checkIndex(index, 8);
|
||||
buffer.setLong(index, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, byte[] src, int srcIndex, int length) {
|
||||
checkIndex(index, length);
|
||||
buffer.setBytes(index, src, srcIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, ByteBuf src, int srcIndex, int length) {
|
||||
checkIndex(index, length);
|
||||
buffer.setBytes(index, src, srcIndex, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBytes(int index, ByteBuffer src) {
|
||||
checkIndex(index, src.remaining());
|
||||
buffer.setBytes(index, src);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getBytes(int index, OutputStream out, int length)
|
||||
throws IOException {
|
||||
checkIndex(index, length);
|
||||
buffer.getBytes(index, out, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getBytes(int index, GatheringByteChannel out, int length)
|
||||
throws IOException {
|
||||
checkIndex(index, length);
|
||||
return buffer.getBytes(index, out, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setBytes(int index, InputStream in, int length)
|
||||
throws IOException {
|
||||
checkIndex(index, length);
|
||||
return buffer.setBytes(index, in, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setBytes(int index, ScatteringByteChannel in, int length)
|
||||
throws IOException {
|
||||
checkIndex(index, length);
|
||||
return buffer.setBytes(index, in, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNioBuffer() {
|
||||
return buffer.hasNioBuffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuffer nioBuffer(int index, int length) {
|
||||
checkIndex(index, length);
|
||||
return buffer.nioBuffer(index, length);
|
||||
}
|
||||
|
||||
private void checkIndex(int index) {
|
||||
if (index < 0 || index >= capacity()) {
|
||||
throw new IndexOutOfBoundsException("Invalid index of " + index
|
||||
+ ", maximum is " + capacity());
|
||||
}
|
||||
}
|
||||
|
||||
private void checkIndex(int index, int length) {
|
||||
if (length < 0) {
|
||||
throw new IllegalArgumentException(
|
||||
"length is negative: " + length);
|
||||
}
|
||||
if (index + length > capacity()) {
|
||||
throw new IndexOutOfBoundsException("Invalid index of "
|
||||
+ (index + length) + ", maximum is " + capacity());
|
||||
}
|
||||
}
|
||||
}
|
@ -36,7 +36,6 @@ import java.util.Queue;
*
* {@link ByteBuf} heapBuffer = buffer(128);
* {@link ByteBuf} directBuffer = directBuffer(256);
* {@link ByteBuf} dynamicBuffer = dynamicBuffer(512);
* {@link ByteBuf} wrappedBuffer = wrappedBuffer(new byte[128], new byte[256]);
* {@link ByteBuf} copiedBuffer = copiedBuffer({@link ByteBuffer}.allocate(128));
* </pre>
@ -48,9 +47,6 @@ import java.util.Queue;
* <ul>
* <li>{@link #buffer(int)} allocates a new fixed-capacity heap buffer.</li>
* <li>{@link #directBuffer(int)} allocates a new fixed-capacity direct buffer.</li>
* <li>{@link #dynamicBuffer(int)} allocates a new dynamic-capacity heap
* buffer, whose capacity increases automatically as needed by a write
* operation.</li>
* </ul>
*
* <h3>Creating a wrapped buffer</h3>
@ -94,7 +90,7 @@ public final class Unpooled {
/**
* A buffer whose capacity is {@code 0}.
*/
public static final ByteBuf EMPTY_BUFFER = new HeapByteBuf(0) {
public static final ByteBuf EMPTY_BUFFER = new HeapByteBuf(0, 0) {
@Override
public ByteBuf order(ByteOrder endianness) {
if (endianness == null) {
@ -119,16 +115,50 @@ public final class Unpooled {
return new QueueBackedMessageBuf<T>(queue);
}

/**
* Creates a new big-endian Java heap buffer with reasonably small initial capacity, which
* expands its capacity boundlessly on demand.
*/
public static ByteBuf buffer() {
return buffer(256, Integer.MAX_VALUE);
}

/**
* Creates a new big-endian direct buffer with reasonably small initial capacity, which
* expands its capacity boundlessly on demand.
*/
public static ByteBuf directBuffer() {
return directBuffer(256, Integer.MAX_VALUE);
}

/**
* Creates a new big-endian Java heap buffer with the specified {@code capacity}, which
* expands its capacity boundlessly on demand. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ByteBuf buffer(int initialCapacity) {
return buffer(initialCapacity, Integer.MAX_VALUE);
}

/**
* Creates a new big-endian direct buffer with the specified {@code capacity}, which
* expands its capacity boundlessly on demand. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ByteBuf directBuffer(int initialCapacity) {
return directBuffer(initialCapacity, Integer.MAX_VALUE);
}

/**
* Creates a new big-endian Java heap buffer with the specified
* {@code capacity}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ByteBuf buffer(int capacity) {
if (capacity == 0) {
public static ByteBuf buffer(int initialCapacity, int maxCapacity) {
if (initialCapacity == 0 && maxCapacity == 0) {
return EMPTY_BUFFER;
}
return new HeapByteBuf(capacity);
return new HeapByteBuf(initialCapacity, maxCapacity);
}
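// Illustrative usage sketch, not part of this commit: the overloads above pair an
// initial capacity with a hard maxCapacity, and the same buffer grows between the
// two on demand (Integer.MAX_VALUE when no bound is given).
//
// ByteBuf bounded = Unpooled.buffer(64, 4096);        // heap buffer, grows up to 4 KiB
// ByteBuf loose = Unpooled.buffer(64);                // heap buffer, maxCapacity Integer.MAX_VALUE
// ByteBuf direct = Unpooled.directBuffer(64, 4096);   // same contract for a direct buffer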
|
||||
|
||||
/**
|
||||
@ -136,60 +166,11 @@ public final class Unpooled {
|
||||
* {@code capacity}. The new buffer's {@code readerIndex} and
|
||||
* {@code writerIndex} are {@code 0}.
|
||||
*/
|
||||
public static ByteBuf directBuffer(int capacity) {
|
||||
if (capacity == 0) {
|
||||
public static ByteBuf directBuffer(int initialCapacity, int maxCapacity) {
|
||||
if (initialCapacity == 0 && maxCapacity == 0) {
|
||||
return EMPTY_BUFFER;
|
||||
}
|
||||
|
||||
ByteBuf buffer = new NioBufferBackedByteBuf(ByteBuffer.allocateDirect(capacity));
|
||||
buffer.clear();
|
||||
return buffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new big-endian dynamic buffer whose estimated data length is
|
||||
* {@code 256} bytes. The new buffer's {@code readerIndex} and
|
||||
* {@code writerIndex} are {@code 0}.
|
||||
*/
|
||||
public static ByteBuf dynamicBuffer() {
|
||||
return dynamicBuffer(256);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new big-endian dynamic buffer whose estimated data length is
|
||||
* {@code 256} bytes. The new buffer's {@code readerIndex} and
|
||||
* {@code writerIndex} are {@code 0}.
|
||||
*/
|
||||
public static ByteBuf dynamicBuffer(ByteBufFactory factory) {
|
||||
if (factory == null) {
|
||||
throw new NullPointerException("factory");
|
||||
}
|
||||
|
||||
return new DynamicByteBuf(256, factory);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new big-endian dynamic buffer with the specified estimated
|
||||
* data length. More accurate estimation yields less unexpected
|
||||
* reallocation overhead. The new buffer's {@code readerIndex} and
|
||||
* {@code writerIndex} are {@code 0}.
|
||||
*/
|
||||
public static ByteBuf dynamicBuffer(int estimatedLength) {
|
||||
return new DynamicByteBuf(estimatedLength);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new big-endian dynamic buffer with the specified estimated
|
||||
* data length using the specified factory. More accurate estimation yields
|
||||
* less unexpected reallocation overhead. The new buffer's {@code readerIndex}
|
||||
* and {@code writerIndex} are {@code 0}.
|
||||
*/
|
||||
public static ByteBuf dynamicBuffer(int estimatedLength, ByteBufFactory factory) {
|
||||
if (factory == null) {
|
||||
throw new NullPointerException("factory");
|
||||
}
|
||||
|
||||
return new DynamicByteBuf(estimatedLength, factory);
|
||||
return new DirectByteBuf(initialCapacity, maxCapacity);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -201,7 +182,7 @@ public final class Unpooled {
|
||||
if (array.length == 0) {
|
||||
return EMPTY_BUFFER;
|
||||
}
|
||||
return new HeapByteBuf(array);
|
||||
return new HeapByteBuf(array, array.length);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -210,23 +191,15 @@ public final class Unpooled {
|
||||
* content will be visible to the returned buffer.
|
||||
*/
|
||||
public static ByteBuf wrappedBuffer(byte[] array, int offset, int length) {
|
||||
if (offset == 0) {
|
||||
if (length == array.length) {
|
||||
return wrappedBuffer(array);
|
||||
} else {
|
||||
if (length == 0) {
|
||||
return EMPTY_BUFFER;
|
||||
} else {
|
||||
return new TruncatedByteBuf(wrappedBuffer(array), length);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (length == 0) {
|
||||
return EMPTY_BUFFER;
|
||||
} else {
|
||||
return new SlicedByteBuf(wrappedBuffer(array), offset, length);
|
||||
}
|
||||
if (length == 0) {
|
||||
return EMPTY_BUFFER;
|
||||
}
|
||||
|
||||
if (offset == 0 && length == array.length) {
|
||||
return wrappedBuffer(array);
|
||||
}
|
||||
|
||||
return new SlicedByteBuf(wrappedBuffer(array), offset, length);
|
||||
}
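For instance (values chosen arbitrarily), wrapping a sub-range of an array yields a zero-copy slice whose readable bytes cover exactly that range:

    byte[] data = { 0, 1, 2, 3, 4, 5, 6, 7 };
    ByteBuf whole  = Unpooled.wrappedBuffer(data);        // 8 readable bytes
    ByteBuf middle = Unpooled.wrappedBuffer(data, 2, 4);  // bytes 2..5 of data
    // The slice shares the array: writes through either view are visible in both.
    assert middle.readableBytes() == 4;
    assert middle.getByte(0) == 2;
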
|
||||
|
||||
/**
|
||||
@ -244,7 +217,7 @@ public final class Unpooled {
|
||||
buffer.arrayOffset() + buffer.position(),
|
||||
buffer.remaining()).order(buffer.order());
|
||||
} else {
|
||||
return new NioBufferBackedByteBuf(buffer);
|
||||
return new DirectByteBuf(buffer, buffer.remaining());
|
||||
}
|
||||
}
|
||||
|
||||
@ -267,6 +240,33 @@ public final class Unpooled {
|
||||
* content will be visible to the returned buffer.
|
||||
*/
|
||||
public static ByteBuf wrappedBuffer(byte[]... arrays) {
|
||||
return wrappedBuffer(16, arrays);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new big-endian composite buffer which wraps the readable bytes of the
|
||||
* specified buffers without copying them. A modification on the content
|
||||
* of the specified buffers will be visible to the returned buffer.
|
||||
*/
|
||||
public static ByteBuf wrappedBuffer(ByteBuf... buffers) {
|
||||
return wrappedBuffer(16, buffers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new big-endian composite buffer which wraps the slices of the specified
|
||||
* NIO buffers without copying them. A modification on the content of the
|
||||
* specified buffers will be visible to the returned buffer.
|
||||
*/
|
||||
public static ByteBuf wrappedBuffer(ByteBuffer... buffers) {
|
||||
return wrappedBuffer(16, buffers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new big-endian composite buffer which wraps the specified
|
||||
* arrays without copying them. A modification on the specified arrays'
|
||||
* content will be visible to the returned buffer.
|
||||
*/
|
||||
public static ByteBuf wrappedBuffer(int maxNumComponents, byte[]... arrays) {
|
||||
switch (arrays.length) {
|
||||
case 0:
|
||||
break;
|
||||
@ -286,119 +286,87 @@ public final class Unpooled {
|
||||
components.add(wrappedBuffer(a));
|
||||
}
|
||||
}
|
||||
return compositeBuffer(BIG_ENDIAN, components);
|
||||
|
||||
if (!components.isEmpty()) {
|
||||
return new DefaultCompositeByteBuf(maxNumComponents, components);
|
||||
}
|
||||
}
|
||||
|
||||
return EMPTY_BUFFER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new composite buffer which wraps the specified
|
||||
* components without copying them. A modification on the specified components'
|
||||
* content will be visible to the returned buffer.
|
||||
*/
|
||||
private static ByteBuf compositeBuffer(ByteOrder endianness, List<ByteBuf> components) {
|
||||
switch (components.size()) {
|
||||
case 0:
|
||||
return EMPTY_BUFFER;
|
||||
case 1:
|
||||
return components.get(0);
|
||||
default:
|
||||
return new CompositeByteBuf(endianness, components);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new composite buffer which wraps the readable bytes of the
|
||||
* Creates a new big-endian composite buffer which wraps the readable bytes of the
|
||||
* specified buffers without copying them. A modification on the content
|
||||
* of the specified buffers will be visible to the returned buffer.
|
||||
*
|
||||
* @throws IllegalArgumentException
|
||||
* if the specified buffers' endianness are different from each
|
||||
* other
|
||||
*/
|
||||
public static ByteBuf wrappedBuffer(ByteBuf... buffers) {
|
||||
public static ByteBuf wrappedBuffer(int maxNumComponents, ByteBuf... buffers) {
|
||||
switch (buffers.length) {
|
||||
case 0:
|
||||
break;
|
||||
case 1:
|
||||
if (buffers[0].readable()) {
|
||||
return wrappedBuffer(buffers[0]);
|
||||
return wrappedBuffer(buffers[0].order(BIG_ENDIAN));
|
||||
}
|
||||
break;
|
||||
default:
|
||||
ByteOrder order = null;
|
||||
final List<ByteBuf> components = new ArrayList<ByteBuf>(buffers.length);
|
||||
for (ByteBuf c: buffers) {
|
||||
if (c == null) {
|
||||
break;
|
||||
}
|
||||
if (c.readable()) {
|
||||
if (order != null) {
|
||||
if (!order.equals(c.order())) {
|
||||
throw new IllegalArgumentException("inconsistent byte order");
|
||||
}
|
||||
} else {
|
||||
order = c.order();
|
||||
}
|
||||
if (c instanceof CompositeByteBuf) {
|
||||
// Expand nested composition.
|
||||
components.addAll(
|
||||
((CompositeByteBuf) c).decompose(
|
||||
c.readerIndex(), c.readableBytes()));
|
||||
} else {
|
||||
// An ordinary buffer (non-composite)
|
||||
components.add(c.slice());
|
||||
}
|
||||
for (ByteBuf b: buffers) {
|
||||
if (b.readable()) {
|
||||
return new DefaultCompositeByteBuf(maxNumComponents, buffers);
|
||||
}
|
||||
}
|
||||
return compositeBuffer(order, components);
|
||||
}
|
||||
return EMPTY_BUFFER;
|
||||
}
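A short sketch of the new maxNumComponents overload (illustrative only; 16 matches the default used by the no-arg overloads above):

    ByteBuf a = Unpooled.wrappedBuffer(new byte[] { 1, 2, 3 });
    ByteBuf b = Unpooled.wrappedBuffer(new byte[] { 4, 5, 6 });
    // Zero-copy composite view over both buffers; maxNumComponents bounds how
    // many components the composite keeps before it consolidates them.
    ByteBuf composite = Unpooled.wrappedBuffer(16, a, b);
    a.setByte(0, 9);
    assert composite.getByte(0) == 9;    // changes to the originals stay visible
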
|
||||
|
||||
/**
|
||||
* Creates a new composite buffer which wraps the slices of the specified
|
||||
* Creates a new big-endian composite buffer which wraps the slices of the specified
|
||||
* NIO buffers without copying them. A modification on the content of the
|
||||
* specified buffers will be visible to the returned buffer.
|
||||
*
|
||||
* @throws IllegalArgumentException
|
||||
* if the specified buffers' endianness are different from each
|
||||
* other
|
||||
*/
|
||||
public static ByteBuf wrappedBuffer(ByteBuffer... buffers) {
|
||||
public static ByteBuf wrappedBuffer(int maxNumComponents, ByteBuffer... buffers) {
|
||||
switch (buffers.length) {
|
||||
case 0:
|
||||
break;
|
||||
case 1:
|
||||
if (buffers[0].hasRemaining()) {
|
||||
return wrappedBuffer(buffers[0]);
|
||||
return wrappedBuffer(buffers[0].order(BIG_ENDIAN));
|
||||
}
|
||||
break;
|
||||
default:
|
||||
ByteOrder order = null;
|
||||
// Get the list of the component, while guessing the byte order.
|
||||
final List<ByteBuf> components = new ArrayList<ByteBuf>(buffers.length);
|
||||
for (ByteBuffer b: buffers) {
|
||||
if (b == null) {
|
||||
break;
|
||||
}
|
||||
if (b.hasRemaining()) {
|
||||
if (order != null) {
|
||||
if (!order.equals(b.order())) {
|
||||
throw new IllegalArgumentException("inconsistent byte order");
|
||||
}
|
||||
} else {
|
||||
order = b.order();
|
||||
}
|
||||
components.add(wrappedBuffer(b));
|
||||
if (b.remaining() > 0) {
|
||||
components.add(wrappedBuffer(b.order(BIG_ENDIAN)));
|
||||
}
|
||||
}
|
||||
return compositeBuffer(order, components);
|
||||
|
||||
if (!components.isEmpty()) {
|
||||
return new DefaultCompositeByteBuf(maxNumComponents, components);
|
||||
}
|
||||
}
|
||||
|
||||
return EMPTY_BUFFER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new big-endian composite buffer with no components.
|
||||
*/
|
||||
public static CompositeByteBuf compositeBuffer() {
|
||||
return compositeBuffer(16);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new big-endian composite buffer with no components.
|
||||
*/
|
||||
public static CompositeByteBuf compositeBuffer(int maxNumComponents) {
|
||||
return new DefaultCompositeByteBuf(maxNumComponents);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new big-endian buffer whose content is a copy of the
|
||||
* specified {@code array}. The new buffer's {@code readerIndex} and
|
||||
@ -408,7 +376,7 @@ public final class Unpooled {
|
||||
if (array.length == 0) {
|
||||
return EMPTY_BUFFER;
|
||||
}
|
||||
return new HeapByteBuf(array.clone());
|
||||
return wrappedBuffer(array.clone());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -51,7 +51,6 @@ public abstract class AbstractChannelBufferTest {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
@Before
|
||||
public void init() {
|
||||
buffer = newBuffer(CAPACITY);
|
||||
|
@ -16,7 +16,6 @@
|
||||
package io.netty.buffer;
|
||||
|
||||
import static io.netty.buffer.Unpooled.*;
|
||||
import java.io.IOException;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
@ -47,31 +46,34 @@ public abstract class AbstractCompositeChannelBufferTest extends
|
||||
@Override
|
||||
protected ByteBuf newBuffer(int length) {
|
||||
buffers = new ArrayList<ByteBuf>();
|
||||
for (int i = 0; i < length; i += 10) {
|
||||
for (int i = 0; i < length + 45; i += 45) {
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[1]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[1]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[2]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[2]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[3]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[3]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[4]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[4]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[5]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[5]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[6]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[6]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[7]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[7]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[8]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[8]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[9]).order(order));
|
||||
buffers.add(Unpooled.wrappedBuffer(new byte[9]));
|
||||
buffers.add(Unpooled.EMPTY_BUFFER);
|
||||
}
|
||||
|
||||
buffer = Unpooled.wrappedBuffer(buffers.toArray(new ByteBuf[buffers.size()]));
|
||||
buffer.writerIndex(length);
|
||||
buffer = Unpooled.wrappedBuffer(buffer);
|
||||
buffer = Unpooled.wrappedBuffer(
|
||||
Integer.MAX_VALUE, buffers.toArray(new ByteBuf[buffers.size()])).order(order);
|
||||
|
||||
// Truncate to the requested capacity.
|
||||
buffer.capacity(length);
|
||||
|
||||
assertEquals(length, buffer.capacity());
|
||||
assertEquals(length, buffer.readableBytes());
|
||||
assertFalse(buffer.writable());
|
||||
@ -90,23 +92,23 @@ public abstract class AbstractCompositeChannelBufferTest extends
|
||||
protected boolean discardReadBytesDoesNotMoveWritableBytes() {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Tests the "getBufferFor" method
|
||||
*/
|
||||
@Test
|
||||
public void testGetBufferFor() throws IOException {
|
||||
public void testComponentAtOffset() {
|
||||
CompositeByteBuf buf = (CompositeByteBuf) Unpooled.wrappedBuffer(new byte[] { 1, 2, 3, 4, 5 }, new byte[] {4, 5, 6, 7, 8, 9, 26});
|
||||
|
||||
|
||||
//Ensure that a random place will be fine
|
||||
assertEquals(buf.getBuffer(2).capacity(), 5);
|
||||
|
||||
assertEquals(buf.componentAtOffset(2).capacity(), 5);
|
||||
|
||||
//Loop through each byte
|
||||
|
||||
|
||||
byte index = 0;
|
||||
|
||||
|
||||
while (index < buf.capacity()) {
|
||||
ByteBuf _buf = buf.getBuffer(index++);
|
||||
ByteBuf _buf = buf.componentAtOffset(index++);
|
||||
assertNotNull(_buf);
|
||||
assertTrue(_buf.capacity() > 0);
|
||||
assertNotNull(_buf.getByte(0));
|
||||
@ -155,8 +157,8 @@ public abstract class AbstractCompositeChannelBufferTest extends
|
||||
|
||||
@Test
|
||||
public void testCompositeWrappedBuffer() {
|
||||
ByteBuf header = dynamicBuffer(12).order(order);
|
||||
ByteBuf payload = dynamicBuffer(512).order(order);
|
||||
ByteBuf header = buffer(12).order(order);
|
||||
ByteBuf payload = buffer(512).order(order);
|
||||
|
||||
header.writeBytes(new byte[12]);
|
||||
payload.writeBytes(new byte[512]);
|
||||
|
@ -40,6 +40,6 @@ public class BigEndianHeapChannelBufferTest extends AbstractChannelBufferTest {
|
||||
|
||||
@Test(expected = NullPointerException.class)
|
||||
public void shouldNotAllowNullInConstructor() {
|
||||
new HeapByteBuf(null);
|
||||
new HeapByteBuf(null, 0);
|
||||
}
|
||||
}
|
||||
|
@ -1,44 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* Tests ByteBuffer backed heap channel buffers
|
||||
*/
|
||||
public class ByteBufferBackedHeapChannelBufferTest extends AbstractChannelBufferTest {
|
||||
|
||||
private ByteBuf buffer;
|
||||
|
||||
@Override
|
||||
protected ByteBuf newBuffer(int length) {
|
||||
buffer = new NioBufferBackedByteBuf(ByteBuffer.allocate(length));
|
||||
return buffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ByteBuf[] components() {
|
||||
return new ByteBuf[] { buffer };
|
||||
}
|
||||
|
||||
@Test(expected = NullPointerException.class)
|
||||
public void shouldNotAllowNullInConstructor() {
|
||||
new NioBufferBackedByteBuf(null);
|
||||
}
|
||||
}
|
@ -29,7 +29,7 @@ public class ChannelBufferStreamTest {
|
||||
|
||||
@Test
|
||||
public void testAll() throws Exception {
|
||||
ByteBuf buf = Unpooled.dynamicBuffer();
|
||||
ByteBuf buf = Unpooled.buffer(0, 65536);
|
||||
|
||||
try {
|
||||
new ByteBufOutputStream(null);
|
||||
|
@ -37,8 +37,8 @@ public class ChannelBuffersTest {
|
||||
|
||||
@Test
|
||||
public void testCompositeWrappedBuffer() {
|
||||
ByteBuf header = dynamicBuffer(12);
|
||||
ByteBuf payload = dynamicBuffer(512);
|
||||
ByteBuf header = buffer(12);
|
||||
ByteBuf payload = buffer(512);
|
||||
|
||||
header.writeBytes(new byte[12]);
|
||||
payload.writeBytes(new byte[512]);
|
||||
@ -156,10 +156,6 @@ public class ChannelBuffersTest {
|
||||
|
||||
@Test
|
||||
public void shouldReturnEmptyBufferWhenLengthIsZero() {
|
||||
assertSame(EMPTY_BUFFER, buffer(0));
|
||||
assertSame(EMPTY_BUFFER, buffer(0).order(LITTLE_ENDIAN));
|
||||
assertSame(EMPTY_BUFFER, directBuffer(0));
|
||||
|
||||
assertSame(EMPTY_BUFFER, wrappedBuffer(new byte[0]));
|
||||
assertSame(EMPTY_BUFFER, wrappedBuffer(new byte[0]).order(LITTLE_ENDIAN));
|
||||
assertSame(EMPTY_BUFFER, wrappedBuffer(new byte[8], 0, 0));
|
||||
|
@ -1,61 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* Tests dynamic channel buffers
|
||||
*/
|
||||
public class DynamicChannelBufferTest extends AbstractChannelBufferTest {
|
||||
|
||||
private ByteBuf buffer;
|
||||
|
||||
@Override
|
||||
protected ByteBuf newBuffer(int length) {
|
||||
buffer = Unpooled.dynamicBuffer(length);
|
||||
|
||||
assertEquals(0, buffer.readerIndex());
|
||||
assertEquals(0, buffer.writerIndex());
|
||||
assertEquals(length, buffer.capacity());
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ByteBuf[] components() {
|
||||
return new ByteBuf[] { buffer };
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldNotFailOnInitialIndexUpdate() {
|
||||
new DynamicByteBuf(10).setIndex(0, 10);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldNotFailOnInitialIndexUpdate2() {
|
||||
new DynamicByteBuf(10).writerIndex(10);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldNotFailOnInitialIndexUpdate3() {
|
||||
ByteBuf buf = new DynamicByteBuf(10);
|
||||
buf.writerIndex(10);
|
||||
buf.readerIndex(10);
|
||||
}
|
||||
}
|
@ -76,6 +76,7 @@ public class ReadOnlyChannelBufferTest {
|
||||
public void shouldForwardReadCallsBlindly() throws Exception {
|
||||
ByteBuf buf = createStrictMock(ByteBuf.class);
|
||||
expect(buf.order()).andReturn(BIG_ENDIAN).anyTimes();
|
||||
expect(buf.maxCapacity()).andReturn(65536).anyTimes();
|
||||
expect(buf.readerIndex()).andReturn(0).anyTimes();
|
||||
expect(buf.writerIndex()).andReturn(0).anyTimes();
|
||||
expect(buf.capacity()).andReturn(0).anyTimes();
|
||||
|
@ -1,46 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012 The Netty Project
|
||||
*
|
||||
* The Netty Project licenses this file to you under the Apache License,
|
||||
* version 2.0 (the "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at:
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
* License for the specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package io.netty.buffer;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* Tests truncated channel buffers
|
||||
*/
|
||||
public class TruncatedChannelBufferTest extends AbstractChannelBufferTest {
|
||||
|
||||
private ByteBuf buffer;
|
||||
|
||||
@Override
|
||||
protected ByteBuf newBuffer(int length) {
|
||||
buffer = Unpooled.wrappedBuffer(
|
||||
new byte[length * 2], 0, length);
|
||||
assertEquals(length, buffer.writerIndex());
|
||||
return buffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ByteBuf[] components() {
|
||||
return new ByteBuf[] { buffer };
|
||||
}
|
||||
|
||||
@Test(expected = NullPointerException.class)
|
||||
public void shouldNotAllowNullInConstructor() {
|
||||
new TruncatedByteBuf(null, 0);
|
||||
}
|
||||
}
|
@ -137,7 +137,7 @@ public class HttpChunkAggregator extends MessageToMessageDecoder<Object, HttpMes
|
||||
m.removeHeader(HttpHeaders.Names.TRANSFER_ENCODING);
|
||||
}
|
||||
m.setChunked(false);
|
||||
m.setContent(Unpooled.dynamicBuffer());
|
||||
m.setContent(Unpooled.compositeBuffer(maxCumulationBufferComponents));
|
||||
this.currentMessage = m;
|
||||
return null;
|
||||
} else {
|
||||
@ -198,26 +198,13 @@ public class HttpChunkAggregator extends MessageToMessageDecoder<Object, HttpMes
|
||||
}
|
||||
}
|
||||
|
||||
protected void appendToCumulation(ByteBuf input) {
|
||||
ByteBuf cumulation = this.currentMessage.getContent();
|
||||
if (cumulation instanceof CompositeByteBuf) {
|
||||
// Make sure the resulting cumulation buffer has no more than 4 components.
|
||||
CompositeByteBuf composite = (CompositeByteBuf) cumulation;
|
||||
if (composite.numComponents() >= maxCumulationBufferComponents) {
|
||||
currentMessage.setContent(Unpooled.wrappedBuffer(composite.copy(), input));
|
||||
} else {
|
||||
List<ByteBuf> decomposed = composite.decompose(0, composite.readableBytes());
|
||||
ByteBuf[] buffers = decomposed.toArray(new ByteBuf[decomposed.size() + 1]);
|
||||
buffers[buffers.length - 1] = input;
|
||||
|
||||
currentMessage.setContent(Unpooled.wrappedBuffer(buffers));
|
||||
}
|
||||
} else {
|
||||
currentMessage.setContent(Unpooled.wrappedBuffer(cumulation, input));
|
||||
}
|
||||
|
||||
private void appendToCumulation(ByteBuf input) {
|
||||
CompositeByteBuf cumulation = (CompositeByteBuf) currentMessage.getContent();
|
||||
cumulation.addComponent(input);
|
||||
cumulation.writerIndex(cumulation.capacity());
|
||||
}
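Since the cumulation is now always a CompositeByteBuf, each chunk is appended without copying and the writer index is bumped to expose the new bytes. A standalone sketch of the same pattern (payloads are arbitrary):

    CompositeByteBuf cumulation = Unpooled.compositeBuffer(1024);
    ByteBuf chunk1 = Unpooled.wrappedBuffer(new byte[] { 1, 2, 3 });
    ByteBuf chunk2 = Unpooled.wrappedBuffer(new byte[] { 4, 5 });
    cumulation.addComponent(chunk1);
    cumulation.writerIndex(cumulation.capacity());   // addComponent() does not move writerIndex
    cumulation.addComponent(chunk2);
    cumulation.writerIndex(cumulation.capacity());
    assert cumulation.readableBytes() == 5;
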
|
||||
|
||||
@Override
|
||||
public void beforeAdd(ChannelHandlerContext ctx) throws Exception {
|
||||
this.ctx = ctx;
|
||||
}
|
||||
|
@ -84,7 +84,7 @@ public abstract class HttpContentDecoder extends MessageToMessageDecoder<Object,
|
||||
if (!m.isChunked()) {
|
||||
ByteBuf content = m.getContent();
|
||||
// Decode the content
|
||||
ByteBuf newContent = Unpooled.dynamicBuffer();
|
||||
ByteBuf newContent = Unpooled.buffer();
|
||||
decode(content, newContent);
|
||||
finishDecode(newContent);
|
||||
|
||||
@ -104,7 +104,7 @@ public abstract class HttpContentDecoder extends MessageToMessageDecoder<Object,
|
||||
// Decode the chunk if necessary.
|
||||
if (decoder != null) {
|
||||
if (!c.isLast()) {
|
||||
ByteBuf newContent = Unpooled.dynamicBuffer();
|
||||
ByteBuf newContent = Unpooled.buffer();
|
||||
decode(content, newContent);
|
||||
if (newContent.readable()) {
|
||||
c.setContent(newContent);
|
||||
@ -112,7 +112,7 @@ public abstract class HttpContentDecoder extends MessageToMessageDecoder<Object,
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
ByteBuf lastProduct = Unpooled.dynamicBuffer();
|
||||
ByteBuf lastProduct = Unpooled.buffer();
|
||||
finishDecode(lastProduct);
|
||||
|
||||
// Generate an additional chunk if the decoder produced
|
||||
|
@ -117,7 +117,7 @@ public abstract class HttpContentEncoder extends MessageToMessageCodec<HttpMessa
|
||||
if (!m.isChunked()) {
|
||||
ByteBuf content = m.getContent();
|
||||
// Encode the content.
|
||||
ByteBuf newContent = Unpooled.dynamicBuffer();
|
||||
ByteBuf newContent = Unpooled.buffer();
|
||||
encode(content, newContent);
|
||||
finishEncode(newContent);
|
||||
|
||||
@ -136,7 +136,7 @@ public abstract class HttpContentEncoder extends MessageToMessageCodec<HttpMessa
|
||||
// Encode the chunk if necessary.
|
||||
if (encoder != null) {
|
||||
if (!c.isLast()) {
|
||||
ByteBuf newContent = Unpooled.dynamicBuffer();
|
||||
ByteBuf newContent = Unpooled.buffer();
|
||||
encode(content, newContent);
|
||||
if (content.readable()) {
|
||||
c.setContent(newContent);
|
||||
@ -144,7 +144,7 @@ public abstract class HttpContentEncoder extends MessageToMessageCodec<HttpMessa
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
ByteBuf lastProduct = Unpooled.dynamicBuffer();
|
||||
ByteBuf lastProduct = Unpooled.buffer();
|
||||
finishEncode(lastProduct);
|
||||
|
||||
// Generate an additional chunk if the decoder produced
|
||||
|
@ -554,7 +554,7 @@ public class SpdyFrameDecoder extends ByteToMessageDecoder<Object> {
|
||||
// Initialize header block decoding fields
|
||||
headerSize = 0;
|
||||
numHeaders = -1;
|
||||
decompressed = Unpooled.dynamicBuffer(8192);
|
||||
decompressed = Unpooled.buffer(8192);
|
||||
}
|
||||
|
||||
// Accumulate decompressed data
|
||||
|
@ -309,7 +309,7 @@ public class SpdyFrameEncoder extends MessageToByteEncoder<Object> {
|
||||
throw new IllegalArgumentException(
|
||||
"header block contains too many headers");
|
||||
}
|
||||
ByteBuf headerBlock = Unpooled.dynamicBuffer(256);
|
||||
ByteBuf headerBlock = Unpooled.buffer();
|
||||
writeLengthField(version, headerBlock, numHeaders);
|
||||
for (String name: names) {
|
||||
byte[] nameBytes = name.getBytes("UTF-8");
|
||||
@ -340,7 +340,7 @@ public class SpdyFrameEncoder extends MessageToByteEncoder<Object> {
|
||||
if (uncompressed.readableBytes() == 0) {
|
||||
return Unpooled.EMPTY_BUFFER;
|
||||
}
|
||||
ByteBuf compressed = Unpooled.dynamicBuffer();
|
||||
ByteBuf compressed = Unpooled.buffer();
|
||||
synchronized (headerBlockCompressor) {
|
||||
if (!finished) {
|
||||
headerBlockCompressor.setInput(uncompressed);
|
||||
|
@ -206,7 +206,7 @@ public class SpdyHttpDecoder extends MessageToMessageDecoder<Object, HttpMessage
|
||||
ByteBuf spdyDataFrameData = spdyDataFrame.getData();
|
||||
int spdyDataFrameDataLen = spdyDataFrameData.readableBytes();
|
||||
if (content == Unpooled.EMPTY_BUFFER) {
|
||||
content = Unpooled.dynamicBuffer(spdyDataFrameDataLen);
|
||||
content = Unpooled.buffer(spdyDataFrameDataLen);
|
||||
content.writeBytes(spdyDataFrameData, spdyDataFrameData.readerIndex(), spdyDataFrameDataLen);
|
||||
httpMessage.setContent(content);
|
||||
} else {
|
||||
|
@ -16,18 +16,17 @@
|
||||
package io.netty.handler.codec.http;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import io.netty.buffer.CompositeByteBuf;
|
||||
import io.netty.buffer.Unpooled;
|
||||
import io.netty.channel.ChannelHandlerContext;
|
||||
import io.netty.channel.embedded.EmbeddedMessageChannel;
|
||||
import io.netty.handler.codec.TooLongFrameException;
|
||||
import io.netty.util.CharsetUtil;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.easymock.EasyMock;
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import io.netty.buffer.Unpooled;
|
||||
import io.netty.buffer.CompositeByteBuf;
|
||||
import io.netty.channel.ChannelHandlerContext;
|
||||
import io.netty.channel.embedded.EmbeddedMessageChannel;
|
||||
import io.netty.handler.codec.TooLongFrameException;
|
||||
|
||||
import io.netty.util.CharsetUtil;
|
||||
import org.junit.Test;
|
||||
|
||||
public class HttpChunkAggregatorTest {
|
||||
@ -46,13 +45,13 @@ public class HttpChunkAggregatorTest {
|
||||
assertFalse(embedder.writeInbound(message));
|
||||
assertFalse(embedder.writeInbound(chunk1));
|
||||
assertFalse(embedder.writeInbound(chunk2));
|
||||
|
||||
|
||||
// this should trigger a messageReceived event so return true
|
||||
assertTrue(embedder.writeInbound(chunk3));
|
||||
assertTrue(embedder.finish());
|
||||
HttpMessage aggratedMessage = (HttpMessage) embedder.readInbound();
|
||||
assertNotNull(aggratedMessage);
|
||||
|
||||
|
||||
assertEquals(chunk1.getContent().readableBytes() + chunk2.getContent().readableBytes(), HttpHeaders.getContentLength(aggratedMessage));
|
||||
assertEquals(aggratedMessage.getHeader("X-Test"), Boolean.TRUE.toString());
|
||||
checkContentBuffer(aggratedMessage);
|
||||
@ -82,17 +81,17 @@ public class HttpChunkAggregatorTest {
|
||||
HttpChunk chunk2 = new DefaultHttpChunk(Unpooled.copiedBuffer("test2", CharsetUtil.US_ASCII));
|
||||
HttpChunkTrailer trailer = new DefaultHttpChunkTrailer();
|
||||
trailer.setHeader("X-Trailer", true);
|
||||
|
||||
|
||||
assertFalse(embedder.writeInbound(message));
|
||||
assertFalse(embedder.writeInbound(chunk1));
|
||||
assertFalse(embedder.writeInbound(chunk2));
|
||||
|
||||
|
||||
// this should trigger a messageReceived event so return true
|
||||
assertTrue(embedder.writeInbound(trailer));
|
||||
assertTrue(embedder.finish());
|
||||
HttpMessage aggratedMessage = (HttpMessage) embedder.readInbound();
|
||||
assertNotNull(aggratedMessage);
|
||||
|
||||
|
||||
assertEquals(chunk1.getContent().readableBytes() + chunk2.getContent().readableBytes(), HttpHeaders.getContentLength(aggratedMessage));
|
||||
assertEquals(aggratedMessage.getHeader("X-Test"), Boolean.TRUE.toString());
|
||||
assertEquals(aggratedMessage.getHeader("X-Trailer"), Boolean.TRUE.toString());
|
||||
@ -122,13 +121,13 @@ public class HttpChunkAggregatorTest {
|
||||
public void testInvalidConstructorUsage() {
|
||||
new HttpChunkAggregator(0);
|
||||
}
|
||||
|
||||
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void testInvalidMaxCumulationBufferComponents() {
|
||||
HttpChunkAggregator aggr= new HttpChunkAggregator(Integer.MAX_VALUE);
|
||||
aggr.setMaxCumulationBufferComponents(1);
|
||||
}
|
||||
|
||||
|
||||
@Test(expected = IllegalStateException.class)
|
||||
public void testSetMaxCumulationBufferComponentsAfterInit() throws Exception {
|
||||
HttpChunkAggregator aggr = new HttpChunkAggregator(Integer.MAX_VALUE);
|
||||
|
@ -35,7 +35,7 @@ public class HttpContentCompressorTest {
|
||||
"gzip; q=0.5, identity", "gzip",
|
||||
"gzip ; q=0.1", "gzip",
|
||||
"gzip; q=0, deflate", "deflate",
|
||||
" defalte ; q=0 , *;q=0.5", "gzip",
|
||||
" deflate ; q=0 , *;q=0.5", "gzip",
|
||||
};
|
||||
for (int i = 0; i < tests.length; i += 2) {
|
||||
String acceptEncoding = tests[i];
|
||||
|
@ -35,7 +35,7 @@ public abstract class ByteToMessageDecoder<O>
|
||||
|
||||
@Override
|
||||
public ByteBuf newInboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -68,7 +68,7 @@ public class FixedLengthFrameDecoder extends ByteToMessageDecoder<Object> {
|
||||
@Override
|
||||
public ByteBuf newInboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
if (allocateFullBuffer) {
|
||||
return Unpooled.dynamicBuffer(frameLength);
|
||||
return Unpooled.buffer(frameLength);
|
||||
} else {
|
||||
return super.newInboundBuffer(ctx);
|
||||
}
|
||||
|
@ -16,7 +16,6 @@
|
||||
package io.netty.handler.codec;
|
||||
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import io.netty.buffer.ByteBufFactory;
|
||||
import io.netty.channel.ChannelHandlerContext;
|
||||
import io.netty.handler.codec.serialization.ObjectDecoder;
|
||||
|
||||
@ -427,7 +426,7 @@ public class LengthFieldBasedFrameDecoder extends ByteToMessageDecoder<Object> {
|
||||
* is overridden to avoid memory copy.
|
||||
*/
|
||||
protected ByteBuf extractFrame(ByteBuf buffer, int index, int length) {
|
||||
ByteBuf frame = buffer.factory().getBuffer(length);
|
||||
ByteBuf frame = buffer.unsafe().newBuffer(length);
|
||||
frame.writeBytes(buffer, index, length);
|
||||
return frame;
|
||||
}
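The Javadoc fragment above notes that extractFrame() may be overridden to avoid the copy. A hedged sketch of such an override; the returned slice shares memory with the decoder's reused cumulation buffer, which is exactly why the default implementation copies:

    @Override
    protected ByteBuf extractFrame(ByteBuf buffer, int index, int length) {
        // Zero-copy view into the cumulation buffer; only safe if the frame is
        // fully consumed before the decoder reuses the underlying buffer.
        return buffer.slice(index, length);
    }
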
|
||||
|
@ -16,7 +16,6 @@
|
||||
package io.netty.handler.codec;
|
||||
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import io.netty.buffer.ByteBufFactory;
|
||||
import io.netty.channel.ChannelHandler.Sharable;
|
||||
import io.netty.channel.ChannelHandlerContext;
|
||||
|
||||
|
@ -281,7 +281,7 @@ public abstract class ReplayingDecoder<O, S> extends ByteToMessageDecoder<O> {
|
||||
|
||||
static final Signal REPLAY = new Signal(ReplayingDecoder.class.getName() + ".REPLAY");
|
||||
|
||||
private final ByteBuf cumulation = Unpooled.dynamicBuffer();
|
||||
private final ByteBuf cumulation = Unpooled.buffer();
|
||||
private final ReplayingDecoderBuffer replayable = new ReplayingDecoderBuffer(cumulation);
|
||||
private S state;
|
||||
private int checkpoint = -1;
|
||||
|
@ -16,7 +16,6 @@
|
||||
package io.netty.handler.codec;
|
||||
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import io.netty.buffer.ByteBufFactory;
|
||||
import io.netty.buffer.ByteBufIndexFinder;
|
||||
import io.netty.buffer.ChannelBufType;
|
||||
import io.netty.buffer.SwappedByteBuf;
|
||||
@ -64,6 +63,16 @@ class ReplayingDecoderBuffer implements ByteBuf {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void capacity(int newCapacity) {
|
||||
throw new UnreplayableOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int maxCapacity() {
|
||||
return capacity();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ChannelBufType type() {
|
||||
return ChannelBufType.BYTE;
|
||||
@ -349,11 +358,6 @@ class ReplayingDecoderBuffer implements ByteBuf {
|
||||
throw new UnreplayableOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBufFactory factory() {
|
||||
return buffer.factory();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteOrder order() {
|
||||
return buffer.order();
|
||||
@ -812,4 +816,9 @@ class ReplayingDecoderBuffer implements ByteBuf {
|
||||
throw REPLAY;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Unsafe unsafe() {
|
||||
throw new UnreplayableOperationException();
|
||||
}
|
||||
}
|
||||
|
@ -20,8 +20,6 @@
|
||||
package io.netty.handler.codec.base64;
|
||||
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import io.netty.buffer.ByteBufFactory;
|
||||
import io.netty.buffer.HeapByteBufFactory;
|
||||
|
||||
/**
|
||||
* Utility class for {@link ByteBuf} that encodes and decodes to and from
|
||||
@ -78,39 +76,17 @@ public final class Base64 {
|
||||
return encode(src, breakLines(dialect), dialect);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, ByteBufFactory bufferFactory) {
|
||||
return encode(src, Base64Dialect.STANDARD, bufferFactory);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, Base64Dialect dialect, ByteBufFactory bufferFactory) {
|
||||
return encode(src, breakLines(dialect), dialect, bufferFactory);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(ByteBuf src, boolean breakLines) {
|
||||
return encode(src, breakLines, Base64Dialect.STANDARD);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, boolean breakLines, Base64Dialect dialect) {
|
||||
return encode(src, breakLines, dialect, HeapByteBufFactory.getInstance());
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, boolean breakLines, ByteBufFactory bufferFactory) {
|
||||
return encode(src, breakLines, Base64Dialect.STANDARD, bufferFactory);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, boolean breakLines, Base64Dialect dialect, ByteBufFactory bufferFactory) {
|
||||
public static ByteBuf encode(ByteBuf src, boolean breakLines, Base64Dialect dialect) {
|
||||
|
||||
if (src == null) {
|
||||
throw new NullPointerException("src");
|
||||
}
|
||||
|
||||
ByteBuf dest = encode(
|
||||
src, src.readerIndex(), src.readableBytes(), breakLines, dialect, bufferFactory);
|
||||
ByteBuf dest = encode(src, src.readerIndex(), src.readableBytes(), breakLines, dialect);
|
||||
src.readerIndex(src.writerIndex());
|
||||
return dest;
|
||||
}
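A brief usage sketch of the simplified signatures, now that the ByteBufFactory overloads are gone (the input string is arbitrary; the destination buffer is allocated internally via src.unsafe().newBuffer(...)):

    ByteBuf raw = Unpooled.copiedBuffer("hello", CharsetUtil.US_ASCII);
    ByteBuf encoded = Base64.encode(raw, false, Base64Dialect.STANDARD);
    ByteBuf decoded = Base64.decode(encoded, Base64Dialect.STANDARD);
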
|
||||
@ -123,35 +99,13 @@ public final class Base64 {
|
||||
return encode(src, off, len, breakLines(dialect), dialect);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(ByteBuf src, int off, int len, ByteBufFactory bufferFactory) {
|
||||
return encode(src, off, len, Base64Dialect.STANDARD, bufferFactory);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, int off, int len, Base64Dialect dialect, ByteBufFactory bufferFactory) {
|
||||
return encode(src, off, len, breakLines(dialect), dialect, bufferFactory);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, int off, int len, boolean breakLines) {
|
||||
return encode(src, off, len, breakLines, Base64Dialect.STANDARD);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, int off, int len,
|
||||
boolean breakLines, Base64Dialect dialect) {
|
||||
return encode(src, off, len, breakLines, dialect, HeapByteBufFactory.getInstance());
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, int off, int len,
|
||||
boolean breakLines, ByteBufFactory bufferFactory) {
|
||||
return encode(src, off, len, breakLines, Base64Dialect.STANDARD, bufferFactory);
|
||||
}
|
||||
|
||||
public static ByteBuf encode(
|
||||
ByteBuf src, int off, int len,
|
||||
boolean breakLines, Base64Dialect dialect, ByteBufFactory bufferFactory) {
|
||||
ByteBuf src, int off, int len, boolean breakLines, Base64Dialect dialect) {
|
||||
|
||||
if (src == null) {
|
||||
throw new NullPointerException("src");
|
||||
@ -159,16 +113,12 @@ public final class Base64 {
|
||||
if (dialect == null) {
|
||||
throw new NullPointerException("dialect");
|
||||
}
|
||||
if (bufferFactory == null) {
|
||||
throw new NullPointerException("bufferFactory");
|
||||
}
|
||||
|
||||
int len43 = len * 4 / 3;
|
||||
ByteBuf dest = bufferFactory.getBuffer(
|
||||
src.order(),
|
||||
ByteBuf dest = src.unsafe().newBuffer(
|
||||
len43 +
|
||||
(len % 3 > 0? 4 : 0) + // Account for padding
|
||||
(breakLines? len43 / MAX_LINE_LENGTH : 0)); // New lines
|
||||
(breakLines? len43 / MAX_LINE_LENGTH : 0)).order(src.order()); // New lines
|
||||
int d = 0;
|
||||
int e = 0;
|
||||
int len2 = len - 2;
|
||||
@ -241,20 +191,12 @@ public final class Base64 {
|
||||
}
|
||||
|
||||
public static ByteBuf decode(ByteBuf src, Base64Dialect dialect) {
|
||||
return decode(src, dialect, HeapByteBufFactory.getInstance());
|
||||
}
|
||||
|
||||
public static ByteBuf decode(ByteBuf src, ByteBufFactory bufferFactory) {
|
||||
return decode(src, Base64Dialect.STANDARD, bufferFactory);
|
||||
}
|
||||
|
||||
public static ByteBuf decode(ByteBuf src, Base64Dialect dialect, ByteBufFactory bufferFactory) {
|
||||
|
||||
if (src == null) {
|
||||
throw new NullPointerException("src");
|
||||
}
|
||||
|
||||
ByteBuf dest = decode(src, src.readerIndex(), src.readableBytes(), dialect, bufferFactory);
|
||||
ByteBuf dest = decode(src, src.readerIndex(), src.readableBytes(), dialect);
|
||||
src.readerIndex(src.writerIndex());
|
||||
return dest;
|
||||
}
|
||||
@ -266,17 +208,6 @@ public final class Base64 {
|
||||
|
||||
public static ByteBuf decode(
|
||||
ByteBuf src, int off, int len, Base64Dialect dialect) {
|
||||
return decode(src, off, len, dialect, HeapByteBufFactory.getInstance());
|
||||
}
|
||||
|
||||
public static ByteBuf decode(
|
||||
ByteBuf src, int off, int len, ByteBufFactory bufferFactory) {
|
||||
return decode(src, off, len, Base64Dialect.STANDARD, bufferFactory);
|
||||
}
|
||||
|
||||
public static ByteBuf decode(
|
||||
ByteBuf src, int off, int len, Base64Dialect dialect,
|
||||
ByteBufFactory bufferFactory) {
|
||||
|
||||
if (src == null) {
|
||||
throw new NullPointerException("src");
|
||||
@ -284,14 +215,11 @@ public final class Base64 {
|
||||
if (dialect == null) {
|
||||
throw new NullPointerException("dialect");
|
||||
}
|
||||
if (bufferFactory == null) {
|
||||
throw new NullPointerException("bufferFactory");
|
||||
}
|
||||
|
||||
byte[] DECODABET = decodabet(dialect);
|
||||
|
||||
int len34 = len * 3 / 4;
|
||||
ByteBuf dest = bufferFactory.getBuffer(src.order(), len34); // Upper limit on size of output
|
||||
ByteBuf dest = src.unsafe().newBuffer(len34).order(src.order()); // Upper limit on size of output
|
||||
int outBuffPosn = 0;
|
||||
|
||||
byte[] b4 = new byte[4];
|
||||
|
@ -235,7 +235,7 @@ public class JdkZlibEncoder extends ZlibEncoder {
|
||||
return future;
|
||||
}
|
||||
|
||||
ByteBuf footer = Unpooled.dynamicBuffer();
|
||||
ByteBuf footer = Unpooled.buffer();
|
||||
synchronized (deflater) {
|
||||
deflater.finish();
|
||||
while (!deflater.finished()) {
|
||||
|
@ -16,8 +16,6 @@
|
||||
package io.netty.handler.codec.marshalling;
|
||||
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import io.netty.buffer.ByteBufFactory;
|
||||
import io.netty.buffer.Unpooled;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -40,13 +38,6 @@ class ChannelBufferByteOutput implements ByteOutput {
|
||||
this.buffer = buffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls {@link #ChannelBufferByteOutput(ByteBuf)} with a dynamic {@link ByteBuf}
|
||||
*/
|
||||
public ChannelBufferByteOutput(ByteBufFactory factory, int estimatedLength) {
|
||||
this(Unpooled.dynamicBuffer(estimatedLength, factory));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
// Nothing todo
|
||||
|
@ -80,8 +80,7 @@ public class ObjectEncoderOutputStream extends OutputStream implements
|
||||
|
||||
@Override
|
||||
public void writeObject(Object obj) throws IOException {
|
||||
ByteBufOutputStream bout = new ByteBufOutputStream(
|
||||
Unpooled.dynamicBuffer(estimatedLength));
|
||||
ByteBufOutputStream bout = new ByteBufOutputStream(Unpooled.buffer(estimatedLength));
|
||||
ObjectOutputStream oout = new CompactObjectOutputStream(bout);
|
||||
oout.writeObject(obj);
|
||||
oout.flush();
|
||||
|
@ -108,12 +108,12 @@ public class ByteLoggingHandler
|
||||
}
|
||||
@Override
|
||||
public ByteBuf newOutboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf newInboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -310,12 +310,12 @@ public class SslHandler
|
||||
|
||||
@Override
|
||||
public ByteBuf newOutboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuf newInboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
pom.xml
@ -267,6 +267,8 @@
|
||||
</signature>
|
||||
<ignores>
|
||||
<ignore>sun.misc.Unsafe</ignore>
|
||||
<ignore>sun.misc.Cleaner</ignore>
|
||||
|
||||
<ignore>java.util.zip.Deflater</ignore>
|
||||
|
||||
<!-- Used for NIO UDP multicast -->
|
||||
|
@ -24,7 +24,7 @@ public abstract class ChannelInboundByteHandlerAdapter
|
||||
|
||||
@Override
|
||||
public ByteBuf newInboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -22,6 +22,6 @@ public abstract class ChannelOutboundByteHandlerAdapter
|
||||
extends ChannelOutboundHandlerAdapter implements ChannelOutboundByteHandler {
|
||||
@Override
|
||||
public ByteBuf newOutboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
}
|
||||
|
@ -770,7 +770,7 @@ final class DefaultChannelHandlerContext extends DefaultAttributeMap implements
|
||||
}
|
||||
|
||||
static final class ByteBridge {
|
||||
final ByteBuf byteBuf = Unpooled.dynamicBuffer();
|
||||
final ByteBuf byteBuf = Unpooled.buffer();
|
||||
final BlockingQueue<ByteBuf> exchangeBuf = QueueFactory.createQueue();
|
||||
|
||||
void fill() {
|
||||
|
@ -1443,7 +1443,7 @@ public class DefaultChannelPipeline implements ChannelPipeline {
|
||||
public ChannelBuf newOutboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
switch (channel.metadata().bufferType()) {
|
||||
case BYTE:
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
case MESSAGE:
|
||||
return Unpooled.messageBuffer();
|
||||
default:
|
||||
|
@ -35,7 +35,6 @@ import io.netty.logging.InternalLogger;
|
||||
import io.netty.logging.InternalLoggerFactory;
|
||||
|
||||
import java.net.SocketAddress;
|
||||
import java.util.Queue;
|
||||
|
||||
public abstract class AbstractEmbeddedChannel extends AbstractChannel {
|
||||
|
||||
@ -45,7 +44,7 @@ public abstract class AbstractEmbeddedChannel extends AbstractChannel {
|
||||
private final SocketAddress localAddress = new EmbeddedSocketAddress();
|
||||
private final SocketAddress remoteAddress = new EmbeddedSocketAddress();
|
||||
private final MessageBuf<Object> lastInboundMessageBuffer = Unpooled.messageBuffer();
|
||||
private final ByteBuf lastInboundByteBuffer = Unpooled.dynamicBuffer();
|
||||
private final ByteBuf lastInboundByteBuffer = Unpooled.buffer();
|
||||
protected final Object lastOutboundBuffer;
|
||||
private Throwable lastException;
|
||||
private int state; // 0 = OPEN, 1 = ACTIVE, 2 = CLOSED
|
||||
|
@ -26,7 +26,7 @@ public class EmbeddedByteChannel extends AbstractEmbeddedChannel {
|
||||
private static final ChannelMetadata METADATA = new ChannelMetadata(ChannelBufType.BYTE, false);
|
||||
|
||||
public EmbeddedByteChannel(ChannelHandler... handlers) {
|
||||
super(Unpooled.dynamicBuffer(), handlers);
|
||||
super(Unpooled.buffer(), handlers);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -394,7 +394,7 @@ public class LocalTransportThreadModelTest {
|
||||
|
||||
@Override
|
||||
public ByteBuf newOutboundBuffer(ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -473,7 +473,7 @@ public class LocalTransportThreadModelTest {
|
||||
@Override
|
||||
public ByteBuf newInboundBuffer(
|
||||
ChannelHandlerContext ctx) throws Exception {
|
||||
return Unpooled.dynamicBuffer();
|
||||
return Unpooled.buffer();
|
||||
}
|
||||
|
||||
@Override