diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ElasticByteBufferPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ElasticByteBufferPool.java
index e951a157750b0..7f7c614205373 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ElasticByteBufferPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ElasticByteBufferPool.java
@@ -126,7 +126,5 @@ public synchronized void putBuffer(ByteBuffer buffer) {
    */
   @InterfaceAudience.Private
   @InterfaceStability.Unstable
-  public int size(boolean direct) {
-    return getBufferTree(direct).size();
-  }
+  public abstract int getCurrentBuffersCount(boolean direct);
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestElasticByteBufferPool.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestElasticByteBufferPool.java
new file mode 100644
index 0000000000000..24b2ab0fd80f1
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestElasticByteBufferPool.java
@@ -0,0 +1,35 @@
+package org.apache.hadoop.io;
+
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+
+import java.nio.ByteBuffer;
+
+public class TestElasticByteBufferPool {
+  @Test
+  public void testGarbageCollection() throws Exception {
+    final ElasticByteBufferPool ebbp = new WeakReferencedElasticByteBufferPool();
+    ByteBuffer buffer1 = ebbp.getBuffer(true, 5);
+    ByteBuffer buffer2 = ebbp.getBuffer(true, 10);
+    ByteBuffer buffer3 = ebbp.getBuffer(true, 15);
+
+    // the pool is empty yet
+    Assertions.assertThat(ebbp.getCurrentBuffersCount(true)).isEqualTo(0);
+
+    // put the buffers back to the pool
+    ebbp.putBuffer(buffer2);
+    ebbp.putBuffer(buffer3);
+    Assertions.assertThat(ebbp.getCurrentBuffersCount(true)).isEqualTo(2);
+
+    // release the references to be garbage-collected
+    buffer2 = null;
+    buffer3 = null;
+    System.gc();
+
+    // call getBuffer() to trigger the cleanup
+    ByteBuffer buffer4 = ebbp.getBuffer(true, 10);
+
+    // the pool should be empty. buffer2, buffer3 should be garbage-collected
+    Assertions.assertThat(ebbp.getCurrentBuffersCount(true)).isEqualTo(0);
+  }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java
index 9d3bdbf4e9520..edaa8c0877b13 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java
@@ -555,16 +555,16 @@ public void testCloseDoesNotAllocateNewBuffer() throws Exception {
       final ElasticByteBufferPool ebbp =
           (ElasticByteBufferPool) stream.getBufferPool();
       // first clear existing pool
-      LOG.info("Current pool size: direct: " + ebbp.size(true) + ", indirect: "
-          + ebbp.size(false));
+      LOG.info("Current pool size: direct: " + ebbp.getCurrentBuffersCount(true) + ", indirect: "
+          + ebbp.getCurrentBuffersCount(false));
       emptyBufferPoolForCurrentPolicy(ebbp, true);
       emptyBufferPoolForCurrentPolicy(ebbp, false);
-      final int startSizeDirect = ebbp.size(true);
-      final int startSizeIndirect = ebbp.size(false);
+      final int startSizeDirect = ebbp.getCurrentBuffersCount(true);
+      final int startSizeIndirect = ebbp.getCurrentBuffersCount(false);
       // close should not allocate new buffers in the pool.
       stream.close();
-      assertEquals(startSizeDirect, ebbp.size(true));
-      assertEquals(startSizeIndirect, ebbp.size(false));
+      assertEquals(startSizeDirect, ebbp.getCurrentBuffersCount(true));
+      assertEquals(startSizeIndirect, ebbp.getCurrentBuffersCount(false));
     }
   }
 
@@ -621,10 +621,10 @@ public void testReadWhenLastIncompleteCellComeInToDecodeAlignedStripe()
   private void emptyBufferPoolForCurrentPolicy(ElasticByteBufferPool ebbp,
       boolean direct) {
     int size;
-    while ((size = ebbp.size(direct)) != 0) {
+    while ((size = ebbp.getCurrentBuffersCount(direct)) != 0) {
       ebbp.getBuffer(direct,
           ecPolicy.getCellSize() * ecPolicy.getNumDataUnits());
-      if (size == ebbp.size(direct)) {
+      if (size == ebbp.getCurrentBuffersCount(direct)) {
         // if getBuffer didn't decrease size, it means the pool for the buffer
         // corresponding to current ecPolicy is empty
         break;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
index 2f734c62f420b..920ccdff1fafe 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
@@ -828,7 +828,7 @@ public void interceptFreeBlockReaderBuffer() {
 
   private void assertBufferPoolIsEmpty(ElasticByteBufferPool bufferPool,
       boolean direct) {
-    while (bufferPool.size(direct) != 0) {
+    while (bufferPool.getCurrentBuffersCount(direct) != 0) {
       // iterate all ByteBuffers in ElasticByteBufferPool
       ByteBuffer byteBuffer = bufferPool.getBuffer(direct, 0);
       Assert.assertEquals(0, byteBuffer.position());
@@ -837,7 +837,7 @@ private void assertBufferPoolIsEmpty(ElasticByteBufferPool bufferPool,
 
   private void emptyBufferPool(ElasticByteBufferPool bufferPool,
       boolean direct) {
-    while (bufferPool.size(direct) != 0) {
+    while (bufferPool.getCurrentBuffersCount(direct) != 0) {
       bufferPool.getBuffer(direct, 0);
     }
   }