/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.lang.management.ManagementFactory;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.SortedSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
import org.apache.hadoop.hbase.regionserver.ChunkCreator.ChunkType;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
52 @Category({RegionServerTests
.class, SmallTests
.class})
53 @RunWith(Parameterized
.class)
54 public class TestCellFlatSet
{
57 public static final HBaseClassTestRule CLASS_RULE
=
58 HBaseClassTestRule
.forClass(TestCellFlatSet
.class);
60 @Parameterized.Parameters
61 public static Object
[] data() {
62 return new Object
[] { "SMALL_CHUNKS", "NORMAL_CHUNKS" }; // test with different chunk sizes
64 private static final int NUM_OF_CELLS
= 4;
65 private static final int SMALL_CHUNK_SIZE
= 64;
66 private Cell ascCells
[];
67 private CellArrayMap ascCbOnHeap
;
68 private Cell descCells
[];
69 private CellArrayMap descCbOnHeap
;
70 private final static Configuration CONF
= new Configuration();
71 private KeyValue lowerOuterCell
;
72 private KeyValue upperOuterCell
;
75 private CellChunkMap ascCCM
; // for testing ascending CellChunkMap with one chunk in array
76 private CellChunkMap descCCM
; // for testing descending CellChunkMap with one chunk in array
77 private final boolean smallChunks
;
78 private static ChunkCreator chunkCreator
;
81 public TestCellFlatSet(String chunkType
){
82 long globalMemStoreLimit
= (long) (ManagementFactory
.getMemoryMXBean().getHeapMemoryUsage()
83 .getMax() * MemorySizeUtil
.getGlobalMemStoreHeapPercent(CONF
, false));
84 if (chunkType
.equals("NORMAL_CHUNKS")) {
85 chunkCreator
= ChunkCreator
.initialize(MemStoreLAB
.CHUNK_SIZE_DEFAULT
, false,
86 globalMemStoreLimit
, 0.2f
, MemStoreLAB
.POOL_INITIAL_SIZE_DEFAULT
,
87 null, MemStoreLAB
.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT
);
88 assertNotNull(chunkCreator
);
91 // chunkCreator with smaller chunk size, so only 3 cell-representations can accommodate a chunk
92 chunkCreator
= ChunkCreator
.initialize(SMALL_CHUNK_SIZE
, false,
93 globalMemStoreLimit
, 0.2f
, MemStoreLAB
.POOL_INITIAL_SIZE_DEFAULT
,
94 null, MemStoreLAB
.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT
);
95 assertNotNull(chunkCreator
);
101 public void setUp() throws Exception
{
102 // create array of Cells to bass to the CellFlatMap under CellSet
103 final byte[] one
= Bytes
.toBytes(15);
104 final byte[] two
= Bytes
.toBytes(25);
105 final byte[] three
= Bytes
.toBytes(35);
106 final byte[] four
= Bytes
.toBytes(45);
108 final byte[] f
= Bytes
.toBytes("f");
109 final byte[] q
= Bytes
.toBytes("q");
110 final byte[] v
= Bytes
.toBytes(4);
112 final KeyValue kv1
= new KeyValue(one
, f
, q
, 10, v
);
113 final KeyValue kv2
= new KeyValue(two
, f
, q
, 20, v
);
114 final KeyValue kv3
= new KeyValue(three
, f
, q
, 30, v
);
115 final KeyValue kv4
= new KeyValue(four
, f
, q
, 40, v
);
116 lowerOuterCell
= new KeyValue(Bytes
.toBytes(10), f
, q
, 10, v
);
117 upperOuterCell
= new KeyValue(Bytes
.toBytes(50), f
, q
, 10, v
);
118 ascCells
= new Cell
[] {kv1
,kv2
,kv3
,kv4
};
119 ascCbOnHeap
= new CellArrayMap(CellComparator
.getInstance(), ascCells
,0, NUM_OF_CELLS
,false);
120 descCells
= new Cell
[] {kv4
,kv3
,kv2
,kv1
};
121 descCbOnHeap
= new CellArrayMap(CellComparator
.getInstance(), descCells
,0, NUM_OF_CELLS
,true);
123 CONF
.setBoolean(MemStoreLAB
.USEMSLAB_KEY
, true);
124 CONF
.setFloat(MemStoreLAB
.CHUNK_POOL_MAXSIZE_KEY
, 0.2f
);
125 ChunkCreator
.chunkPoolDisabled
= false;
127 // create ascending and descending CellChunkMaps
128 // according to parameter, once built with normal chunks and at second with small chunks
129 ascCCM
= setUpCellChunkMap(true);
130 descCCM
= setUpCellChunkMap(false);
132 if (smallChunks
) { // check jumbo chunks as well
133 ascCCM
= setUpJumboCellChunkMap(true);
137 /* Create and test ascending CellSet based on CellArrayMap */
139 public void testCellArrayMapAsc() throws Exception
{
140 CellSet cs
= new CellSet(ascCbOnHeap
);
145 /* Create and test ascending and descending CellSet based on CellChunkMap */
147 public void testCellChunkMap() throws Exception
{
148 CellSet cs
= new CellSet(ascCCM
);
152 cs
= new CellSet(descCCM
);
154 // cs = new CellSet(ascMultCCM);
155 // testCellBlocks(cs);
157 // cs = new CellSet(descMultCCM);
162 public void testAsc() throws Exception
{
163 CellSet ascCs
= new CellSet(ascCbOnHeap
);
164 assertEquals(NUM_OF_CELLS
, ascCs
.size());
168 public void testDesc() throws Exception
{
169 CellSet descCs
= new CellSet(descCbOnHeap
);
170 assertEquals(NUM_OF_CELLS
, descCs
.size());
174 private void testSubSet(CellSet cs
) throws Exception
{
175 for (int i
= 0; i
!= ascCells
.length
; ++i
) {
176 NavigableSet
<Cell
> excludeTail
= cs
.tailSet(ascCells
[i
], false);
177 NavigableSet
<Cell
> includeTail
= cs
.tailSet(ascCells
[i
], true);
178 assertEquals(ascCells
.length
- 1 - i
, excludeTail
.size());
179 assertEquals(ascCells
.length
- i
, includeTail
.size());
180 Iterator
<Cell
> excludeIter
= excludeTail
.iterator();
181 Iterator
<Cell
> includeIter
= includeTail
.iterator();
182 for (int j
= 1 + i
; j
!= ascCells
.length
; ++j
) {
183 assertEquals(true, CellUtil
.equals(excludeIter
.next(), ascCells
[j
]));
185 for (int j
= i
; j
!= ascCells
.length
; ++j
) {
186 assertEquals(true, CellUtil
.equals(includeIter
.next(), ascCells
[j
]));
189 assertEquals(NUM_OF_CELLS
, cs
.tailSet(lowerOuterCell
, false).size());
190 assertEquals(0, cs
.tailSet(upperOuterCell
, false).size());
191 for (int i
= 0; i
!= ascCells
.length
; ++i
) {
192 NavigableSet
<Cell
> excludeHead
= cs
.headSet(ascCells
[i
], false);
193 NavigableSet
<Cell
> includeHead
= cs
.headSet(ascCells
[i
], true);
194 assertEquals(i
, excludeHead
.size());
195 assertEquals(i
+ 1, includeHead
.size());
196 Iterator
<Cell
> excludeIter
= excludeHead
.iterator();
197 Iterator
<Cell
> includeIter
= includeHead
.iterator();
198 for (int j
= 0; j
!= i
; ++j
) {
199 assertEquals(true, CellUtil
.equals(excludeIter
.next(), ascCells
[j
]));
201 for (int j
= 0; j
!= i
+ 1; ++j
) {
202 assertEquals(true, CellUtil
.equals(includeIter
.next(), ascCells
[j
]));
205 assertEquals(0, cs
.headSet(lowerOuterCell
, false).size());
206 assertEquals(NUM_OF_CELLS
, cs
.headSet(upperOuterCell
, false).size());
208 NavigableMap
<Cell
, Cell
> sub
= cs
.getDelegatee().subMap(lowerOuterCell
, true, upperOuterCell
, true);
209 assertEquals(NUM_OF_CELLS
, sub
.size());
210 Iterator
<Cell
> iter
= sub
.values().iterator();
211 for (int i
= 0; i
!= ascCells
.length
; ++i
) {
212 assertEquals(true, CellUtil
.equals(iter
.next(), ascCells
[i
]));
216 /* Generic basic test for immutable CellSet */
217 private void testCellBlocks(CellSet cs
) throws Exception
{
218 final byte[] oneAndHalf
= Bytes
.toBytes(20);
219 final byte[] f
= Bytes
.toBytes("f");
220 final byte[] q
= Bytes
.toBytes("q");
221 final byte[] v
= Bytes
.toBytes(4);
222 final KeyValue outerCell
= new KeyValue(oneAndHalf
, f
, q
, 10, v
);
224 assertEquals(NUM_OF_CELLS
, cs
.size()); // check size
225 assertFalse(cs
.contains(outerCell
)); // check outer cell
227 assertTrue(cs
.contains(ascCells
[0])); // check existence of the first
228 Cell first
= cs
.first();
229 assertTrue(ascCells
[0].equals(first
));
231 assertTrue(cs
.contains(ascCells
[NUM_OF_CELLS
- 1])); // check last
232 Cell last
= cs
.last();
233 assertTrue(ascCells
[NUM_OF_CELLS
- 1].equals(last
));
235 SortedSet
<Cell
> tail
= cs
.tailSet(ascCells
[1]); // check tail abd head sizes
236 assertEquals(NUM_OF_CELLS
- 1, tail
.size());
237 SortedSet
<Cell
> head
= cs
.headSet(ascCells
[1]);
238 assertEquals(1, head
.size());
240 SortedSet
<Cell
> tailOuter
= cs
.tailSet(outerCell
); // check tail starting from outer cell
241 assertEquals(NUM_OF_CELLS
- 1, tailOuter
.size());
243 Cell tailFirst
= tail
.first();
244 assertTrue(ascCells
[1].equals(tailFirst
));
245 Cell tailLast
= tail
.last();
246 assertTrue(ascCells
[NUM_OF_CELLS
- 1].equals(tailLast
));
248 Cell headFirst
= head
.first();
249 assertTrue(ascCells
[0].equals(headFirst
));
250 Cell headLast
= head
.last();
251 assertTrue(ascCells
[0].equals(headLast
));
254 /* Generic iterators test for immutable CellSet */
255 private void testIterators(CellSet cs
) throws Exception
{
257 // Assert that we have NUM_OF_CELLS values and that they are in order
260 assertEquals("\n\n-------------------------------------------------------------------\n"
261 + "Comparing iteration number " + (count
+ 1) + " the returned cell: " + kv
262 + ", the first Cell in the CellBlocksMap: " + ascCells
[count
]
263 + ", and the same transformed to String: " + ascCells
[count
].toString()
264 + "\n-------------------------------------------------------------------\n",
265 ascCells
[count
], kv
);
268 assertEquals(NUM_OF_CELLS
, count
);
270 // Test descending iterator
272 for (Iterator
<Cell
> i
= cs
.descendingIterator(); i
.hasNext();) {
274 assertEquals(ascCells
[NUM_OF_CELLS
- (count
+ 1)], kv
);
277 assertEquals(NUM_OF_CELLS
, count
);
280 /* Create CellChunkMap with four cells inside the index chunk */
281 private CellChunkMap
setUpCellChunkMap(boolean asc
) {
283 // allocate new chunks and use the data chunk to hold the full data of the cells
284 // and the index chunk to hold the cell-representations
285 Chunk dataChunk
= chunkCreator
.getChunk();
286 Chunk idxChunk
= chunkCreator
.getChunk();
287 // the array of index chunks to be used as a basis for CellChunkMap
288 Chunk chunkArray
[] = new Chunk
[8]; // according to test currently written 8 is way enough
289 int chunkArrayIdx
= 0;
290 chunkArray
[chunkArrayIdx
++] = idxChunk
;
292 ByteBuffer idxBuffer
= idxChunk
.getData(); // the buffers of the chunks
293 ByteBuffer dataBuffer
= dataChunk
.getData();
294 int dataOffset
= ChunkCreator
.SIZEOF_CHUNK_HEADER
; // offset inside data buffer
295 int idxOffset
= ChunkCreator
.SIZEOF_CHUNK_HEADER
; // skip the space for chunk ID
297 Cell
[] cellArray
= asc ? ascCells
: descCells
;
299 for (Cell kv
: cellArray
) {
300 // do we have enough space to write the cell data on the data chunk?
301 if (dataOffset
+ kv
.getSerializedSize() > chunkCreator
.getChunkSize()) {
302 // allocate more data chunks if needed
303 dataChunk
= chunkCreator
.getChunk();
304 dataBuffer
= dataChunk
.getData();
305 dataOffset
= ChunkCreator
.SIZEOF_CHUNK_HEADER
;
307 int dataStartOfset
= dataOffset
;
308 dataOffset
= KeyValueUtil
.appendTo(kv
, dataBuffer
, dataOffset
, false); // write deep cell data
310 // do we have enough space to write the cell-representation on the index chunk?
311 if (idxOffset
+ ClassSize
.CELL_CHUNK_MAP_ENTRY
> chunkCreator
.getChunkSize()) {
312 // allocate more index chunks if needed
313 idxChunk
= chunkCreator
.getChunk();
314 idxBuffer
= idxChunk
.getData();
315 idxOffset
= ChunkCreator
.SIZEOF_CHUNK_HEADER
;
316 chunkArray
[chunkArrayIdx
++] = idxChunk
;
318 idxOffset
= ByteBufferUtils
.putInt(idxBuffer
, idxOffset
, dataChunk
.getId()); // write data chunk id
319 idxOffset
= ByteBufferUtils
.putInt(idxBuffer
, idxOffset
, dataStartOfset
); // offset
320 idxOffset
= ByteBufferUtils
.putInt(idxBuffer
, idxOffset
, kv
.getSerializedSize()); // length
321 idxOffset
= ByteBufferUtils
.putLong(idxBuffer
, idxOffset
, kv
.getSequenceId()); // seqId
324 return new CellChunkMap(CellComparator
.getInstance(),chunkArray
,0,NUM_OF_CELLS
,!asc
);
327 /* Create CellChunkMap with four cells inside the data jumbo chunk. This test is working only
328 ** with small chunks sized SMALL_CHUNK_SIZE (64) bytes */
329 private CellChunkMap
setUpJumboCellChunkMap(boolean asc
) {
330 int smallChunkSize
= SMALL_CHUNK_SIZE
+8;
331 // allocate new chunks and use the data JUMBO chunk to hold the full data of the cells
332 // and the normal index chunk to hold the cell-representations
333 Chunk dataJumboChunk
=
334 chunkCreator
.getChunk(ChunkType
.JUMBO_CHUNK
,
336 assertTrue(dataJumboChunk
.isJumbo());
337 Chunk idxChunk
= chunkCreator
.getChunk();
338 // the array of index chunks to be used as a basis for CellChunkMap
339 Chunk
[] chunkArray
= new Chunk
[8]; // according to test currently written 8 is way enough
340 int chunkArrayIdx
= 0;
341 chunkArray
[chunkArrayIdx
++] = idxChunk
;
343 ByteBuffer idxBuffer
= idxChunk
.getData(); // the buffers of the chunks
344 ByteBuffer dataBuffer
= dataJumboChunk
.getData();
345 int dataOffset
= ChunkCreator
.SIZEOF_CHUNK_HEADER
; // offset inside data buffer
346 int idxOffset
= ChunkCreator
.SIZEOF_CHUNK_HEADER
; // skip the space for chunk ID
348 Cell
[] cellArray
= asc ? ascCells
: descCells
;
350 for (Cell kv
: cellArray
) {
351 int dataStartOfset
= dataOffset
;
352 dataOffset
= KeyValueUtil
.appendTo(kv
, dataBuffer
, dataOffset
, false); // write deep cell data
354 // do we have enough space to write the cell-representation on the index chunk?
355 if (idxOffset
+ ClassSize
.CELL_CHUNK_MAP_ENTRY
> chunkCreator
.getChunkSize()) {
356 // allocate more index chunks if needed
357 idxChunk
= chunkCreator
.getChunk();
358 idxBuffer
= idxChunk
.getData();
359 idxOffset
= ChunkCreator
.SIZEOF_CHUNK_HEADER
;
360 chunkArray
[chunkArrayIdx
++] = idxChunk
;
362 // write data chunk id
363 idxOffset
= ByteBufferUtils
.putInt(idxBuffer
, idxOffset
, dataJumboChunk
.getId());
364 idxOffset
= ByteBufferUtils
.putInt(idxBuffer
, idxOffset
, dataStartOfset
); // offset
365 idxOffset
= ByteBufferUtils
.putInt(idxBuffer
, idxOffset
, kv
.getSerializedSize()); // length
366 idxOffset
= ByteBufferUtils
.putLong(idxBuffer
, idxOffset
, kv
.getSequenceId()); // seqId
368 // Jumbo chunks are working only with one cell per chunk, thus always allocate a new jumbo
369 // data chunk for next cell
371 chunkCreator
.getChunk(ChunkType
.JUMBO_CHUNK
,
373 assertTrue(dataJumboChunk
.isJumbo());
374 dataBuffer
= dataJumboChunk
.getData();
375 dataOffset
= ChunkCreator
.SIZEOF_CHUNK_HEADER
;
378 return new CellChunkMap(CellComparator
.getInstance(),chunkArray
,0,NUM_OF_CELLS
,!asc
);