/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
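
/**
 * Verify how many HFile blocks a scan touches: the tests compare the block cache
 * hit + miss counters before and after scanning a small, freshly written region.
 */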
@SuppressWarnings("deprecation")
@Category({RegionServerTests.class, SmallTests.class})
public class TestBlocksScanned extends HBaseTestCase {
  private static byte [] FAMILY = Bytes.toBytes("family");
  private static byte [] COL = Bytes.toBytes("col");
  private static byte [] START_KEY = Bytes.toBytes("aaa");
  private static byte [] END_KEY = Bytes.toBytes("zzz");
  private static int BLOCK_SIZE = 70;

  private static HBaseTestingUtility TEST_UTIL = null;

  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();

    TEST_UTIL = new HBaseTestingUtility();
  }

  @Test
  public void testBlocksScanned() throws Exception {
    byte [] tableName = Bytes.toBytes("TestBlocksScanned");
    HTableDescriptor table = new HTableDescriptor(TableName.valueOf(tableName));

    table.addFamily(
        new HColumnDescriptor(FAMILY)
        .setBlockCacheEnabled(true)
        .setBlocksize(BLOCK_SIZE)
        .setCompressionType(Compression.Algorithm.NONE)
        );
    _testBlocksScanned(table);
  }

  @Test
  public void testBlocksScannedWithEncoding() throws Exception {
    byte [] tableName = Bytes.toBytes("TestBlocksScannedWithEncoding");
    HTableDescriptor table = new HTableDescriptor(TableName.valueOf(tableName));

    table.addFamily(
        new HColumnDescriptor(FAMILY)
        .setBlockCacheEnabled(true)
        .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
        .setBlocksize(BLOCK_SIZE)
        .setCompressionType(Compression.Algorithm.NONE)
        );
    _testBlocksScanned(table);
  }
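
  /**
   * Fills the region with small cells, flushes, then scans rows "aaa" (inclusive) to
   * "aaz" (exclusive) and checks that the number of data and index blocks pulled through
   * the block cache matches what BLOCK_SIZE and the KeyValue size predict.
   */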
  private void _testBlocksScanned(HTableDescriptor table) throws Exception {
    HRegion r = createNewHRegion(table, START_KEY, END_KEY, TEST_UTIL.getConfiguration());
    addContent(r, FAMILY, COL);
    // Flush so the scan reads blocks from store files instead of the memstore.
    r.flush(true);

    CacheStats stats = new CacheConfig(TEST_UTIL.getConfiguration()).getBlockCache().getStats();
    long before = stats.getHitCount() + stats.getMissCount();
    // Do simple test of getting one row only first.
    Scan scan = new Scan().withStartRow(Bytes.toBytes("aaa")).withStopRow(Bytes.toBytes("aaz"))
        .setReadType(Scan.ReadType.PREAD);
    scan.addColumn(FAMILY, COL);
    scan.setMaxVersions(1);

    InternalScanner s = r.getScanner(scan);
    List<Cell> results = new ArrayList<>();
    while (s.next(results));
    s.close();

    int expectResultSize = 'z' - 'a';
    assertEquals(expectResultSize, results.size());

    // With a 70-byte block size, each data block is expected to hold two KeyValues.
    int kvPerBlock = (int) Math.ceil(BLOCK_SIZE /
        (double) KeyValueUtil.ensureKeyValue(results.get(0)).getLength());
    Assert.assertEquals(2, kvPerBlock);

    // The scan is expected to read one index block for every data block it reads.
    long expectDataBlockRead = (long) Math.ceil(expectResultSize / (double) kvPerBlock);
    long expectIndexBlockRead = expectDataBlockRead;

    assertEquals(expectIndexBlockRead + expectDataBlockRead,
      stats.getHitCount() + stats.getMissCount() - before);
  }
}