/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org
.apache
.hadoop
.hbase
.regionserver
;
20 import static org
.apache
.hadoop
.hbase
.HBaseTestCase
.addContent
;
21 import static org
.junit
.Assert
.assertEquals
;
23 import java
.util
.ArrayList
;
24 import java
.util
.List
;
26 import org
.apache
.hadoop
.conf
.Configuration
;
27 import org
.apache
.hadoop
.fs
.Path
;
28 import org
.apache
.hadoop
.hbase
.Cell
;
29 import org
.apache
.hadoop
.hbase
.HBaseClassTestRule
;
30 import org
.apache
.hadoop
.hbase
.HBaseTestingUtility
;
31 import org
.apache
.hadoop
.hbase
.KeyValueUtil
;
32 import org
.apache
.hadoop
.hbase
.TableName
;
33 import org
.apache
.hadoop
.hbase
.client
.ColumnFamilyDescriptorBuilder
;
34 import org
.apache
.hadoop
.hbase
.client
.RegionInfo
;
35 import org
.apache
.hadoop
.hbase
.client
.RegionInfoBuilder
;
36 import org
.apache
.hadoop
.hbase
.client
.Scan
;
37 import org
.apache
.hadoop
.hbase
.client
.TableDescriptor
;
38 import org
.apache
.hadoop
.hbase
.client
.TableDescriptorBuilder
;
39 import org
.apache
.hadoop
.hbase
.io
.compress
.Compression
;
40 import org
.apache
.hadoop
.hbase
.io
.encoding
.DataBlockEncoding
;
41 import org
.apache
.hadoop
.hbase
.io
.hfile
.BlockCache
;
42 import org
.apache
.hadoop
.hbase
.io
.hfile
.BlockCacheFactory
;
43 import org
.apache
.hadoop
.hbase
.io
.hfile
.CacheStats
;
44 import org
.apache
.hadoop
.hbase
.testclassification
.RegionServerTests
;
45 import org
.apache
.hadoop
.hbase
.testclassification
.SmallTests
;
46 import org
.apache
.hadoop
.hbase
.util
.Bytes
;
47 import org
.junit
.Before
;
48 import org
.junit
.ClassRule
;
49 import org
.junit
.Test
;
50 import org
.junit
.experimental
.categories
.Category
;
52 @SuppressWarnings("deprecation")
53 @Category({RegionServerTests
.class, SmallTests
.class})
54 public class TestBlocksScanned
{
57 public static final HBaseClassTestRule CLASS_RULE
=
58 HBaseClassTestRule
.forClass(TestBlocksScanned
.class);
60 private static byte [] FAMILY
= Bytes
.toBytes("family");
61 private static byte [] COL
= Bytes
.toBytes("col");
62 private static byte [] START_KEY
= Bytes
.toBytes("aaa");
63 private static byte [] END_KEY
= Bytes
.toBytes("zzz");
64 private static int BLOCK_SIZE
= 70;
66 private static HBaseTestingUtility TEST_UTIL
= null;
67 private Configuration conf
;
71 public void setUp() throws Exception
{
72 TEST_UTIL
= new HBaseTestingUtility();
73 conf
= TEST_UTIL
.getConfiguration();
74 testDir
= TEST_UTIL
.getDataTestDir("TestBlocksScanned");
78 public void testBlocksScanned() throws Exception
{
79 byte [] tableName
= Bytes
.toBytes("TestBlocksScanned");
80 TableDescriptorBuilder
.ModifyableTableDescriptor tableDescriptor
=
81 new TableDescriptorBuilder
.ModifyableTableDescriptor(TableName
.valueOf(tableName
));
83 tableDescriptor
.setColumnFamily(
84 new ColumnFamilyDescriptorBuilder
.ModifyableColumnFamilyDescriptor(FAMILY
)
86 .setBlockCacheEnabled(true)
87 .setBlocksize(BLOCK_SIZE
)
88 .setCompressionType(Compression
.Algorithm
.NONE
)
90 _testBlocksScanned(tableDescriptor
);
94 public void testBlocksScannedWithEncoding() throws Exception
{
95 byte [] tableName
= Bytes
.toBytes("TestBlocksScannedWithEncoding");
96 TableDescriptorBuilder
.ModifyableTableDescriptor tableDescriptor
=
97 new TableDescriptorBuilder
.ModifyableTableDescriptor(TableName
.valueOf(tableName
));
99 tableDescriptor
.setColumnFamily(
100 new ColumnFamilyDescriptorBuilder
.ModifyableColumnFamilyDescriptor(FAMILY
)
102 .setBlockCacheEnabled(true)
103 .setDataBlockEncoding(DataBlockEncoding
.FAST_DIFF
)
104 .setBlocksize(BLOCK_SIZE
)
105 .setCompressionType(Compression
.Algorithm
.NONE
)
107 _testBlocksScanned(tableDescriptor
);
110 private void _testBlocksScanned(TableDescriptor td
) throws Exception
{
111 BlockCache blockCache
= BlockCacheFactory
.createBlockCache(conf
);
112 RegionInfo regionInfo
=
113 RegionInfoBuilder
.newBuilder(td
.getTableName()).setStartKey(START_KEY
).setEndKey(END_KEY
)
115 HRegion r
= HBaseTestingUtility
.createRegionAndWAL(regionInfo
, testDir
, conf
, td
, blockCache
);
116 addContent(r
, FAMILY
, COL
);
119 CacheStats stats
= blockCache
.getStats();
120 long before
= stats
.getHitCount() + stats
.getMissCount();
121 // Do simple test of getting one row only first.
122 Scan scan
= new Scan().withStartRow(Bytes
.toBytes("aaa")).withStopRow(Bytes
.toBytes("aaz"))
123 .setReadType(Scan
.ReadType
.PREAD
);
124 scan
.addColumn(FAMILY
, COL
);
125 scan
.readVersions(1);
127 InternalScanner s
= r
.getScanner(scan
);
128 List
<Cell
> results
= new ArrayList
<>();
129 while (s
.next(results
));
132 int expectResultSize
= 'z' - 'a';
133 assertEquals(expectResultSize
, results
.size());
135 int kvPerBlock
= (int) Math
.ceil(BLOCK_SIZE
/
136 (double) KeyValueUtil
.ensureKeyValue(results
.get(0)).getLength());
137 assertEquals(2, kvPerBlock
);
139 long expectDataBlockRead
= (long) Math
.ceil(expectResultSize
/ (double) kvPerBlock
);
140 long expectIndexBlockRead
= expectDataBlockRead
;
142 assertEquals(expectIndexBlockRead
+ expectDataBlockRead
,
143 stats
.getHitCount() + stats
.getMissCount() - before
);