HBASE-26700 The way we bypass broken track file is not enough in StoreFileListFile...
[hbase.git] / hbase-server / src / test / java / org / apache / hadoop / hbase / regionserver / TestBlocksScanned.java
blob f18acdc7a8eb7a9795d989cfd04bc90039353b34
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.HTestConst.addContent;
import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
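
/**
 * Counts the HFile blocks a scan touches by comparing the block cache's hit + miss totals
 * before and after the scan runs, both with and without data block encoding.
 */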
@SuppressWarnings("deprecation")
@Category({ RegionServerTests.class, SmallTests.class })
public class TestBlocksScanned {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestBlocksScanned.class);

  private static byte[] FAMILY = Bytes.toBytes("family");
  private static byte[] COL = Bytes.toBytes("col");
  private static byte[] START_KEY = Bytes.toBytes("aaa");
  private static byte[] END_KEY = Bytes.toBytes("zzz");
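  // A deliberately tiny block size (the default is 64 KB) so the test rows span many HFile blocks.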
  private static int BLOCK_SIZE = 70;

  private static HBaseTestingUtil TEST_UTIL = null;
  private Configuration conf;
  private Path testDir;

  @Before
  public void setUp() throws Exception {
    TEST_UTIL = new HBaseTestingUtil();
    conf = TEST_UTIL.getConfiguration();
    testDir = TEST_UTIL.getDataTestDir("TestBlocksScanned");
  }

  @Test
  public void testBlocksScanned() throws Exception {
    byte[] tableName = Bytes.toBytes("TestBlocksScanned");
    TableDescriptor tableDescriptor =
      TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setMaxVersions(10)
          .setBlockCacheEnabled(true).setBlocksize(BLOCK_SIZE)
          .setCompressionType(Compression.Algorithm.NONE).build())
        .build();
    _testBlocksScanned(tableDescriptor);
  }

  @Test
  public void testBlocksScannedWithEncoding() throws Exception {
    byte[] tableName = Bytes.toBytes("TestBlocksScannedWithEncoding");
    TableDescriptor tableDescriptor =
      TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setMaxVersions(10)
          .setBlockCacheEnabled(true).setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
          .setBlocksize(BLOCK_SIZE).setCompressionType(Compression.Algorithm.NONE).build())
        .build();
    _testBlocksScanned(tableDescriptor);
  }

  private void _testBlocksScanned(TableDescriptor td) throws Exception {
    BlockCache blockCache = BlockCacheFactory.createBlockCache(conf);
    RegionInfo regionInfo =
      RegionInfoBuilder.newBuilder(td.getTableName()).setStartKey(START_KEY).setEndKey(END_KEY)
        .build();
    HRegion r = HBaseTestingUtil.createRegionAndWAL(regionInfo, testDir, conf, td, blockCache);
    addContent(r, FAMILY, COL);
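    // Flush the memstore to an HFile so the scan below reads blocks through the block cache.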
    r.flush(true);

    CacheStats stats = blockCache.getStats();
    long before = stats.getHitCount() + stats.getMissCount();
    // Do simple test of getting one row only first.
    Scan scan = new Scan().withStartRow(Bytes.toBytes("aaa")).withStopRow(Bytes.toBytes("aaz"))
      .setReadType(Scan.ReadType.PREAD);
    scan.addColumn(FAMILY, COL);
    scan.readVersions(1);

    InternalScanner s = r.getScanner(scan);
    List<Cell> results = new ArrayList<>();
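    // Drain the scanner; each next(...) call appends the cells of the next row to results.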
    while (s.next(results)) {
    }
    s.close();
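
    // The half-open scan range ["aaa", "aaz") covers rows "aaa" through "aay", one returned
    // cell per row, i.e. 'z' - 'a' = 25 results.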
    int expectResultSize = 'z' - 'a';
    assertEquals(expectResultSize, results.size());
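
    // Each data block holds ceil(BLOCK_SIZE / cellLength) cells; with the 70-byte block size
    // and these fixed-width test cells that comes out to exactly 2.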
    int kvPerBlock = (int) Math.ceil(BLOCK_SIZE /
      (double) KeyValueUtil.ensureKeyValue(results.get(0)).getLength());
    assertEquals(2, kvPerBlock);
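
    // The expected cache traffic (hits + misses since the baseline) models one index block
    // access for every data block read under pread.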
    long expectDataBlockRead = (long) Math.ceil(expectResultSize / (double) kvPerBlock);
    long expectIndexBlockRead = expectDataBlockRead;

    assertEquals(expectIndexBlockRead + expectDataBlockRead,
      stats.getHitCount() + stats.getMissCount() - before);
  }
}