/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
55 @Category({ RegionServerTests
.class, SmallTests
.class })
56 public class TestWideScanner
{
59 public static final HBaseClassTestRule CLASS_RULE
=
60 HBaseClassTestRule
.forClass(TestWideScanner
.class);
62 private static final HBaseTestingUtil UTIL
= new HBaseTestingUtil();
64 private static final Logger LOG
= LoggerFactory
.getLogger(TestWideScanner
.class);
66 private static final byte[] A
= Bytes
.toBytes("A");
67 private static final byte[] B
= Bytes
.toBytes("B");
68 private static final byte[] C
= Bytes
.toBytes("C");
69 private static byte[][] COLUMNS
= { A
, B
, C
};
71 private static final TableDescriptor TESTTABLEDESC
;
73 TableDescriptorBuilder builder
=
74 TableDescriptorBuilder
.newBuilder(TableName
.valueOf("testwidescan"));
75 for (byte[] cfName
: new byte[][] { A
, B
, C
}) {
76 // Keep versions to help debugging.
77 builder
.setColumnFamily(ColumnFamilyDescriptorBuilder
.newBuilder(cfName
).setMaxVersions(100)
78 .setBlocksize(8 * 1024).build());
80 TESTTABLEDESC
= builder
.build();
83 /** HRegionInfo for root region */
84 private static HRegion REGION
;
87 public static void setUp() throws IOException
{
88 Path testDir
= UTIL
.getDataTestDir();
89 RegionInfo hri
= RegionInfoBuilder
.newBuilder(TESTTABLEDESC
.getTableName()).build();
91 HBaseTestingUtil
.createRegionAndWAL(hri
, testDir
, UTIL
.getConfiguration(), TESTTABLEDESC
);
95 public static void tearDown() throws IOException
{
97 HBaseTestingUtil
.closeRegionAndWAL(REGION
);
100 UTIL
.cleanupTestDir();
103 private int addWideContent(HRegion region
) throws IOException
{
105 for (char c
= 'a'; c
<= 'c'; c
++) {
106 byte[] row
= Bytes
.toBytes("ab" + c
);
108 long ts
= EnvironmentEdgeManager
.currentTime();
109 for (i
= 0; i
< 100; i
++) {
110 byte[] b
= Bytes
.toBytes(String
.format("%10d", i
));
111 for (j
= 0; j
< 100; j
++) {
112 Put put
= new Put(row
);
113 put
.setDurability(Durability
.SKIP_WAL
);
115 put
.addColumn(COLUMNS
[ThreadLocalRandom
.current().nextInt(COLUMNS
.length
)], b
, ts1
, b
);
125 public void testWideScanBatching() throws IOException
{
126 final int batch
= 256;
127 int inserted
= addWideContent(REGION
);
128 List
<Cell
> results
= new ArrayList
<>();
129 Scan scan
= new Scan();
133 scan
.readVersions(100);
134 scan
.setBatch(batch
);
135 try (InternalScanner s
= REGION
.getScanner(scan
)) {
140 more
= s
.next(results
);
142 LOG
.info("iteration #" + i
+ ", results.size=" + results
.size());
144 // assert that the result set is no larger
145 assertTrue(results
.size() <= batch
);
147 total
+= results
.size();
149 if (results
.size() > 0) {
150 // assert that all results are from the same row
151 byte[] row
= CellUtil
.cloneRow(results
.get(0));
152 for (Cell kv
: results
) {
153 assertTrue(Bytes
.equals(row
, CellUtil
.cloneRow(kv
)));
159 // trigger ChangedReadersObservers
160 Iterator
<KeyValueScanner
> scanners
=
161 ((RegionScannerImpl
) s
).storeHeap
.getHeap().iterator();
162 while (scanners
.hasNext()) {
163 StoreScanner ss
= (StoreScanner
) scanners
.next();
164 ss
.updateReaders(Collections
.emptyList(), Collections
.emptyList());
168 // assert that the scanner returned all values
169 LOG
.info("inserted " + inserted
+ ", scanned " + total
);
170 assertEquals(total
, inserted
);