/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
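
/**
 * Scans a local region with explicit-column and wildcard {@link Scan}s after loading random
 * cells, interleaving random flushes and compactions, and checks that each scan returns exactly
 * the cells that were recorded as written.
 */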
@Category({RegionServerTests.class, MediumTests.class})
public class TestColumnSeeking {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestColumnSeeking.class);

  @Rule public TestName name = new TestName();
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  private static final Logger LOG = LoggerFactory.getLogger(TestColumnSeeking.class);
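
  /**
   * Writes random cells so that the same row/column/timestamp may be put more than once with
   * different values, then verifies that explicit-column scans and a wildcard scan each return
   * exactly the expected cell set, with flushes and compactions triggered at random in between.
   */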
  @SuppressWarnings("unchecked")
  @Test
  public void testDuplicateVersions() throws IOException {
    String family = "Family";
    byte[] familyBytes = Bytes.toBytes("Family");
    TableName table = TableName.valueOf(name.getMethodName());

    ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor familyDescriptor =
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(familyBytes)
            .setMaxVersions(1000);
    familyDescriptor.setMaxVersions(3);
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
        new TableDescriptorBuilder.ModifyableTableDescriptor(table);

    tableDescriptor.setColumnFamily(familyDescriptor);
    HRegionInfo info = new HRegionInfo(table, null, null, false);
    // Set this so that the archiver writes to the temp dir as well.
    HRegion region = TEST_UTIL.createLocalHRegion(info, tableDescriptor);

    List<String> rows = generateRandomWords(10, "row");
    List<String> allColumns = generateRandomWords(10, "column");
    List<String> values = generateRandomWords(100, "value");

    long maxTimestamp = 2;
    double selectPercent = 0.5;
    int numberOfTests = 5;
    double flushPercentage = 0.2;
    double minorPercentage = 0.2;
    double majorPercentage = 0.2;
    double putPercentage = 0.2;

    HashMap<String, KeyValue> allKVMap = new HashMap<>();

    HashMap<String, KeyValue>[] kvMaps = new HashMap[numberOfTests];
    ArrayList<String>[] columnLists = new ArrayList[numberOfTests];

    for (int i = 0; i < numberOfTests; i++) {
      kvMaps[i] = new HashMap<>();
      columnLists[i] = new ArrayList<>();
      for (String column : allColumns) {
        if (Math.random() < selectPercent) {
          columnLists[i].add(column);
        }
      }
    }

    for (String value : values) {
      for (String row : rows) {
        Put p = new Put(Bytes.toBytes(row));
        p.setDurability(Durability.SKIP_WAL);
        for (String column : allColumns) {
          for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
            KeyValue kv =
                KeyValueTestUtil.create(row, family, column, timestamp, value);
            if (Math.random() < putPercentage) {
              p.add(kv);
              allKVMap.put(kv.getKeyString(), kv);
              for (int i = 0; i < numberOfTests; i++) {
                if (columnLists[i].contains(column)) {
                  kvMaps[i].put(kv.getKeyString(), kv);
                }
              }
            }
          }
        }
        region.put(p);

        if (Math.random() < flushPercentage) {
          LOG.info("Flushing... ");
          region.flush(true);
        }

        if (Math.random() < minorPercentage) {
          LOG.info("Minor compacting... ");
          region.compact(false);
        }

        if (Math.random() < majorPercentage) {
          LOG.info("Major compacting... ");
          region.compact(true);
        }
      }
    }
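
    // Each explicit-column scan must return exactly the cells recorded for its column subset;
    // the final iteration scans all columns and must return everything in allKVMap.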
    for (int i = 0; i < numberOfTests + 1; i++) {
      Collection<KeyValue> kvSet;
      Scan scan = new Scan();
      scan.setMaxVersions();
      if (i < numberOfTests) {
        if (columnLists[i].isEmpty()) continue; // HBASE-7700
        kvSet = kvMaps[i].values();
        for (String column : columnLists[i]) {
          scan.addColumn(familyBytes, Bytes.toBytes(column));
        }
        LOG.info("ExplicitColumns scanner");
        LOG.info("Columns: " + columnLists[i].size() + " Keys: " + kvSet.size());
      } else {
        kvSet = allKVMap.values();
        LOG.info("Wildcard scanner");
        LOG.info("Columns: " + allColumns.size() + " Keys: " + kvSet.size());
      }
      InternalScanner scanner = region.getScanner(scan);
      List<Cell> results = new ArrayList<>();
      while (scanner.next(results)) {
        // Keep calling next(); cells accumulate in results until the scanner is exhausted.
      }
      assertEquals(kvSet.size(), results.size());
      assertTrue(KeyValueTestUtil.containsIgnoreMvccVersion(results, kvSet));
    }

    HBaseTestingUtility.closeRegionAndWAL(region);

    HBaseTestingUtility.closeRegionAndWAL(region);
  }
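
  /**
   * Same verification as {@link #testDuplicateVersions()}, but with a single value per cell and a
   * much larger set of qualifiers, which is intended to exercise reseeking between columns during
   * explicit-column scans.
   */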
  @SuppressWarnings("unchecked")
  @Test
  public void testReseeking() throws IOException {
    String family = "Family";
    byte[] familyBytes = Bytes.toBytes("Family");
    TableName table = TableName.valueOf(name.getMethodName());

    TableDescriptorBuilder tableDescriptorBuilder =
        TableDescriptorBuilder.newBuilder(table);
    ColumnFamilyDescriptor columnFamilyDescriptor =
        ColumnFamilyDescriptorBuilder
            .newBuilder(Bytes.toBytes(family))
            .setMaxVersions(3).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);

    HRegionInfo info = new HRegionInfo(table, null, null, false);
    HRegion region = TEST_UTIL.createLocalHRegion(info, tableDescriptorBuilder.build());

    List<String> rows = generateRandomWords(10, "row");
    List<String> allColumns = generateRandomWords(100, "column");

    long maxTimestamp = 2;
    double selectPercent = 0.5;
    int numberOfTests = 5;
    double flushPercentage = 0.2;
    double minorPercentage = 0.2;
    double majorPercentage = 0.2;
    double putPercentage = 0.2;

    HashMap<String, KeyValue> allKVMap = new HashMap<>();

    HashMap<String, KeyValue>[] kvMaps = new HashMap[numberOfTests];
    ArrayList<String>[] columnLists = new ArrayList[numberOfTests];
    String valueString = "Value";

    for (int i = 0; i < numberOfTests; i++) {
      kvMaps[i] = new HashMap<>();
      columnLists[i] = new ArrayList<>();
      for (String column : allColumns) {
        if (Math.random() < selectPercent) {
          columnLists[i].add(column);
        }
      }
    }

    for (String row : rows) {
      Put p = new Put(Bytes.toBytes(row));
      p.setDurability(Durability.SKIP_WAL);
      for (String column : allColumns) {
        for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
          KeyValue kv =
              KeyValueTestUtil.create(row, family, column, timestamp, valueString);
          if (Math.random() < putPercentage) {
            p.add(kv);
            allKVMap.put(kv.getKeyString(), kv);
            for (int i = 0; i < numberOfTests; i++) {
              if (columnLists[i].contains(column)) {
                kvMaps[i].put(kv.getKeyString(), kv);
              }
            }
          }
        }
      }
      region.put(p);

      if (Math.random() < flushPercentage) {
        LOG.info("Flushing... ");
        region.flush(true);
      }

      if (Math.random() < minorPercentage) {
        LOG.info("Minor compacting... ");
        region.compact(false);
      }

      if (Math.random() < majorPercentage) {
        LOG.info("Major compacting... ");
        region.compact(true);
      }
    }
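
    // As above, compare each explicit-column scan against the recorded column subset and the
    // wildcard scan against allKVMap.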
    for (int i = 0; i < numberOfTests + 1; i++) {
      Collection<KeyValue> kvSet;
      Scan scan = new Scan();
      scan.setMaxVersions();
      if (i < numberOfTests) {
        if (columnLists[i].isEmpty()) continue; // HBASE-7700
        kvSet = kvMaps[i].values();
        for (String column : columnLists[i]) {
          scan.addColumn(familyBytes, Bytes.toBytes(column));
        }
        LOG.info("ExplicitColumns scanner");
        LOG.info("Columns: " + columnLists[i].size() + " Keys: " + kvSet.size());
      } else {
        kvSet = allKVMap.values();
        LOG.info("Wildcard scanner");
        LOG.info("Columns: " + allColumns.size() + " Keys: " + kvSet.size());
      }
      InternalScanner scanner = region.getScanner(scan);
      List<Cell> results = new ArrayList<>();
      while (scanner.next(results)) {
        // Keep calling next(); cells accumulate in results until the scanner is exhausted.
      }
      assertEquals(kvSet.size(), results.size());
      assertTrue(KeyValueTestUtil.containsIgnoreMvccVersion(results, kvSet));
    }

    HBaseTestingUtility.closeRegionAndWAL(region);
  }
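
  /**
   * Returns up to {@code numberOfWords} distinct random lowercase words of 1-5 characters,
   * with {@code suffix} appended to each word when it is non-null.
   */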
  List<String> generateRandomWords(int numberOfWords, String suffix) {
    Set<String> wordSet = new HashSet<>();
    for (int i = 0; i < numberOfWords; i++) {
      int lengthOfWords = (int) (Math.random() * 5) + 1;
      char[] wordChar = new char[lengthOfWords];
      for (int j = 0; j < wordChar.length; j++) {
        wordChar[j] = (char) (Math.random() * 26 + 97);
      }

      String word;
      if (suffix == null) {
        word = new String(wordChar);
      } else {
        word = new String(wordChar) + suffix;
      }
      wordSet.add(word);
    }
    List<String> wordList = new ArrayList<>(wordSet);