/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
@Category({ RegionServerTests.class, SmallTests.class })
public class TestResettingCounters {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestResettingCounters.class);

  @Rule
  public TestName name = new TestName();
  @Test
  public void testResettingCounters() throws Exception {
    HBaseTestingUtil htu = new HBaseTestingUtil();
    Configuration conf = htu.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    byte[] table = Bytes.toBytes(name.getMethodName());
    byte[][] families = new byte[][] { Bytes.toBytes("family1"), Bytes.toBytes("family2"),
      Bytes.toBytes("family3") };
    int numQualifiers = 10;
    byte[][] qualifiers = new byte[numQualifiers][];
    for (int i = 0; i < numQualifiers; i++) {
      qualifiers[i] = Bytes.toBytes("qf" + i);
    }
    int numRows = 10;
    byte[][] rows = new byte[numRows][];
    for (int i = 0; i < numRows; i++) {
      rows[i] = Bytes.toBytes("r" + i);
    }
    TableDescriptorBuilder builder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf(table));
    for (byte[] family : families) {
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    }
    TableDescriptor tableDescriptor = builder.build();
    RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
    String testDir = htu.getDataTestDir() + "/TestResettingCounters/";
    Path path = new Path(testDir);
    if (fs.exists(path)) {
      if (!fs.delete(path, true)) {
        throw new IOException("Failed delete of " + path);
      }
    }
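    // Create a standalone HRegion with its own WAL directly on the test filesystem;
    // no mini-cluster is required for this test.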
    HRegion region = HBaseTestingUtil.createRegionAndWAL(hri, path, conf, tableDescriptor);
    try {
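      // Three Increment requests against the same row: "odd" touches the odd-numbered
      // qualifiers, "even" the even-numbered ones, and "all" touches every qualifier.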
      Increment odd = new Increment(rows[0]);
      odd.setDurability(Durability.SKIP_WAL);
      Increment even = new Increment(rows[0]);
      even.setDurability(Durability.SKIP_WAL);
      Increment all = new Increment(rows[0]);
      all.setDurability(Durability.SKIP_WAL);
      for (int i = 0; i < numQualifiers; i++) {
        if (i % 2 == 0) {
          even.addColumn(families[0], qualifiers[i], 1);
        } else {
          odd.addColumn(families[0], qualifiers[i], 1);
        }
        all.addColumn(families[0], qualifiers[i], 1);
      }
      // increment odd qualifiers 5 times and flush
      for (int i = 0; i < 5; i++) {
        region.increment(odd, HConstants.NO_NONCE, HConstants.NO_NONCE);
      }
      region.flush(true);
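      // The flush above moves the odd-qualifier counters out of the memstore into a store file;
      // later increments must read those persisted values back rather than start over from zero.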
      // increment even qualifiers 5 times
      for (int i = 0; i < 5; i++) {
        region.increment(even, HConstants.NO_NONCE, HConstants.NO_NONCE);
      }
      // increment all qualifiers, should have value=6 for all
      Result result = region.increment(all, HConstants.NO_NONCE, HConstants.NO_NONCE);
      assertEquals(numQualifiers, result.size());
      Cell[] kvs = result.rawCells();
      for (int i = 0; i < kvs.length; i++) {
        System.out.println(kvs[i].toString());
        assertTrue(CellUtil.matchingQualifier(kvs[i], qualifiers[i]));
        assertEquals(6, Bytes.toLong(CellUtil.cloneValue(kvs[i])));
      }
    } finally {
      HBaseTestingUtil.closeRegionAndWAL(region);
    }
  }
}