HBASE-26921 Rewrite the counting cells part in TestMultiVersions (#4316)
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowTooBig.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.RowTooBigException;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test case to check that the region server throws
 * {@link org.apache.hadoop.hbase.client.RowTooBigException} when the row size exceeds
 * the configured limit.
 */
@Category({RegionServerTests.class, MediumTests.class})
public class TestRowTooBig {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestRowTooBig.class);

  private static final HBaseTestingUtil HTU = new HBaseTestingUtil();
  private static Path rootRegionDir;
  private static final TableDescriptor TEST_TD = TableDescriptorBuilder
    .newBuilder(TableName.valueOf(TestRowTooBig.class.getSimpleName())).build();

  @BeforeClass
  public static void before() throws Exception {
    HTU.startMiniCluster();
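    // Cap the maximum row size at 10 MB via HConstants.TABLE_MAX_ROWSIZE_KEY
    // ("hbase.table.max.rowsize"): a Get or Scan that materializes a larger row
    // should fail with RowTooBigException instead of risking an OOME.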
    HTU.getConfiguration().setLong(HConstants.TABLE_MAX_ROWSIZE_KEY, 10 * 1024 * 1024L);
    rootRegionDir = HTU.getDataTestDirOnTestFS("TestRowTooBig");
  }

  @AfterClass
  public static void after() throws Exception {
    HTU.shutdownMiniCluster();
  }

  /**
   * Use case:
   *  - create a row with 5 large cells (5 MB each)
   *  - flush the memstore, but don't compact the storefiles
   *  - try to Get the whole row.
   *
   * Without the limit, an OOME happened before we actually got to reading results,
   * already during seeking: each StoreFile gets its own scanner, and each scanner
   * seeks to the first KV.
   */
  @Test(expected = RowTooBigException.class)
  public void testScannersSeekOnFewLargeCells() throws IOException {
    byte[] row1 = Bytes.toBytes("row1");
    byte[] fam1 = Bytes.toBytes("fam1");

    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TEST_TD)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam1)).build();

    final RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
    HRegion region = HBaseTestingUtil.createRegionAndWAL(hri, rootRegionDir,
      HTU.getConfiguration(), tableDescriptor);
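    // The test drives the HRegion directly (no client round-trip), so the row-size
    // check is exercised inside the region server read path itself.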
    try {
      // Add 5 cells to memstore
      for (int i = 0; i < 5; i++) {
        Put put = new Put(row1);

        byte[] value = new byte[5 * 1024 * 1024];
        put.addColumn(fam1, Bytes.toBytes("col_" + i), value);
        region.put(put);
        region.flush(true);
      }
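
      // Five flushed storefiles now each hold one 5 MB cell of row1, so fetching
      // the whole row (~25 MB) exceeds the 10 MB cap set in before().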
      Get get = new Get(row1);
      region.get(get);
    } finally {
      HBaseTestingUtil.closeRegionAndWAL(region);
    }
  }

  /**
   * Use case:
   *  - create a row with 1M cells, 10 bytes each
   *  - flush & run a major compaction
   *  - try to Get the whole row.
   *
   * Without the limit, an OOME happened in StoreScanner.next(..).
   * @throws IOException
   */
  @Test(expected = RowTooBigException.class)
  public void testScanAcrossManySmallColumns() throws IOException {
    byte[] row1 = Bytes.toBytes("row1");
    byte[] fam1 = Bytes.toBytes("fam1");

    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TEST_TD)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam1)).build();

    final RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
    HRegion region = HBaseTestingUtil.createRegionAndWAL(hri, rootRegionDir,
      HTU.getConfiguration(), tableDescriptor);
    try {
      // Add to memstore
      for (int i = 0; i < 10; i++) {
        Put put = new Put(row1);
        for (int j = 0; j < 10 * 10000; j++) {
          byte[] value = new byte[10];
          put.addColumn(fam1, Bytes.toBytes("col_" + i + "_" + j), value);
        }
        region.put(put);
        region.flush(true);
      }
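
      // Major-compact the ten flushed files into a single storefile, so the whole
      // row is read back through one StoreScanner rather than per-file seeks.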
      region.compact(true);
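
      // 10 puts x 100,000 columns x 10-byte values is ~10 MB of cell values alone;
      // with per-cell key overhead the row comfortably exceeds the 10 MB cap.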
      Get get = new Get(row1);
      region.get(get);
    } finally {
      HBaseTestingUtil.closeRegionAndWAL(region);
    }
  }
}