// HBASE-24033 Add ut for loading the corrupt recovered hfiles (#1322)
// hbase.git / hbase-server / src / test / java / org / apache / hadoop / hbase / regionserver / TestRowTooBig.java
// blob 35067b1512fa4dd6f349b449674c60ffe9e3c5aa
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowTooBigException;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
 * Test case to check HRS throws {@link org.apache.hadoop.hbase.client.RowTooBigException}
 * when row size exceeds configured limits.
 */
45 @Category({RegionServerTests.class, MediumTests.class})
46 public class TestRowTooBig {
48 @ClassRule
49 public static final HBaseClassTestRule CLASS_RULE =
50 HBaseClassTestRule.forClass(TestRowTooBig.class);
52 private final static HBaseTestingUtility HTU = new HBaseTestingUtility();
53 private static Path rootRegionDir;
54 private static final TableDescriptorBuilder.ModifyableTableDescriptor TEST_TD =
55 new TableDescriptorBuilder.ModifyableTableDescriptor(
56 TableName.valueOf(TestRowTooBig.class.getSimpleName()));
58 @BeforeClass
59 public static void before() throws Exception {
60 HTU.startMiniCluster();
61 HTU.getConfiguration().setLong(HConstants.TABLE_MAX_ROWSIZE_KEY,
62 10 * 1024 * 1024L);
63 rootRegionDir = HTU.getDataTestDirOnTestFS("TestRowTooBig");
66 @AfterClass
67 public static void after() throws Exception {
68 HTU.shutdownMiniCluster();
71 /**
72 * Usecase:
73 * - create a row with 5 large cells (5 Mb each)
74 * - flush memstore but don't compact storefiles.
75 * - try to Get whole row.
77 * OOME happened before we actually get to reading results, but
78 * during seeking, as each StoreFile gets it's own scanner,
79 * and each scanner seeks after the first KV.
80 * @throws IOException
82 @Test(expected = RowTooBigException.class)
83 public void testScannersSeekOnFewLargeCells() throws IOException {
84 byte[] row1 = Bytes.toBytes("row1");
85 byte[] fam1 = Bytes.toBytes("fam1");
87 TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor = TEST_TD;
88 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor familyDescriptor =
89 new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(fam1);
90 if (tableDescriptor.hasColumnFamily(familyDescriptor.getName())) {
91 tableDescriptor.modifyColumnFamily(familyDescriptor);
92 } else {
93 tableDescriptor.setColumnFamily(familyDescriptor);
96 final HRegionInfo hri =
97 new HRegionInfo(tableDescriptor.getTableName(), HConstants.EMPTY_END_ROW,
98 HConstants.EMPTY_END_ROW);
99 HRegion region = HBaseTestingUtility.createRegionAndWAL(hri,
100 rootRegionDir, HTU.getConfiguration(), tableDescriptor);
101 try {
102 // Add 5 cells to memstore
103 for (int i = 0; i < 5 ; i++) {
104 Put put = new Put(row1);
106 byte[] value = new byte[5 * 1024 * 1024];
107 put.addColumn(fam1, Bytes.toBytes("col_" + i), value);
108 region.put(put);
109 region.flush(true);
112 Get get = new Get(row1);
113 region.get(get);
114 } finally {
115 HBaseTestingUtility.closeRegionAndWAL(region);
120 * Usecase:
122 * - create a row with 1M cells, 10 bytes in each
123 * - flush & run major compaction
124 * - try to Get whole row.
126 * OOME happened in StoreScanner.next(..).
128 * @throws IOException
130 @Test(expected = RowTooBigException.class)
131 public void testScanAcrossManySmallColumns() throws IOException {
132 byte[] row1 = Bytes.toBytes("row1");
133 byte[] fam1 = Bytes.toBytes("fam1");
135 TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor = TEST_TD;
136 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor hcd =
137 new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(fam1);
138 if (tableDescriptor.hasColumnFamily(hcd.getName())) {
139 tableDescriptor.modifyColumnFamily(hcd);
140 } else {
141 tableDescriptor.setColumnFamily(hcd);
144 final HRegionInfo hri =
145 new HRegionInfo(tableDescriptor.getTableName(), HConstants.EMPTY_END_ROW,
146 HConstants.EMPTY_END_ROW);
147 HRegion region = HBaseTestingUtility.createRegionAndWAL(hri,
148 rootRegionDir, HTU.getConfiguration(), tableDescriptor);
149 try {
150 // Add to memstore
151 for (int i = 0; i < 10; i++) {
152 Put put = new Put(row1);
153 for (int j = 0; j < 10 * 10000; j++) {
154 byte[] value = new byte[10];
155 put.addColumn(fam1, Bytes.toBytes("col_" + i + "_" + j), value);
157 region.put(put);
158 region.flush(true);
160 region.compact(true);
162 Get get = new Get(row1);
163 region.get(get);
164 } finally {
165 HBaseTestingUtility.closeRegionAndWAL(region);