/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.io.DataInputBuffer;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;

/**
 * Test HBase Writables serializations
 */
@Category({ MiscTests.class, SmallTests.class })
public class TestSerialization {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestSerialization.class);

  @Test
  public void testKeyValue() throws Exception {
    final String name = "testKeyValue2";
    byte[] row = Bytes.toBytes(name);
    byte[] fam = Bytes.toBytes("fam");
    byte[] qf = Bytes.toBytes("qf");
    long ts = EnvironmentEdgeManager.currentTime();
    byte[] val = Bytes.toBytes("val");
    KeyValue kv = new KeyValue(row, fam, qf, ts, val);
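    // Round-trip the KeyValue through a DataOutput/DataInput pair and verify that the backing
    // bytes, offset and length all survive the serialization.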
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    KeyValueUtil.write(kv, dos);
    byte[] mb = baos.toByteArray();
    ByteArrayInputStream bais = new ByteArrayInputStream(mb);
    DataInputStream dis = new DataInputStream(bais);
    KeyValue deserializedKv = KeyValueUtil.create(dis);
    assertTrue(Bytes.equals(kv.getBuffer(), deserializedKv.getBuffer()));
    assertEquals(kv.getOffset(), deserializedKv.getOffset());
    assertEquals(kv.getLength(), deserializedKv.getLength());
  }

  @Test
  public void testCreateKeyValueInvalidNegativeLength() {

    KeyValue kv_0 = new KeyValue(Bytes.toBytes("myRow"), Bytes.toBytes("myCF"), // 51 bytes
      Bytes.toBytes("myQualifier"), 12345L, Bytes.toBytes("my12345"));

    KeyValue kv_1 = new KeyValue(Bytes.toBytes("myRow"), Bytes.toBytes("myCF"), // 49 bytes
      Bytes.toBytes("myQualifier"), 12345L, Bytes.toBytes("my123"));

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
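
    // Sanity check on the expected sizes (an assumption based on the standard KeyValue layout):
    // each entry is written as a 4-byte length prefix, a 4-byte key length, a 4-byte value length,
    // a 32-byte key (2 row length + 5 "myRow" + 1 family length + 4 "myCF" + 11 "myQualifier"
    // + 8 timestamp + 1 type) plus the value, i.e. 51 bytes for the 7-byte value and 49 bytes
    // for the 5-byte value, 100 bytes in total.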
    long l = 0;
    try {
      l = KeyValue.oswrite(kv_0, dos, false);
      l += KeyValue.oswrite(kv_1, dos, false);
      assertEquals(100L, l);
    } catch (IOException e) {
      fail("Unexpected IOException: " + e.getMessage());
    }

    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);

    try {
      KeyValueUtil.create(dis);
      assertTrue(kv_0.equals(kv_1));
    } catch (Exception e) {
      fail("Unexpected Exception: " + e.getMessage());
    }

    try {
      // even if we have a good kv now in dis we will just pass length with -1 for simplicity
      KeyValueUtil.create(-1, dis);
      fail("Expected corrupt stream");
    } catch (Exception e) {
      assertEquals("Failed read -1 bytes, stream corrupt?", e.getMessage());
    }
  }

  @Test
  public void testCompareFilter() throws Exception {
    Filter f =
      new RowFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    byte[] bytes = f.toByteArray();
    Filter ff = RowFilter.parseFrom(bytes);
    assertNotNull(ff);
  }

  @Test
  public void testTableDescriptor() throws Exception {
    final String name = "testTableDescriptor";
    TableDescriptor htd = createTableDescriptor(name);
    byte[] mb = TableDescriptorBuilder.toByteArray(htd);
    TableDescriptor deserializedHtd = TableDescriptorBuilder.parseFrom(mb);
    assertEquals(htd.getTableName(), deserializedHtd.getTableName());
  }

  /**
   * Test RegionInfo serialization
   */
  @Test
  public void testRegionInfo() throws Exception {
    RegionInfo hri = createRandomRegion("testRegionInfo");

    // test toByteArray()
    byte[] hrib = RegionInfo.toByteArray(hri);
    RegionInfo deserializedHri = RegionInfo.parseFrom(hrib);
    assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
    assertEquals(hri, deserializedHri);

    // test toDelimitedByteArray()
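    // (A delimited byte array is, as far as this test relies on it, a length-prefixed serialized
    // RegionInfo, so it can be read back out of a stream and several of them can be concatenated,
    // as testRegionInfos below shows. That reading of the format is an assumption, not something
    // this test asserts directly.)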
    hrib = RegionInfo.toDelimitedByteArray(hri);
    DataInputBuffer buf = new DataInputBuffer();
    try {
      buf.reset(hrib, hrib.length);
      deserializedHri = RegionInfo.parseFrom(buf);
      assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
      assertEquals(hri, deserializedHri);
    } finally {
      buf.close();
    }
  }

  @Test
  public void testRegionInfos() throws Exception {
    RegionInfo hri = createRandomRegion("testRegionInfos");
    byte[] triple = RegionInfo.toDelimitedByteArray(hri, hri, hri);
    List<RegionInfo> regions = RegionInfo.parseDelimitedFrom(triple, 0, triple.length);
    assertTrue(regions.size() == 3);
    assertTrue(regions.get(0).equals(regions.get(1)));
    assertTrue(regions.get(0).equals(regions.get(2)));
  }

  private RegionInfo createRandomRegion(final String name) {
    TableDescriptorBuilder tableDescriptorBuilder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf(name));
    String[] families = new String[] { "info", "anchor" };
    for (int i = 0; i < families.length; i++) {
      ColumnFamilyDescriptor columnFamilyDescriptor =
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(families[i])).build();
      tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    }
    TableDescriptor tableDescriptor = tableDescriptorBuilder.build();
    return RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
  }

  @Test
  public void testGet() throws Exception {
    byte[] row = Bytes.toBytes("row");
    byte[] fam = Bytes.toBytes("fam");
    byte[] qf1 = Bytes.toBytes("qf1");
    long ts = EnvironmentEdgeManager.currentTime();
    int maxVersions = 2;

    Get get = new Get(row);
    get.addColumn(fam, qf1);
    get.setTimeRange(ts, ts + 1);
    get.readVersions(maxVersions);

    ClientProtos.Get getProto = ProtobufUtil.toGet(get);
    Get desGet = ProtobufUtil.toGet(getProto);

    assertTrue(Bytes.equals(get.getRow(), desGet.getRow()));
    Set<byte[]> set = null;
    Set<byte[]> desSet = null;
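
    // Every column family and qualifier requested on the original Get should come back out of
    // the protobuf round trip unchanged.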
    for (Map.Entry<byte[], NavigableSet<byte[]>> entry : get.getFamilyMap().entrySet()) {
      assertTrue(desGet.getFamilyMap().containsKey(entry.getKey()));
      set = entry.getValue();
      desSet = desGet.getFamilyMap().get(entry.getKey());
      for (byte[] qualifier : set) {
        assertTrue(desSet.contains(qualifier));
      }
    }

    assertEquals(get.getMaxVersions(), desGet.getMaxVersions());
    TimeRange tr = get.getTimeRange();
    TimeRange desTr = desGet.getTimeRange();
    assertEquals(tr.getMax(), desTr.getMax());
    assertEquals(tr.getMin(), desTr.getMin());
  }

  @Test
  public void testScan() throws Exception {

    byte[] startRow = Bytes.toBytes("startRow");
    byte[] stopRow = Bytes.toBytes("stopRow");
    byte[] fam = Bytes.toBytes("fam");
    byte[] qf1 = Bytes.toBytes("qf1");
    long ts = EnvironmentEdgeManager.currentTime();
    int maxVersions = 2;

    Scan scan = new Scan().withStartRow(startRow).withStopRow(stopRow);
    scan.addColumn(fam, qf1);
    scan.setTimeRange(ts, ts + 1);
    scan.readVersions(maxVersions);

    ClientProtos.Scan scanProto = ProtobufUtil.toScan(scan);
    Scan desScan = ProtobufUtil.toScan(scanProto);

    assertTrue(Bytes.equals(scan.getStartRow(), desScan.getStartRow()));
    assertTrue(Bytes.equals(scan.getStopRow(), desScan.getStopRow()));
    assertEquals(scan.getCacheBlocks(), desScan.getCacheBlocks());
    Set<byte[]> set = null;
    Set<byte[]> desSet = null;
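
    // As in testGet above, the requested families and qualifiers should survive the round trip.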
    for (Map.Entry<byte[], NavigableSet<byte[]>> entry : scan.getFamilyMap().entrySet()) {
      assertTrue(desScan.getFamilyMap().containsKey(entry.getKey()));
      set = entry.getValue();
      desSet = desScan.getFamilyMap().get(entry.getKey());
      for (byte[] column : set) {
        assertTrue(desSet.contains(column));
      }
    }

    // Test filters are serialized properly.
    scan = new Scan().withStartRow(startRow);
    final String name = "testScan";
    byte[] prefix = Bytes.toBytes(name);
    scan.setFilter(new PrefixFilter(prefix));
    scanProto = ProtobufUtil.toScan(scan);
    desScan = ProtobufUtil.toScan(scanProto);
    Filter f = desScan.getFilter();
    assertTrue(f instanceof PrefixFilter);
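    // A stricter check could also compare the deserialized prefix bytes against the original,
    // e.g. Bytes.equals(prefix, ((PrefixFilter) f).getPrefix()), assuming PrefixFilter#getPrefix
    // is available in this version.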

    assertEquals(scan.getMaxVersions(), desScan.getMaxVersions());
    TimeRange tr = scan.getTimeRange();
    TimeRange desTr = desScan.getTimeRange();
    assertEquals(tr.getMax(), desTr.getMax());
    assertEquals(tr.getMin(), desTr.getMin());
  }

  protected static final int MAXVERSIONS = 3;
  protected final static byte[] fam1 = Bytes.toBytes("colfamily1");
  protected final static byte[] fam2 = Bytes.toBytes("colfamily2");
  protected final static byte[] fam3 = Bytes.toBytes("colfamily3");
  protected static final byte[][] COLUMNS = { fam1, fam2, fam3 };

  /**
   * Create a table of name <code>name</code> with {@link #COLUMNS} for families.
   * @param name Name to give table.
   * @return Table descriptor.
   */
  protected TableDescriptor createTableDescriptor(final String name) {
    return createTableDescriptor(name, MAXVERSIONS);
  }

  /**
   * Create a table of name <code>name</code> with {@link #COLUMNS} for families.
   * @param name     Name to give table.
   * @param versions How many versions to allow per column.
   * @return Table descriptor.
   */
  protected TableDescriptor createTableDescriptor(final String name, final int versions) {
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name));
    builder
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(fam1).setMaxVersions(versions)
        .setBlockCacheEnabled(false).build())
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(fam2).setMaxVersions(versions)
        .setBlockCacheEnabled(false).build())
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(fam3).setMaxVersions(versions)
        .setBlockCacheEnabled(false).build());
    return builder.build();
  }
}