/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.security.Key;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
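
/**
 * Verifies that store files written for a column family that declares an encryption algorithm
 * but no explicit key are still encrypted, each with a randomly generated data key.
 */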
@Category({RegionServerTests.class, MediumTests.class})
public class TestEncryptionRandomKeying {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestEncryptionRandomKeying.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static Configuration conf = TEST_UTIL.getConfiguration();
  private static TableDescriptorBuilder tdb;
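
  /**
   * Collects the on-disk paths of every store file of the test table by walking each store of
   * each region hosted on the region server serving the table's first region.
   */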
  private static List<Path> findStorefilePaths(TableName tableName) throws Exception {
    List<Path> paths = new ArrayList<>();
    for (Region region :
        TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegions(tdb.build().getTableName())) {
      for (HStore store : ((HRegion) region).getStores()) {
        for (HStoreFile storefile : store.getStorefiles()) {
          paths.add(storefile.getPath());
        }
      }
    }
    return paths;
  }
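
  /**
   * Opens the HFile at the given path and returns the encoded key from its encryption context,
   * or null if the file carries no data key.
   */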
  private static byte[] extractHFileKey(Path path) throws Exception {
    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
      new CacheConfig(conf), true, conf);
    try {
      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
      assertNotNull("Reader has a null crypto context", cryptoContext);
      Key key = cryptoContext.getKey();
      if (key == null) {
        return null;
      }
      return key.getEncoded();
    } finally {
      reader.close();
    }
  }
  @BeforeClass
  public static void setUp() throws Exception {
    conf.setInt("hfile.format.version", 3);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");

    // Create the table schema
    // Specify an encryption algorithm without a key
    tdb = TableDescriptorBuilder.newBuilder(TableName.valueOf("default",
      "TestEncryptionRandomKeying"));
    ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"));
    String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    columnFamilyDescriptorBuilder.setEncryptionType(algorithm);
    tdb.setColumnFamily(columnFamilyDescriptorBuilder.build());

    // Start the minicluster
    TEST_UTIL.startMiniCluster(1);

    // Create the test table
    TEST_UTIL.getAdmin().createTable(tdb.build());
    TEST_UTIL.waitTableAvailable(tdb.build().getTableName(), 5000);

    // Create a store file
    Table table = TEST_UTIL.getConnection().getTable(tdb.build().getTableName());
    try {
      table.put(new Put(Bytes.toBytes("testrow"))
        .addColumn(columnFamilyDescriptorBuilder.build().getName(),
          Bytes.toBytes("q"), Bytes.toBytes("value")));
    } finally {
      table.close();
    }
    TEST_UTIL.getAdmin().flush(tdb.build().getTableName());
  }
  @AfterClass
  public static void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }
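
  /**
   * The schema created in setUp() sets only an encryption algorithm and never a key, so every
   * flushed store file should have been encrypted with a key chosen at random by the server.
   */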
  @Test
  public void testRandomKeying() throws Exception {
    // Verify we have store file(s) with a random key
    final List<Path> initialPaths = findStorefilePaths(tdb.build().getTableName());
    assertTrue(initialPaths.size() > 0);
    for (Path path : initialPaths) {
      assertNotNull("Store file " + path + " is not encrypted", extractHFileKey(path));
    }
  }
}