/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
19 package org
.apache
.hadoop
.hbase
.util
;
21 import java
.io
.IOException
;
22 import java
.util
.Locale
;
24 import org
.apache
.commons
.lang3
.StringUtils
;
25 import org
.apache
.hadoop
.hbase
.CellComparator
;
26 import org
.apache
.yetus
.audience
.InterfaceAudience
;
27 import org
.apache
.yetus
.audience
.InterfaceStability
;
28 import org
.slf4j
.Logger
;
29 import org
.slf4j
.LoggerFactory
;
30 import org
.apache
.hadoop
.conf
.Configuration
;
31 import org
.apache
.hadoop
.fs
.FileSystem
;
32 import org
.apache
.hadoop
.fs
.Path
;
33 import org
.apache
.hadoop
.hbase
.Cell
;
34 import org
.apache
.hadoop
.hbase
.CellComparatorImpl
;
35 import org
.apache
.hadoop
.hbase
.CellUtil
;
36 import org
.apache
.hadoop
.hbase
.DoNotRetryIOException
;
37 import org
.apache
.hadoop
.hbase
.HBaseConfiguration
;
38 import org
.apache
.hadoop
.hbase
.HBaseInterfaceAudience
;
39 import org
.apache
.hadoop
.hbase
.io
.compress
.Compression
;
40 import org
.apache
.hadoop
.hbase
.io
.hfile
.HFileWriterImpl
;
41 import org
.apache
.hadoop
.hbase
.io
.hfile
.CacheConfig
;
42 import org
.apache
.hadoop
.hbase
.io
.hfile
.HFile
;
43 import org
.apache
.hadoop
.hbase
.io
.hfile
.HFileContext
;
44 import org
.apache
.hadoop
.hbase
.io
.hfile
.HFileContextBuilder
;
45 import org
.apache
.hadoop
.hbase
.io
.hfile
.HFileScanner
;
46 import org
.apache
.hadoop
.io
.compress
.Compressor
;
/**
 * Compression validation test. Checks compression is working. Be sure to run
 * on every node in your cluster.
 */
52 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience
.TOOLS
)
53 @InterfaceStability.Evolving
54 public class CompressionTest
{
  /** Class logger (SLF4J); used to report unknown or uninstantiable codecs. */
  private static final Logger LOG = LoggerFactory.getLogger(CompressionTest.class);
57 public static boolean testCompression(String codec
) {
58 codec
= codec
.toLowerCase(Locale
.ROOT
);
60 Compression
.Algorithm a
;
63 a
= Compression
.getCompressionAlgorithmByName(codec
);
64 } catch (IllegalArgumentException e
) {
65 LOG
.warn("Codec type: " + codec
+ " is not known");
72 } catch (IOException ignored
) {
73 LOG
.warn("Can't instantiate codec: " + codec
, ignored
);
78 private final static Boolean
[] compressionTestResults
79 = new Boolean
[Compression
.Algorithm
.values().length
];
81 for (int i
= 0 ; i
< compressionTestResults
.length
; ++i
) {
82 compressionTestResults
[i
] = null;
86 public static void testCompression(Compression
.Algorithm algo
)
88 if (compressionTestResults
[algo
.ordinal()] != null) {
89 if (compressionTestResults
[algo
.ordinal()]) {
90 return ; // already passed test, dont do it again.
93 throw new DoNotRetryIOException("Compression algorithm '" + algo
.getName() + "'" +
94 " previously failed test.");
99 Compressor c
= algo
.getCompressor();
100 algo
.returnCompressor(c
);
101 compressionTestResults
[algo
.ordinal()] = true; // passes
102 } catch (Throwable t
) {
103 compressionTestResults
[algo
.ordinal()] = false; // failure
104 throw new DoNotRetryIOException(t
);
  /** Default location for the temporary smoke-test HFile (working-directory-relative). */
  protected static Path path = new Path(".hfile-comp-test");
110 public static void usage() {
113 "Usage: CompressionTest <path> " +
114 StringUtils
.join( Compression
.Algorithm
.values(), "|").toLowerCase(Locale
.ROOT
) +
117 " hbase " + CompressionTest
.class + " file:///tmp/testfile gz\n");
121 public static void doSmokeTest(FileSystem fs
, Path path
, String codec
)
123 Configuration conf
= HBaseConfiguration
.create();
124 HFileContext context
= new HFileContextBuilder()
125 .withCompression(HFileWriterImpl
.compressionByName(codec
)).build();
126 HFile
.Writer writer
= HFile
.getWriterFactoryNoCache(conf
)
128 .withFileContext(context
)
130 // Write any-old Cell...
131 final byte [] rowKey
= Bytes
.toBytes("compressiontestkey");
132 Cell c
= CellUtil
.createCell(rowKey
, Bytes
.toBytes("compressiontestval"));
134 writer
.appendFileInfo(Bytes
.toBytes("compressioninfokey"), Bytes
.toBytes("compressioninfoval"));
137 HFile
.Reader reader
= HFile
.createReader(fs
, path
, CacheConfig
.DISABLED
, true, conf
);
139 reader
.loadFileInfo();
140 HFileScanner scanner
= reader
.getScanner(false, true);
141 scanner
.seekTo(); // position to the start of file
142 // Scanner does not do Cells yet. Do below for now till fixed.
143 cc
= scanner
.getCell();
144 if (CellComparator
.getInstance().compareRows(c
, cc
) != 0) {
145 throw new Exception("Read back incorrect result: " + c
.toString() + " vs " + cc
.toString());
152 public static void main(String
[] args
) throws Exception
{
153 if (args
.length
!= 2) {
158 Configuration conf
= new Configuration();
159 Path path
= new Path(args
[0]);
160 FileSystem fs
= path
.getFileSystem(conf
);
161 if (fs
.exists(path
)) {
162 System
.err
.println("The specified path exists, aborting!");
167 doSmokeTest(fs
, path
, args
[1]);
169 fs
.delete(path
, false);
171 System
.out
.println("SUCCESS");