#! /usr/bin/env bash
#
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# *     http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
# The hbase command script. Based on the hadoop command script, putting
# hbase classes, libs and configurations ahead of hadoop's.
#
# TODO: Narrow the amount of duplicated code.
#
# Environment Variables:
#
#   JAVA_HOME                The java implementation to use.
#
#   HBASE_CLASSPATH          Extra Java CLASSPATH entries.
#
#   HBASE_CLASSPATH_PREFIX   Extra Java CLASSPATH entries that should be
#                            prefixed to the system classpath.
#
#   HBASE_HEAPSIZE           The maximum amount of heap to use.
#                            Default is unset and uses the JVM's default setting
#                            (usually 1/4th of the available memory).
#
#   HBASE_LIBRARY_PATH       HBase additions to JAVA_LIBRARY_PATH for adding
#                            native libraries.
#
#   HBASE_OPTS               Extra Java runtime options.
#
#   HBASE_CONF_DIR           Alternate conf dir. Default is ${HBASE_HOME}/conf.
#
#   HBASE_ROOT_LOGGER        The root appender. Default is INFO,console
#
#   JRUBY_HOME               JRuby path: $JRUBY_HOME/lib/jruby.jar should exist.
#                            Defaults to the jar packaged with HBase.
#
#   JRUBY_OPTS               Extra options (eg '--1.9') passed to the JRuby runtime
#                            used by the hbase shell. Empty by default.
#
#   HBASE_SHELL_OPTS         Extra options passed to the hbase shell.
#                            Empty by default.
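#
# Illustrative invocation (the values below are examples, not defaults):
#   HBASE_CONF_DIR=/etc/hbase/conf HBASE_HEAPSIZE=4G bin/hbase shell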
bin=`dirname "$0"`
bin=`cd "$bin">/dev/null; pwd`

# This will set HBASE_HOME, etc.
. "$bin"/hbase-config.sh

cygwin=false
case "`uname`" in
CYGWIN*) cygwin=true;;
esac

# Detect if we are in hbase sources dir
in_dev_env=false
if [ -d "${HBASE_HOME}/target" ]; then
  in_dev_env=true
fi

# Detect if we are in the omnibus tarball
in_omnibus_tarball="false"
if [ -f "${HBASE_HOME}/bin/hbase-daemons.sh" ]; then
  in_omnibus_tarball="true"
fi

read -d '' options_string << EOF
Options:
  --config DIR         Configuration directory to use. Default: ./conf
  --hosts HOSTS        Override the list in 'regionservers' file
  --auth-as-server     Authenticate to ZooKeeper using servers configuration
  --internal-classpath Skip attempting to use client facing jars (WARNING: unstable results between versions)
EOF
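
# For example (illustrative), the shell can be started against an alternate
# configuration directory with:
#   bin/hbase --config /etc/hbase/conf shell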
# if no args specified, show usage
if [ $# = 0 ]; then
  echo "Usage: hbase [<options>] <command> [<args>]"
  echo "$options_string"
  echo ""
  echo "Commands:"
  echo "Some commands take arguments. Pass no args or -h for usage."
  echo "  shell            Run the HBase shell"
  echo "  hbck             Run the HBase 'fsck' tool. Defaults to read-only hbck1."
  echo "                   Pass '-j /path/to/HBCK2.jar' to run hbase-2.x HBCK2."
  echo "  snapshot         Tool for managing snapshots"
  if [ "${in_omnibus_tarball}" = "true" ]; then
    echo "  wal              Write-ahead-log analyzer"
    echo "  hfile            Store file analyzer"
    echo "  zkcli            Run the ZooKeeper shell"
    echo "  master           Run an HBase HMaster node"
    echo "  regionserver     Run an HBase HRegionServer node"
    echo "  zookeeper        Run a ZooKeeper server"
    echo "  rest             Run an HBase REST server"
    echo "  thrift           Run the HBase Thrift server"
    echo "  thrift2          Run the HBase Thrift2 server"
    echo "  clean            Run the HBase clean up script"
  fi
  echo "  classpath        Dump hbase CLASSPATH"
  echo "  mapredcp         Dump CLASSPATH entries required by mapreduce"
  echo "  pe               Run PerformanceEvaluation"
  echo "  ltt              Run LoadTestTool"
  echo "  canary           Run the Canary tool"
  echo "  version          Print the version"
  echo "  backup           Backup tables for recovery"
  echo "  restore          Restore tables from existing backup image"
  echo "  completebulkload Run BulkLoadHFiles tool"
  echo "  regionsplitter   Run RegionSplitter tool"
  echo "  rowcounter       Run RowCounter tool"
  echo "  cellcounter      Run CellCounter tool"
  echo "  pre-upgrade      Run Pre-Upgrade validator tool"
  echo "  hbtop            Run HBTop tool"
  echo "  CLASSNAME        Run the class named CLASSNAME"
  exit 1
fi
# get arguments
COMMAND=$1
shift

JAVA=$JAVA_HOME/bin/java

# override default settings for this command, if applicable
if [ -f "$HBASE_HOME/conf/hbase-env-$COMMAND.sh" ]; then
  . "$HBASE_HOME/conf/hbase-env-$COMMAND.sh"
fi

add_size_suffix() {
  # add an 'm' suffix if the argument is missing one, otherwise use what's there
  local val="$1"
  local lastchar=${val: -1}
  if [[ "mMgG" == *$lastchar* ]]; then
    echo $val
  else
    echo ${val}m
  fi
}
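
# For example, add_size_suffix 4G yields "4G" while add_size_suffix 512
# yields "512m" (illustrative values).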
if [[ -n "$HBASE_HEAPSIZE" ]]; then
  JAVA_HEAP_MAX="-Xmx$(add_size_suffix $HBASE_HEAPSIZE)"
fi

if [[ -n "$HBASE_OFFHEAPSIZE" ]]; then
  JAVA_OFFHEAP_MAX="-XX:MaxDirectMemorySize=$(add_size_suffix $HBASE_OFFHEAPSIZE)"
fi

# so that filenames w/ spaces are handled correctly in loops below
ORIG_IFS=$IFS
IFS=

# CLASSPATH initially contains $HBASE_CONF_DIR
CLASSPATH="${HBASE_CONF_DIR}"
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar

add_to_cp_if_exists() {
  if [ -d "$@" ]; then
    CLASSPATH=${CLASSPATH}:"$@"
  fi
}
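
# For example, add_to_cp_if_exists "${HBASE_HOME}/some/extra/dir" appends that
# directory to CLASSPATH only if it exists (hypothetical path, for illustration).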
# For releases, add hbase & webapps to CLASSPATH
# Webapps must come first else it messes up Jetty
if [ -d "$HBASE_HOME/hbase-webapps" ]; then
  add_to_cp_if_exists "${HBASE_HOME}"
fi
# add if we are in a dev environment
if [ -d "$HBASE_HOME/hbase-server/target/hbase-webapps" ]; then
  if [ "$COMMAND" = "thrift" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-thrift/target"
  elif [ "$COMMAND" = "thrift2" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-thrift/target"
  elif [ "$COMMAND" = "rest" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-rest/target"
  else
    add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target"
    # Needed for GetJavaProperty check below
    add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target/classes"
  fi
fi

# If available, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH.
# Allow this functionality to be disabled.
if [ "$HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP" != "true" ] ; then
  HADOOP_IN_PATH=$(PATH="${HADOOP_HOME:-${HADOOP_PREFIX}}/bin:$PATH" which hadoop 2>/dev/null)
fi
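
# For example (illustrative), the Hadoop lookup can be skipped entirely with:
#   HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP=true bin/hbase classpath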
# Add libs to CLASSPATH
declare shaded_jar

if [ "${INTERNAL_CLASSPATH}" != "true" ]; then
  # find our shaded jars
  declare shaded_client
  declare shaded_client_byo_hadoop
  declare shaded_mapreduce
  for f in "${HBASE_HOME}"/lib/shaded-clients/hbase-shaded-client*.jar; do
    if [[ "${f}" =~ byo-hadoop ]]; then
      shaded_client_byo_hadoop="${f}"
    else
      shaded_client="${f}"
    fi
  done
  for f in "${HBASE_HOME}"/lib/shaded-clients/hbase-shaded-mapreduce*.jar; do
    shaded_mapreduce="${f}"
  done

  # If the command can use our shaded client, use it
  declare -a commands_in_client_jar=("classpath" "version" "hbtop")
  for c in "${commands_in_client_jar[@]}"; do
    if [ "${COMMAND}" = "${c}" ]; then
      if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_client_byo_hadoop}"
      else
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_client}"
      fi
      break
    fi
  done

  # If the command needs our shaded mapreduce jar, use it
  # N.B. "mapredcp" is not included here because in the shaded case it skips our built classpath
  declare -a commands_in_mr_jar=("hbck" "snapshot" "canary" "regionsplitter" "pre-upgrade")
  for c in "${commands_in_mr_jar[@]}"; do
    if [ "${COMMAND}" = "${c}" ]; then
      # If we didn't find a jar above, this will just be blank and the
      # check below will then default back to the internal classpath.
      shaded_jar="${shaded_mapreduce}"
      break
    fi
  done

  # Some commands can only use the shaded mapreduce jar when we'll get a full hadoop classpath at runtime
  if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
    declare -a commands_in_mr_need_hadoop=("backup" "restore" "rowcounter" "cellcounter")
    for c in "${commands_in_mr_need_hadoop[@]}"; do
      if [ "${COMMAND}" = "${c}" ]; then
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_mapreduce}"
        break
      fi
    done
  fi
fi
if [ -n "${shaded_jar}" ] && [ -f "${shaded_jar}" ]; then
  CLASSPATH="${CLASSPATH}:${shaded_jar}"
# fall through to grabbing all the lib jars and hope we're in the omnibus tarball
#
# N.B. shell specifically can't rely on the shaded artifacts because RSGroups is only
# available as non-shaded
#
# N.B. pe and ltt can't easily rely on shaded artifacts because they live in hbase-mapreduce:test-jar
# and need some other jars that haven't been relocated. Currently enumerating that list
# is too hard to be worth it.
else
  for f in $HBASE_HOME/lib/*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  # make it easier to check for shaded/not later on.
  shaded_jar=""
fi

for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
  if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
     [[ ! "${f}" =~ ^.*/htrace-core\.jar$ ]] && \
     [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
    CLASSPATH="${CLASSPATH}:${f}"
  fi
done
# default log directory & file
if [ "$HBASE_LOG_DIR" = "" ]; then
  HBASE_LOG_DIR="$HBASE_HOME/logs"
fi
if [ "$HBASE_LOGFILE" = "" ]; then
  HBASE_LOGFILE='hbase.log'
fi

function append_path() {
  if [ -z "$1" ]; then
    echo "$2"
  else
    echo "$1:$2"
  fi
}
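
# For example, append_path "" "/some/dir" prints "/some/dir", while
# append_path "/a/b" "/some/dir" prints "/a/b:/some/dir" (illustrative paths).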
JAVA_PLATFORM=""

# if HBASE_LIBRARY_PATH is defined let's use it as first or second option
if [ "$HBASE_LIBRARY_PATH" != "" ]; then
  JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "$HBASE_LIBRARY_PATH")
fi

# If configured and available, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH
if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
  # If built hbase, temporarily add hbase-server*.jar to classpath for GetJavaProperty
  # Exclude hbase-server*-tests.jar
  temporary_cp=
  for f in "${HBASE_HOME}"/lib/hbase-server*.jar; do
    if [[ ! "${f}" =~ ^.*\-tests\.jar$ ]]; then
      temporary_cp=":$f"
    fi
  done
  HADOOP_JAVA_LIBRARY_PATH=$(HADOOP_CLASSPATH="$CLASSPATH${temporary_cp}" "${HADOOP_IN_PATH}" \
    org.apache.hadoop.hbase.util.GetJavaProperty java.library.path)
  if [ -n "$HADOOP_JAVA_LIBRARY_PATH" ]; then
    JAVA_LIBRARY_PATH=$(append_path "${JAVA_LIBRARY_PATH}" "$HADOOP_JAVA_LIBRARY_PATH")
  fi
  CLASSPATH=$(append_path "${CLASSPATH}" "$(${HADOOP_IN_PATH} classpath 2>/dev/null)")
else
  # Otherwise, if we're providing Hadoop we should include htrace 3 if we were built with a version that needs it.
  for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/htrace-core-3*.jar "${HBASE_HOME}"/lib/client-facing-thirdparty/htrace-core.jar; do
    if [ -f "${f}" ]; then
      CLASSPATH="${CLASSPATH}:${f}"
      break
    fi
  done
  # Some commands require special handling when using shaded jars. For these cases, we rely on hbase-shaded-mapreduce
  # instead of hbase-shaded-client* because we make use of some IA.Private classes that aren't in the latter. However,
  # we don't invoke them using the "hadoop jar" command so we need to ensure there are some Hadoop classes available
  # when we're not doing runtime hadoop classpath lookup.
  #
  # Luckily the set of classes we need is packaged in the shaded-client.
  for c in "${commands_in_mr_jar[@]}"; do
    if [ "${COMMAND}" = "${c}" ] && [ -n "${shaded_jar}" ]; then
      CLASSPATH="${CLASSPATH}:${shaded_client:?We could not find the shaded client jar even though we did find the shaded MR jar. For command ${COMMAND} we need both. Please use --internal-classpath as a workaround.}"
      break
    fi
  done
fi
# Add user-specified CLASSPATH last
if [ "$HBASE_CLASSPATH" != "" ]; then
  CLASSPATH=${CLASSPATH}:${HBASE_CLASSPATH}
fi

# Add user-specified CLASSPATH prefix first
if [ "$HBASE_CLASSPATH_PREFIX" != "" ]; then
  CLASSPATH=${HBASE_CLASSPATH_PREFIX}:${CLASSPATH}
fi

# cygwin path translation
if $cygwin; then
  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  HBASE_HOME=`cygpath -d "$HBASE_HOME"`
  HBASE_LOG_DIR=`cygpath -d "$HBASE_LOG_DIR"`
fi

if [ -d "${HBASE_HOME}/build/native" -o -d "${HBASE_HOME}/lib/native" ]; then
  if [ -z $JAVA_PLATFORM ]; then
    JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
  fi
  if [ -d "$HBASE_HOME/build/native" ]; then
    JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "${HBASE_HOME}/build/native/${JAVA_PLATFORM}/lib")
  fi

  if [ -d "${HBASE_HOME}/lib/native" ]; then
    JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "${HBASE_HOME}/lib/native/${JAVA_PLATFORM}")
  fi
fi

# cygwin path translation
if $cygwin; then
  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
fi

# restore ordinary behaviour
unset IFS

# Set the right GC options based on what we are running
declare -a server_cmds=("master" "regionserver" "thrift" "thrift2" "rest" "avro" "zookeeper")
for cmd in ${server_cmds[@]}; do
  if [[ $cmd == $COMMAND ]]; then
    server=true
    break
  fi
done
if [[ $server ]]; then
  HBASE_OPTS="$HBASE_OPTS $SERVER_GC_OPTS"
else
  HBASE_OPTS="$HBASE_OPTS $CLIENT_GC_OPTS"
fi
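
# SERVER_GC_OPTS and CLIENT_GC_OPTS are typically set in conf/hbase-env.sh;
# for example (illustrative only), SERVER_GC_OPTS="-verbose:gc" would be picked
# up here for the server commands listed above.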
if [ "$AUTH_AS_SERVER" == "true" ] || [ "$COMMAND" = "hbck" ]; then
  if [ -n "$HBASE_SERVER_JAAS_OPTS" ]; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_SERVER_JAAS_OPTS"
  else
    HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
  fi
fi

# check if the command needs jline
declare -a jline_cmds=("zkcli" "org.apache.hadoop.hbase.zookeeper.ZKMainServer")
for cmd in "${jline_cmds[@]}"; do
  if [[ $cmd == "$COMMAND" ]]; then
    jline_needed=true
    break
  fi
done
# for jruby
# (1) for the commands which need jruby (see jruby_cmds defined below)
#     A. when JRUBY_HOME is specified explicitly, eg. export JRUBY_HOME=/usr/local/share/jruby
#        CLASSPATH and HBASE_OPTS are updated according to the specified JRUBY_HOME
#     B. when JRUBY_HOME is not specified explicitly
#        add the jruby packaged with HBase to CLASSPATH
# (2) for other commands, do nothing
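#
# For example (illustrative): JRUBY_HOME=/usr/local/share/jruby bin/hbase shell
# runs the shell against that JRuby install instead of the bundled one.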
# check if the command needs jruby
declare -a jruby_cmds=("shell" "org.jruby.Main")
for cmd in "${jruby_cmds[@]}"; do
  if [[ $cmd == "$COMMAND" ]]; then
    jruby_needed=true
    break
  fi
done

add_maven_deps_to_classpath() {
  f="${HBASE_HOME}/hbase-build-configuration/target/$1"

  if [ ! -f "${f}" ]; then
    echo "As this is a development environment, we need ${f} to be generated from maven (command: mvn install -DskipTests)"
    exit 1
  fi
  CLASSPATH=${CLASSPATH}:$(cat "${f}")
}

add_jdk11_deps_to_classpath() {
  for f in ${HBASE_HOME}/lib/jdk11/*; do
    if [ -f "${f}" ]; then
      CLASSPATH="${CLASSPATH}:${f}"
    fi
  done
}
# Add the development env class path stuff
if $in_dev_env; then
  add_maven_deps_to_classpath "cached_classpath.txt"

  if [[ $jline_needed ]]; then
    add_maven_deps_to_classpath "cached_classpath_jline.txt"
  elif [[ $jruby_needed ]]; then
    add_maven_deps_to_classpath "cached_classpath_jruby.txt"
  fi
fi

# the command needs jruby
if [[ $jruby_needed ]]; then
  if [ "$JRUBY_HOME" != "" ]; then  # JRUBY_HOME is specified explicitly, eg. export JRUBY_HOME=/usr/local/share/jruby
    # add jruby.jar into CLASSPATH
    CLASSPATH="$JRUBY_HOME/lib/jruby.jar:$CLASSPATH"

    # add jruby to HBASE_OPTS
    HBASE_OPTS="$HBASE_OPTS -Djruby.home=$JRUBY_HOME -Djruby.lib=$JRUBY_HOME/lib"

  else  # JRUBY_HOME is not specified explicitly
    if ! $in_dev_env; then  # not in dev environment
      # add the jruby packaged with HBase to CLASSPATH
      JRUBY_PACKAGED_WITH_HBASE="$HBASE_HOME/lib/ruby/*.jar"
      for jruby_jar in $JRUBY_PACKAGED_WITH_HBASE; do
        CLASSPATH=$jruby_jar:$CLASSPATH;
      done
    fi
  fi
fi
# figure out which class to run
if [ "$COMMAND" = "shell" ] ; then
  # find the hbase ruby sources
  if [ -d "$HBASE_HOME/lib/ruby" ]; then
    HBASE_OPTS="$HBASE_OPTS -Dhbase.ruby.sources=$HBASE_HOME/lib/ruby"
  else
    HBASE_OPTS="$HBASE_OPTS -Dhbase.ruby.sources=$HBASE_HOME/hbase-shell/src/main/ruby"
  fi
  HBASE_OPTS="$HBASE_OPTS $HBASE_SHELL_OPTS"
  CLASS="org.jruby.Main -X+O ${JRUBY_OPTS} ${HBASE_HOME}/bin/hirb.rb"
elif [ "$COMMAND" = "hbck" ] ; then
  # Look for the -j /path/to/HBCK2.jar parameter. Else pass through to hbck.
  case "${1}" in
    -j)
      # Found -j parameter. Add arg to CLASSPATH and set CLASS to HBCK2.
      shift
      JAR="${1}"
      if [ ! -f "${JAR}" ]; then
        echo "${JAR} file not found!"
        echo "Usage: hbase [<options>] hbck -j /path/to/HBCK2.jar [<args>]"
        exit 1
      fi
      CLASSPATH="${JAR}:${CLASSPATH}";
      CLASS="org.apache.hbase.HBCK2"
      shift # past argument=value
      ;;
    *)
      CLASS='org.apache.hadoop.hbase.util.HBaseFsck'
      ;;
  esac
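  # For example: "bin/hbase hbck" runs the read-only hbck1 checks, while
  # "bin/hbase hbck -j /path/to/HBCK2.jar <args>" runs HBCK2 from the given jar
  # (the jar path above is a placeholder).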
elif [ "$COMMAND" = "wal" ] ; then
  CLASS='org.apache.hadoop.hbase.wal.WALPrettyPrinter'
elif [ "$COMMAND" = "hfile" ] ; then
  CLASS='org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter'
elif [ "$COMMAND" = "zkcli" ] ; then
  CLASS="org.apache.hadoop.hbase.zookeeper.ZKMainServer"
  for f in $HBASE_HOME/lib/zkcli/*.jar; do
    CLASSPATH="${CLASSPATH}:$f";
  done
elif [ "$COMMAND" = "backup" ] ; then
  CLASS='org.apache.hadoop.hbase.backup.BackupDriver'
  if [ -n "${shaded_jar}" ] ; then
    for f in "${HBASE_HOME}"/lib/hbase-backup*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
        break
      fi
    done
  fi
elif [ "$COMMAND" = "restore" ] ; then
  CLASS='org.apache.hadoop.hbase.backup.RestoreDriver'
  if [ -n "${shaded_jar}" ] ; then
    for f in "${HBASE_HOME}"/lib/hbase-backup*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
        break
      fi
    done
    for f in "${HBASE_HOME}"/lib/commons-lang3*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
        break
      fi
    done
  fi
elif [ "$COMMAND" = "upgrade" ] ; then
  echo "This command was used to upgrade to HBase 0.96; it was removed in HBase 2.0.0."
  echo "Please follow the documentation at http://hbase.apache.org/book.html#upgrading."
  exit 1
elif [ "$COMMAND" = "snapshot" ] ; then
  SUBCOMMAND=$1
  shift
  if [ "$SUBCOMMAND" = "create" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.CreateSnapshot"
  elif [ "$SUBCOMMAND" = "info" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
  elif [ "$SUBCOMMAND" = "export" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.ExportSnapshot"
  else
    echo "Usage: hbase [<options>] snapshot <subcommand> [<args>]"
    echo "$options_string"
    echo ""
    echo "Subcommands:"
    echo "  create          Create a new snapshot of a table"
    echo "  info            Tool for dumping snapshot information"
    echo "  export          Export an existing snapshot"
    exit 1
  fi
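  # For example, "bin/hbase snapshot info <args>" dispatches to
  # org.apache.hadoop.hbase.snapshot.SnapshotInfo with the remaining arguments.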
elif [ "$COMMAND" = "master" ] ; then
  CLASS='org.apache.hadoop.hbase.master.HMaster'
  if [ "$1" != "stop" ] && [ "$1" != "clear" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS"
  fi
elif [ "$COMMAND" = "regionserver" ] ; then
  CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
  fi
elif [ "$COMMAND" = "thrift" ] ; then
  CLASS='org.apache.hadoop.hbase.thrift.ThriftServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_THRIFT_OPTS"
  fi
elif [ "$COMMAND" = "thrift2" ] ; then
  CLASS='org.apache.hadoop.hbase.thrift2.ThriftServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_THRIFT_OPTS"
  fi
elif [ "$COMMAND" = "rest" ] ; then
  CLASS='org.apache.hadoop.hbase.rest.RESTServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_REST_OPTS"
  fi
elif [ "$COMMAND" = "zookeeper" ] ; then
  CLASS='org.apache.hadoop.hbase.zookeeper.HQuorumPeer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_ZOOKEEPER_OPTS"
  fi
elif [ "$COMMAND" = "clean" ] ; then
  case $1 in
    --cleanZk|--cleanHdfs|--cleanAll)
      matches="yes" ;;
    *) ;;
  esac
  if [ $# -ne 1 -o "$matches" = "" ]; then
    echo "Usage: hbase clean (--cleanZk|--cleanHdfs|--cleanAll)"
    echo "Options: "
    echo "  --cleanZk   cleans HBase related data from ZooKeeper."
    echo "  --cleanHdfs cleans HBase related data from HDFS."
    echo "  --cleanAll  cleans HBase related data from both ZooKeeper and HDFS."
    exit 1;
  fi
  "$bin"/hbase-cleanup.sh --config ${HBASE_CONF_DIR} $@
  exit $?
elif [ "$COMMAND" = "mapredcp" ] ; then
  # If we didn't find a jar above, this will just be blank and the
  # check below will then default back to the internal classpath.
  shaded_jar="${shaded_mapreduce}"
  if [ "${INTERNAL_CLASSPATH}" != "true" ] && [ -f "${shaded_jar}" ]; then
    echo -n "${shaded_jar}"
    for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
      if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
         [[ ! "${f}" =~ ^.*/htrace-core\.jar$ ]] && \
         [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
        echo -n ":${f}"
      fi
    done
    echo ""
    exit 0
  fi
  CLASS='org.apache.hadoop.hbase.util.MapreduceDependencyClasspathTool'
elif [ "$COMMAND" = "classpath" ] ; then
  echo "$CLASSPATH"
  exit 0
elif [ "$COMMAND" = "pe" ] ; then
  CLASS='org.apache.hadoop.hbase.PerformanceEvaluation'
  HBASE_OPTS="$HBASE_OPTS $HBASE_PE_OPTS"
elif [ "$COMMAND" = "ltt" ] ; then
  CLASS='org.apache.hadoop.hbase.util.LoadTestTool'
  HBASE_OPTS="$HBASE_OPTS $HBASE_LTT_OPTS"
elif [ "$COMMAND" = "canary" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.CanaryTool'
  HBASE_OPTS="$HBASE_OPTS $HBASE_CANARY_OPTS"
elif [ "$COMMAND" = "version" ] ; then
  CLASS='org.apache.hadoop.hbase.util.VersionInfo'
elif [ "$COMMAND" = "regionsplitter" ] ; then
  CLASS='org.apache.hadoop.hbase.util.RegionSplitter'
elif [ "$COMMAND" = "rowcounter" ] ; then
  CLASS='org.apache.hadoop.hbase.mapreduce.RowCounter'
elif [ "$COMMAND" = "cellcounter" ] ; then
  CLASS='org.apache.hadoop.hbase.mapreduce.CellCounter'
elif [ "$COMMAND" = "pre-upgrade" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.PreUpgradeValidator'
elif [ "$COMMAND" = "completebulkload" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.BulkLoadHFilesTool'
elif [ "$COMMAND" = "hbtop" ] ; then
  CLASS='org.apache.hadoop.hbase.hbtop.HBTop'
  if [ -n "${shaded_jar}" ] ; then
    for f in "${HBASE_HOME}"/lib/hbase-hbtop*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
        break
      fi
    done
    for f in "${HBASE_HOME}"/lib/commons-lang3*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
        break
      fi
    done
  fi

  if [ -f "${HBASE_HOME}/conf/log4j-hbtop.properties" ] ; then
    HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j.configuration=file:${HBASE_HOME}/conf/log4j-hbtop.properties"
  fi
  HBASE_OPTS="${HBASE_OPTS} ${HBASE_HBTOP_OPTS}"
else
  CLASS=$COMMAND
fi
# Add lib/jdk11 jars to the classpath

if [ "${DEBUG}" = "true" ]; then
  echo "Deciding on addition of lib/jdk11 jars to the classpath"
fi

addJDK11Jars=false
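
# For example (illustrative), the detection below can be overridden with:
#   HBASE_JDK11=include bin/hbase shell    # always add lib/jdk11 jars
#   HBASE_JDK11=exclude bin/hbase shell    # never add them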
if [ "${HBASE_JDK11}" != "" ]; then
  # Use the passed Environment Variable HBASE_JDK11
  if [ "${HBASE_JDK11}" = "include" ]; then
    addJDK11Jars=true
    if [ "${DEBUG}" = "true" ]; then
      echo "HBASE_JDK11 set to 'include'; adding JDK11 jars to the classpath."
    fi
  elif [ "${HBASE_JDK11}" = "exclude" ]; then
    if [ "${DEBUG}" = "true" ]; then
      echo "HBASE_JDK11 set to 'exclude'; skipping JDK11 jars."
    fi
  else
    echo "[HBASE_JDK11] contains unsupported value(s) - ${HBASE_JDK11}. Ignoring passed value."
    echo "[HBASE_JDK11] supported values: [include, exclude]."
  fi
else
  # Use JDK detection
  JAVA=$JAVA_HOME/bin/java

  version=$($JAVA -version 2>&1 | awk -F '"' '/version/ {print $2}')
  # '-' check is for cases such as "13-ea"
  version_number=$(echo "$version" | cut -d'.' -f1 | cut -d'-' -f1)
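  # For example, a reported version of "11.0.7" reduces to 11 here, while an
  # early-access build reported as "13-ea" reduces to 13.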
  if [ "${DEBUG}" = "true" ]; then
    echo "HBASE_JDK11 not set, hence using JDK detection."
    echo "Extracted JDK version - ${version}, version_number - ${version_number}"
  fi

  if [[ "$version_number" -ge "11" ]]; then
    if [ "${DEBUG}" = "true" ]; then
      echo "Version ${version} is greater than or equal to 11; adding JDK11 jars to the classpath."
    fi
    addJDK11Jars=true
  elif [ "${DEBUG}" = "true" ]; then
    echo "Version ${version} is less than 11; skipping JDK11 jars."
  fi
fi

if [ "${addJDK11Jars}" = "true" ]; then
  add_jdk11_deps_to_classpath
  if [ "${DEBUG}" = "true" ]; then
    echo "Added JDK11 jars to classpath."
  fi
elif [ "${DEBUG}" = "true" ]; then
  echo "JDK11 jars skipped from classpath."
fi
# Have JVM dump heap if we run out of memory. Files will be in the 'launch directory'
# and are named like the following: java_pid21612.hprof. Apparently it doesn't
# 'cost' to have this flag enabled. It's a 1.6 flag only. See:
# http://blogs.sun.com/alanb/entry/outofmemoryerror_looks_a_bit_better
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.dir=$HBASE_LOG_DIR"
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE"
HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING"
HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH"
fi

# Enable security logging on the master and regionserver only
if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then
  HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,RFAS}"
else
  HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}"
fi

HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
# by now if we're running a command it means we need logging
for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do
  if [ -f "${f}" ]; then
    CLASSPATH="${CLASSPATH}:${f}"
    break
  fi
done
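
# For example (illustrative), setting DEBUG=true below dumps the computed
# classpath to stderr and adds -Xdiag, while a non-empty HBASE_NOEXEC runs java
# as a child process instead of exec'ing it:
#   DEBUG=true HBASE_NOEXEC=true bin/hbase version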
# Exec unless HBASE_NOEXEC is set.
export CLASSPATH
if [ "${DEBUG}" = "true" ]; then
  echo "classpath=${CLASSPATH}" >&2
  HBASE_OPTS="${HBASE_OPTS} -Xdiag"
fi

if [ "${HBASE_NOEXEC}" != "" ]; then
  "$JAVA" -Dproc_$COMMAND -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "$@"
else
  export JVM_PID="$$"
  exec "$JAVA" -Dproc_$COMMAND -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "$@"
fi