# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# *     http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# The hbase command script. Based on the hadoop command script, putting
# in hbase classes, libs and configurations ahead of hadoop's.
#
# TODO: Narrow the amount of duplicated code.
#
# Environment Variables:
#
#   JAVA_HOME                The java implementation to use. Overrides JAVA_HOME.
#
#   HBASE_CLASSPATH          Extra Java CLASSPATH entries.
#
#   HBASE_CLASSPATH_PREFIX   Extra Java CLASSPATH entries that should be
#                            prefixed to the system classpath.
#
#   HBASE_HEAPSIZE           The maximum amount of heap to use.
#                            Default is unset and uses the JVM's default setting
#                            (usually 1/4th of the available memory).
#
#   HBASE_LIBRARY_PATH       HBase additions to JAVA_LIBRARY_PATH for adding
#                            native libraries.
#
#   HBASE_OPTS               Extra Java runtime options.
#
#   HBASE_CONF_DIR           Alternate conf dir. Default is ${HBASE_HOME}/conf.
#
#   HBASE_ROOT_LOGGER        The root appender. Default is INFO,console
#
#   JRUBY_HOME               JRuby path: $JRUBY_HOME/lib/jruby.jar should exist.
#                            Defaults to the jar packaged with HBase.
#
#   JRUBY_OPTS               Extra options (eg '--1.9') passed to hbase.
#
#   HBASE_SHELL_OPTS         Extra options passed to the hbase shell.
#
#   HBASE_JSHELL_ARGS        Additional arguments passed to the jshell.
#                            Defaults to `--startup DEFAULT --startup PRINTING --startup hbase_startup.jsh`
#
#   HBASE_HBCK_OPTS          Extra options passed to hbck.
#                            Defaults to HBASE_SERVER_JAAS_OPTS if specified, or HBASE_REGIONSERVER_OPTS.
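#
# Illustrative invocation (the values below are placeholders, not defaults):
#
#   HBASE_HEAPSIZE=4G HBASE_CONF_DIR=/etc/hbase/conf ./bin/hbase shell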
bin=`dirname "$0"`
bin=`cd "$bin">/dev/null; pwd`
read -d '' options_string << EOF
  --config DIR            Configuration directory to use. Default: ./conf
  --hosts HOSTS           Override the list in 'regionservers' file
  --auth-as-server        Authenticate to ZooKeeper using servers configuration
  --internal-classpath    Skip attempting to use client facing jars (WARNING: unstable results between versions)
  --help or -h            Print this help message
EOF
show_usage() {
  echo "Usage: hbase [<options>] <command> [<args>]"
  echo "$options_string"
  echo ""
  echo "Commands:"
  echo "Some commands take arguments. Pass no args or -h for usage."
  echo "  shell            Run the HBase shell"
  echo "  hbck             Run the HBase 'fsck' tool. Defaults read-only hbck1."
  echo "                   Pass '-j /path/to/HBCK2.jar' to run hbase-2.x HBCK2."
  echo "  snapshot         Tool for managing snapshots"
  if [ "${in_omnibus_tarball}" = "true" ]; then
    echo "  wal              Write-ahead-log analyzer"
    echo "  hfile            Store file analyzer"
    echo "  zkcli            Run the ZooKeeper shell"
    echo "  master           Run an HBase HMaster node"
    echo "  regionserver     Run an HBase HRegionServer node"
    echo "  zookeeper        Run a ZooKeeper server"
    echo "  rest             Run an HBase REST server"
    echo "  thrift           Run the HBase Thrift server"
    echo "  thrift2          Run the HBase Thrift2 server"
    echo "  clean            Run the HBase clean up script"
  fi
  echo "  jshell           Run a jshell with HBase on the classpath"
  echo "  classpath        Dump hbase CLASSPATH"
  echo "  mapredcp         Dump CLASSPATH entries required by mapreduce"
  echo "  pe               Run PerformanceEvaluation"
  echo "  ltt              Run LoadTestTool"
  echo "  canary           Run the Canary tool"
  echo "  version          Print the version"
  echo "  backup           Backup tables for recovery"
  echo "  restore          Restore tables from existing backup image"
  echo "  completebulkload Run BulkLoadHFiles tool"
  echo "  regionsplitter   Run RegionSplitter tool"
  echo "  rowcounter       Run RowCounter tool"
  echo "  cellcounter      Run CellCounter tool"
  echo "  pre-upgrade      Run Pre-Upgrade validator tool"
  echo "  hbtop            Run HBTop tool"
  echo "  CLASSNAME        Run the class named CLASSNAME"
}
if [ "--help" = "$1" ] || [ "-h" = "$1" ]; then
  show_usage
  exit 0
fi
# This will set HBASE_HOME, etc.
. "$bin"/hbase-config.sh
cygwin=false
case "`uname`" in
CYGWIN*) cygwin=true;;
esac
# Detect if we are in hbase sources dir
in_dev_env=false
if [ -d "${HBASE_HOME}/target" ]; then
  in_dev_env=true
fi

# Detect if we are in the omnibus tarball
in_omnibus_tarball="false"
if [ -f "${HBASE_HOME}/bin/hbase-daemons.sh" ]; then
  in_omnibus_tarball="true"
fi
# if no args specified, show usage
if [ $# = 0 ]; then
  show_usage
  exit 1
fi

COMMAND=$1
shift

JAVA=$JAVA_HOME/bin/java
# override default settings for this command, if applicable
if [ -f "$HBASE_HOME/conf/hbase-env-$COMMAND.sh" ]; then
  . "$HBASE_HOME/conf/hbase-env-$COMMAND.sh"
fi
# establish a default value for HBASE_OPTS if it's not already set. For now,
# all we set is the garbage collector.
if [ -z "${HBASE_OPTS}" ] ; then
  major_version_number="$(parse_java_major_version "$(read_java_version)")"
  case "$major_version_number" in
  8|9|10)
    HBASE_OPTS="-XX:+UseConcMarkSweepGC"
    ;;
  11|*)
    HBASE_OPTS="-XX:+UseG1GC"
    ;;
  esac
fi
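# Illustrative example of the per-command override sourced above (no such file
# ships with HBase): a conf/hbase-env-master.sh containing
#   export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xms4g"
# would be applied only when this script is invoked as 'hbase master'.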
# add an 'm' suffix if the argument is missing one, otherwise use what's there
add_size_suffix() {
    local val="$1"
    local lastchar=${val: -1}
    if [[ "mMgG" == *$lastchar* ]]; then
      echo "$val"
    else
      echo "${val}m"
    fi
}

if [[ -n "$HBASE_HEAPSIZE" ]]; then
    JAVA_HEAP_MAX="-Xmx$(add_size_suffix $HBASE_HEAPSIZE)"
fi

if [[ -n "$HBASE_OFFHEAPSIZE" ]]; then
    JAVA_OFFHEAP_MAX="-XX:MaxDirectMemorySize=$(add_size_suffix $HBASE_OFFHEAPSIZE)"
fi
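# For example: HBASE_HEAPSIZE=4096 yields '-Xmx4096m', while HBASE_HEAPSIZE=8G
# already carries a size suffix and yields '-Xmx8G'.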
# so that filenames w/ spaces are handled correctly in loops below
ORIG_IFS=$IFS
IFS=

# CLASSPATH initially contains $HBASE_CONF_DIR
CLASSPATH="${HBASE_CONF_DIR}"
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
add_to_cp_if_exists() {
  if [ -d "$@" ]; then
    CLASSPATH=${CLASSPATH}:"$@"
  fi
}
# For releases, add hbase & webapps to CLASSPATH
# Webapps must come first else it messes up Jetty
if [ -d "$HBASE_HOME/hbase-webapps" ]; then
  add_to_cp_if_exists "${HBASE_HOME}"
fi
# add if we are in a dev environment
if [ -d "$HBASE_HOME/hbase-server/target/hbase-webapps" ]; then
  if [ "$COMMAND" = "thrift" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-thrift/target"
  elif [ "$COMMAND" = "thrift2" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-thrift/target"
  elif [ "$COMMAND" = "rest" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-rest/target"
  fi
  add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target"
  # Needed for GetJavaProperty check below
  add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target/classes"
fi
# If available, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH
# Allow this functionality to be disabled
if [ "$HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP" != "true" ] ; then
  HADOOP_IN_PATH=$(PATH="${HADOOP_HOME:-${HADOOP_PREFIX}}/bin:$PATH" which hadoop 2>/dev/null)
fi
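# Illustrative example: exporting HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP=true
# (typically in hbase-env.sh) skips this lookup, so no 'hadoop' binary is
# consulted while building the classpath.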
# Add libs to CLASSPATH
declare shaded_jar

if [ "${INTERNAL_CLASSPATH}" != "true" ]; then
  # find our shaded jars
  declare shaded_client
  declare shaded_client_byo_hadoop
  declare shaded_mapreduce
  for f in "${HBASE_HOME}"/lib/shaded-clients/hbase-shaded-client*.jar; do
    if [[ "${f}" =~ byo-hadoop ]]; then
      shaded_client_byo_hadoop="${f}"
    else
      shaded_client="${f}"
    fi
  done
  for f in "${HBASE_HOME}"/lib/shaded-clients/hbase-shaded-mapreduce*.jar; do
    shaded_mapreduce="${f}"
  done
  # If command can use our shaded client, use it
  declare -a commands_in_client_jar=("classpath" "version" "hbtop")
  for c in "${commands_in_client_jar[@]}"; do
    if [ "${COMMAND}" = "${c}" ]; then
      if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_client_byo_hadoop}"
      else
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_client}"
      fi
      break
    fi
  done
  # If command needs our shaded mapreduce, use it
  # N.B "mapredcp" is not included here because in the shaded case it skips our built classpath
  declare -a commands_in_mr_jar=("hbck" "snapshot" "regionsplitter" "pre-upgrade")
  for c in "${commands_in_mr_jar[@]}"; do
    if [ "${COMMAND}" = "${c}" ]; then
      # If we didn't find a jar above, this will just be blank and the
      # check below will then default back to the internal classpath.
      shaded_jar="${shaded_mapreduce}"
      break
    fi
  done
  # Some commands can only use shaded mapreduce when we'll get a full hadoop classpath at runtime
  if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
    declare -a commands_in_mr_need_hadoop=("backup" "restore" "rowcounter" "cellcounter")
    for c in "${commands_in_mr_need_hadoop[@]}"; do
      if [ "${COMMAND}" = "${c}" ]; then
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_mapreduce}"
        break
      fi
    done
  fi
fi
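# To make the selection above concrete (illustrative): 'hbase version' prefers the
# shaded client jar, 'hbase hbck' prefers the shaded mapreduce jar, and 'hbase backup'
# only uses the shaded mapreduce jar when a 'hadoop' binary was found above.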
if [ -n "${shaded_jar}" ] && [ -f "${shaded_jar}" ]; then
  CLASSPATH="${CLASSPATH}:${shaded_jar}"
# fall through to grabbing all the lib jars and hope we're in the omnibus tarball
#
# N.B. shell specifically can't rely on the shaded artifacts because RSGroups is only
# available as non-shaded
#
# N.B. pe and ltt can't easily rely on shaded artifacts because they live in hbase-mapreduce:test-jar
# and need some other jars that haven't been relocated. Currently enumerating that list
# is too hard to be worth it.
else
  for f in $HBASE_HOME/lib/*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  # make it easier to check for shaded/not later on.
  shaded_jar=""
fi

# here we will add slf4j-api, commons-logging, jul-to-slf4j, jcl-over-slf4j
# to classpath, as they are all logging bridges. Only exclude log4j* so we
# will not actually log anything out. Add it later if necessary
for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
  if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
     [[ "${f}" != "htrace-core.jar$" ]] && \
     [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
    CLASSPATH="${CLASSPATH}:${f}"
  fi
done

# redirect java.util.logging to slf4j
HBASE_OPTS="$HBASE_OPTS -Djava.util.logging.config.class=org.apache.hadoop.hbase.logging.JulToSlf4jInitializer"
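# Illustrative example: to bypass the shaded jars entirely and run against the
# individual jars under lib/, accepting the stability warning from the usage text:
#
#   ./bin/hbase --internal-classpath classpath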
# default log directory & file
if [ "$HBASE_LOG_DIR" = "" ]; then
  HBASE_LOG_DIR="$HBASE_HOME/logs"
fi
if [ "$HBASE_LOGFILE" = "" ]; then
  HBASE_LOGFILE='hbase.log'
fi
function append_path() {
  if [ -z "$1" ]; then
    echo "$2"
  else
    echo "$1:$2"
  fi
}

# if HBASE_LIBRARY_PATH is defined let's use it as first or second option
if [ "$HBASE_LIBRARY_PATH" != "" ]; then
  JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "$HBASE_LIBRARY_PATH")
fi
# If configured and available, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH
if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
  # If built hbase, temporarily add hbase-server*.jar to classpath for GetJavaProperty
  # Exclude hbase-server*-tests.jar
  temporary_cp=
  for f in "${HBASE_HOME}"/lib/hbase-server*.jar; do
    if [[ ! "${f}" =~ ^.*\-tests\.jar$ ]]; then
      temporary_cp=":$f"
    fi
  done
  HADOOP_JAVA_LIBRARY_PATH=$(HADOOP_CLASSPATH="$CLASSPATH${temporary_cp}" "${HADOOP_IN_PATH}" \
    org.apache.hadoop.hbase.util.GetJavaProperty java.library.path)
  if [ -n "$HADOOP_JAVA_LIBRARY_PATH" ]; then
    JAVA_LIBRARY_PATH=$(append_path "${JAVA_LIBRARY_PATH}" "$HADOOP_JAVA_LIBRARY_PATH")
  fi
  CLASSPATH=$(append_path "${CLASSPATH}" "$(${HADOOP_IN_PATH} classpath 2>/dev/null)")
else
  # Otherwise, if we're providing Hadoop we should include htrace 3 if we were built with a version that needs it.
  for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/htrace-core-3*.jar "${HBASE_HOME}"/lib/client-facing-thirdparty/htrace-core.jar; do
    if [ -f "${f}" ]; then
      CLASSPATH="${CLASSPATH}:${f}"
    fi
  done
fi
# Some commands require special handling when using shaded jars. For these cases, we rely on hbase-shaded-mapreduce
# instead of hbase-shaded-client* because we make use of some IA.Private classes that aren't in the latter. However,
# we don't invoke them using the "hadoop jar" command so we need to ensure there are some Hadoop classes available
# when we're not doing runtime hadoop classpath lookup.
#
# luckily the set of classes we need are those packaged in the shaded-client.
for c in "${commands_in_mr_jar[@]}"; do
  if [ "${COMMAND}" = "${c}" ] && [ -n "${shaded_jar}" ]; then
    CLASSPATH="${CLASSPATH}:${shaded_client:?We couldn't find the shaded client jar even though we did find the shaded MR jar. For command ${COMMAND} we need both. Please use --internal-classpath as a workaround.}"
  fi
done
# Add user-specified CLASSPATH last
if [ "$HBASE_CLASSPATH" != "" ]; then
  CLASSPATH=${CLASSPATH}:${HBASE_CLASSPATH}
fi

# Add user-specified CLASSPATH prefix first
if [ "$HBASE_CLASSPATH_PREFIX" != "" ]; then
  CLASSPATH=${HBASE_CLASSPATH_PREFIX}:${CLASSPATH}
fi
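# Illustrative example (the path is a placeholder):
#
#   HBASE_CLASSPATH=/opt/extra/my-coprocessor.jar ./bin/hbase classpath
#
# appends that jar after everything assembled above, while HBASE_CLASSPATH_PREFIX
# would place it ahead of the HBase jars instead.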
# cygwin path translation
if $cygwin; then
  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  HBASE_HOME=`cygpath -d "$HBASE_HOME"`
  HBASE_LOG_DIR=`cygpath -d "$HBASE_LOG_DIR"`
fi
if [ -d "${HBASE_HOME}/build/native" -o -d "${HBASE_HOME}/lib/native" ]; then
  if [ -z $JAVA_PLATFORM ]; then
    JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
  fi
  if [ -d "$HBASE_HOME/build/native" ]; then
    JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "${HBASE_HOME}/build/native/${JAVA_PLATFORM}/lib")
  fi

  if [ -d "${HBASE_HOME}/lib/native" ]; then
    JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "${HBASE_HOME}/lib/native/${JAVA_PLATFORM}")
  fi
fi
# cygwin path translation
if $cygwin; then
  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
fi

# restore ordinary behaviour
unset IFS
# Set the right GC options based on what we are running
declare -a server_cmds=("master" "regionserver" "thrift" "thrift2" "rest" "avro" "zookeeper")
for cmd in ${server_cmds[@]}; do
  if [[ $cmd == $COMMAND ]]; then
    server=true
    break
  fi
done

if [[ $server ]]; then
  HBASE_OPTS="$HBASE_OPTS $SERVER_GC_OPTS"
else
  HBASE_OPTS="$HBASE_OPTS $CLIENT_GC_OPTS"
fi
if [ -n "$HBASE_SERVER_JAAS_OPTS" ]; then
  AUTH_AS_SERVER_OPTS="$HBASE_SERVER_JAAS_OPTS"
else
  AUTH_AS_SERVER_OPTS="$HBASE_REGIONSERVER_OPTS"
fi

if [ "$AUTH_AS_SERVER" == "true" ]; then
  HBASE_OPTS="$HBASE_OPTS $AUTH_AS_SERVER_OPTS"
elif [ -z "$HBASE_HBCK_OPTS" ]; then
  # The default for hbck should be to use the auth-as-server args, for compatibility.
  HBASE_HBCK_OPTS="$AUTH_AS_SERVER_OPTS"
fi
# check if the command needs jline
declare -a jline_cmds=("zkcli" "org.apache.hadoop.hbase.zookeeper.ZKMainServer")
for cmd in "${jline_cmds[@]}"; do
  if [[ $cmd == "$COMMAND" ]]; then
    jline_needed=true
    break
  fi
done
# (1) for the commands which need jruby (see jruby_cmds defined below)
#     A. when JRUBY_HOME is specified explicitly, eg. export JRUBY_HOME=/usr/local/share/jruby
#        CLASSPATH and HBASE_OPTS are updated according to the specified JRUBY_HOME
#     B. when JRUBY_HOME is not specified explicitly
#        add jruby packaged with HBase to CLASSPATH
# (2) for other commands, do nothing

# check if the command needs jruby
declare -a jruby_cmds=("shell" "org.jruby.Main")
for cmd in "${jruby_cmds[@]}"; do
  if [[ $cmd == "$COMMAND" ]]; then
    jruby_needed=true
    break
  fi
done
add_maven_deps_to_classpath() {
  f="${HBASE_HOME}/hbase-build-configuration/target/$1"

  if [ ! -f "${f}" ]; then
    echo "As this is a development environment, we need ${f} to be generated from maven (command: mvn install -DskipTests)"
    exit 1
  fi
  CLASSPATH=${CLASSPATH}:$(cat "${f}")
}
add_jdk11_deps_to_classpath() {
  for f in ${HBASE_HOME}/lib/jdk11/*; do
    if [ -f "${f}" ]; then
      CLASSPATH="${CLASSPATH}:${f}"
    fi
  done
}
add_opentelemetry_agent() {
  if [ -e "${OPENTELEMETRY_JAVAAGENT_PATH}" ] ; then
    agent_jar="${OPENTELEMETRY_JAVAAGENT_PATH}"
  elif ! agent_jar=$(find -L "${HBASE_HOME}/lib/trace" -type f -name "opentelemetry-javaagent-*" 2>/dev/null); then
    # must be dev environment
    f="${HBASE_HOME}/hbase-build-configuration/target/cached_classpath.txt"
    if [ ! -f "${f}" ]; then
      echo "As this is a development environment, we need ${f} to be generated from maven (command: mvn install -DskipTests)"
      exit 1
    fi
    agent_jar=$(tr ':' '\n' < "${f}" | grep opentelemetry-javaagent)
  fi
  HBASE_OPTS="$HBASE_OPTS -javaagent:$agent_jar"
}
# Add the development env class path stuff
if $in_dev_env; then
  add_maven_deps_to_classpath "cached_classpath.txt"

  if [[ $jline_needed ]]; then
    add_maven_deps_to_classpath "cached_classpath_jline.txt"
  elif [[ $jruby_needed ]]; then
    add_maven_deps_to_classpath "cached_classpath_jruby.txt"
  fi
fi
# the command needs jruby
if [[ $jruby_needed ]]; then
  if [ "$JRUBY_HOME" != "" ]; then  # JRUBY_HOME is specified explicitly, eg. export JRUBY_HOME=/usr/local/share/jruby
    # add jruby.jar into CLASSPATH
    CLASSPATH="$JRUBY_HOME/lib/jruby.jar:$CLASSPATH"

    # add jruby to HBASE_OPTS
    HBASE_OPTS="$HBASE_OPTS -Djruby.home=$JRUBY_HOME -Djruby.lib=$JRUBY_HOME/lib"

  else  # JRUBY_HOME is not specified explicitly
    if ! $in_dev_env; then  # not in dev environment
      # add jruby packaged with HBase to CLASSPATH
      JRUBY_PACKAGED_WITH_HBASE="$HBASE_HOME/lib/ruby/*.jar"
      for jruby_jar in $JRUBY_PACKAGED_WITH_HBASE; do
        CLASSPATH=$jruby_jar:$CLASSPATH;
      done
    fi
  fi
fi
# figure out which class to run
if [ "$COMMAND" = "shell" ] ; then
  # find the hbase ruby sources
  # assume we are in a binary install if lib/ruby exists
  if [ -d "$HBASE_HOME/lib/ruby" ]; then
    # We want jruby to consume these things rather than our bootstrap script;
    # jruby will look for the env variable 'JRUBY_OPTS'.
    JRUBY_OPTS="${JRUBY_OPTS} -X+O"
    export JRUBY_OPTS
    # hbase-shell.jar contains a 'jar-bootstrap.rb'
    # https://github.com/jruby/jruby/wiki/StandaloneJarsAndClasses#standalone-executable-jar-files
    CLASS="org.jruby.JarBootstrapMain"
  # otherwise assume we are running in a source checkout
  else
    HBASE_OPTS="$HBASE_OPTS -Dhbase.ruby.sources=$HBASE_HOME/hbase-shell/src/main/ruby"
    CLASS="org.jruby.Main -X+O ${JRUBY_OPTS} ${HBASE_HOME}/hbase-shell/src/main/ruby/jar-bootstrap.rb"
  fi
  HBASE_OPTS="$HBASE_OPTS $HBASE_SHELL_OPTS"
elif [ "$COMMAND" = 'jshell' ] ; then
  java_version="$(read_java_version)"
  major_version_number="$(parse_java_major_version "${java_version}")"
  if [ "${major_version_number}" -lt 9 ] ; then
    echo "JShell is available only with JDK9 and later. Detected JDK version is ${java_version}."
    exit 1
  fi
  CLASS='jdk.internal.jshell.tool.JShellToolProvider'
  # set default values for HBASE_JSHELL_ARGS
  read -r -a JSHELL_ARGS <<< "${HBASE_JSHELL_ARGS:-"--startup DEFAULT --startup PRINTING --startup ${HBASE_HOME}/bin/hbase_startup.jsh"}"
  HBASE_OPTS="$HBASE_OPTS $HBASE_JSHELL_OPTS"
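  # Illustrative example: the default startup scripts can be replaced wholesale, e.g.
  #
  #   HBASE_JSHELL_ARGS="--startup DEFAULT" ./bin/hbase jshell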
elif [ "$COMMAND" = "hbck" ] ; then
  # Look for the -j /path/to/HBCK2.jar parameter. Else pass through to hbck.
  case "${1}" in
    -j)
      # Found -j parameter. Add arg to CLASSPATH and set CLASS to HBCK2.
      shift
      JAR="${1}"
      if [ ! -f "${JAR}" ]; then
        echo "${JAR} file not found!"
        echo "Usage: hbase [<options>] hbck -j /path/to/HBCK2.jar [<args>]"
        exit 1
      fi
      CLASSPATH="${JAR}:${CLASSPATH}";
      CLASS="org.apache.hbase.HBCK2"
      shift # past argument=value
      ;;
    *)
      CLASS='org.apache.hadoop.hbase.util.HBaseFsck'
      ;;
  esac
  HBASE_OPTS="$HBASE_OPTS $HBASE_HBCK_OPTS"
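  # Illustrative examples (the jar path is a placeholder):
  #
  #   ./bin/hbase hbck                                  # read-only hbck1 run
  #   ./bin/hbase hbck -j /path/to/HBCK2.jar <command>  # delegate to HBCK2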
elif [ "$COMMAND" = "wal" ] ; then
  CLASS='org.apache.hadoop.hbase.wal.WALPrettyPrinter'
elif [ "$COMMAND" = "hfile" ] ; then
  CLASS='org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter'
elif [ "$COMMAND" = "zkcli" ] ; then
  CLASS="org.apache.hadoop.hbase.zookeeper.ZKMainServer"
  for f in $HBASE_HOME/lib/zkcli/*.jar; do
    CLASSPATH="${CLASSPATH}:$f";
  done
elif [ "$COMMAND" = "backup" ] ; then
  CLASS='org.apache.hadoop.hbase.backup.BackupDriver'
  if [ -n "${shaded_jar}" ] ; then
    for f in "${HBASE_HOME}"/lib/hbase-backup*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
      fi
    done
  fi
elif [ "$COMMAND" = "restore" ] ; then
  CLASS='org.apache.hadoop.hbase.backup.RestoreDriver'
  if [ -n "${shaded_jar}" ] ; then
    for f in "${HBASE_HOME}"/lib/hbase-backup*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
      fi
    done
    for f in "${HBASE_HOME}"/lib/commons-lang3*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
      fi
    done
  fi
elif [ "$COMMAND" = "upgrade" ] ; then
  echo "This command was used to upgrade to HBase 0.96, it was removed in HBase 2.0.0."
  echo "Please follow the documentation at http://hbase.apache.org/book.html#upgrading."
  exit 1
elif [ "$COMMAND" = "snapshot" ] ; then
  SUBCOMMAND=$1
  shift
  if [ "$SUBCOMMAND" = "create" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.CreateSnapshot"
  elif [ "$SUBCOMMAND" = "info" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
  elif [ "$SUBCOMMAND" = "export" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.ExportSnapshot"
  else
    echo "Usage: hbase [<options>] snapshot <subcommand> [<args>]"
    echo "$options_string"
    echo ""
    echo "Subcommands:"
    echo "  create          Create a new snapshot of a table"
    echo "  info            Tool for dumping snapshot information"
    echo "  export          Export an existing snapshot"
    exit 1
  fi
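  # Illustrative example: the subcommand only selects the driver class; any remaining
  # arguments are handed to that tool unchanged, e.g. './bin/hbase snapshot info <tool args>'.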
elif [ "$COMMAND" = "master" ] ; then
  CLASS='org.apache.hadoop.hbase.master.HMaster'
  if [ "$1" != "stop" ] && [ "$1" != "clear" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS"
  fi
elif [ "$COMMAND" = "regionserver" ] ; then
  CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
  fi
elif [ "$COMMAND" = "thrift" ] ; then
  CLASS='org.apache.hadoop.hbase.thrift.ThriftServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_THRIFT_OPTS"
  fi
elif [ "$COMMAND" = "thrift2" ] ; then
  CLASS='org.apache.hadoop.hbase.thrift2.ThriftServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_THRIFT_OPTS"
  fi
elif [ "$COMMAND" = "rest" ] ; then
  CLASS='org.apache.hadoop.hbase.rest.RESTServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_REST_OPTS"
  fi
elif [ "$COMMAND" = "zookeeper" ] ; then
  CLASS='org.apache.hadoop.hbase.zookeeper.HQuorumPeer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_ZOOKEEPER_OPTS"
  fi
elif [ "$COMMAND" = "clean" ] ; then
  case $1 in
    --cleanZk|--cleanHdfs|--cleanAll)
      matches="yes" ;;
    *) ;;
  esac
  if [ $# -ne 1 -o "$matches" = "" ]; then
    echo "Usage: hbase clean (--cleanZk|--cleanHdfs|--cleanAll)"
    echo "  --cleanZk   cleans hbase related data from zookeeper."
    echo "  --cleanHdfs cleans hbase related data from hdfs."
    echo "  --cleanAll  cleans hbase related data from both zookeeper and hdfs."
    exit 1
  fi
  "$bin"/hbase-cleanup.sh --config ${HBASE_CONF_DIR} $@
  exit $?
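  # Illustrative example:
  #
  #   ./bin/hbase clean --cleanZk    # removes HBase-related data from ZooKeeper only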
elif [ "$COMMAND" = "mapredcp" ] ; then
  # If we didn't find a jar above, this will just be blank and the
  # check below will then default back to the internal classpath.
  shaded_jar="${shaded_mapreduce}"
  if [ "${INTERNAL_CLASSPATH}" != "true" ] && [ -f "${shaded_jar}" ]; then
    echo -n "${shaded_jar}"
    for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
      if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
         [ "${f}" != "htrace-core.jar$" ] && \
         [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
        echo -n ":${f}"
      fi
    done
    echo ""
    exit 0
  fi
  CLASS='org.apache.hadoop.hbase.util.MapreduceDependencyClasspathTool'
elif [ "$COMMAND" = "classpath" ] ; then
  echo "$CLASSPATH"
  exit 0
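  # Illustrative example: the printed classpaths are commonly captured into variables, e.g.
  #
  #   HADOOP_CLASSPATH=$(./bin/hbase mapredcp) hadoop jar <job jar> <main class> ...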
elif [ "$COMMAND" = "pe" ] ; then
  CLASS='org.apache.hadoop.hbase.PerformanceEvaluation'
  HBASE_OPTS="$HBASE_OPTS $HBASE_PE_OPTS"
elif [ "$COMMAND" = "ltt" ] ; then
  CLASS='org.apache.hadoop.hbase.util.LoadTestTool'
  HBASE_OPTS="$HBASE_OPTS $HBASE_LTT_OPTS"
elif [ "$COMMAND" = "canary" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.CanaryTool'
  HBASE_OPTS="$HBASE_OPTS $HBASE_CANARY_OPTS"
elif [ "$COMMAND" = "version" ] ; then
  CLASS='org.apache.hadoop.hbase.util.VersionInfo'
elif [ "$COMMAND" = "regionsplitter" ] ; then
  CLASS='org.apache.hadoop.hbase.util.RegionSplitter'
elif [ "$COMMAND" = "rowcounter" ] ; then
  CLASS='org.apache.hadoop.hbase.mapreduce.RowCounter'
elif [ "$COMMAND" = "cellcounter" ] ; then
  CLASS='org.apache.hadoop.hbase.mapreduce.CellCounter'
elif [ "$COMMAND" = "pre-upgrade" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.PreUpgradeValidator'
elif [ "$COMMAND" = "completebulkload" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.BulkLoadHFilesTool'
elif [ "$COMMAND" = "hbtop" ] ; then
  CLASS='org.apache.hadoop.hbase.hbtop.HBTop'
  if [ -n "${shaded_jar}" ] ; then
    for f in "${HBASE_HOME}"/lib/hbase-hbtop*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
      fi
    done
    for f in "${HBASE_HOME}"/lib/commons-lang3*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
      fi
    done
  fi
  if [ -f "${HBASE_HOME}/conf/log4j-hbtop.properties" ] ; then
    HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j.configuration=file:${HBASE_HOME}/conf/log4j-hbtop.properties"
  fi
  HBASE_OPTS="${HBASE_OPTS} ${HBASE_HBTOP_OPTS}"
else
  CLASS=$COMMAND
fi
# Add lib/jdk11 jars to the classpath

if [ "${DEBUG}" = "true" ]; then
  echo "Deciding on addition of lib/jdk11 jars to the classpath"
fi

addJDK11Jars=false

if [ "${HBASE_JDK11}" != "" ]; then
  # Use the passed Environment Variable HBASE_JDK11
  if [ "${HBASE_JDK11}" = "include" ]; then
    addJDK11Jars=true
    if [ "${DEBUG}" = "true" ]; then
      echo "HBASE_JDK11 set as 'include' hence adding JDK11 jars to classpath."
    fi
  elif [ "${HBASE_JDK11}" = "exclude" ]; then
    if [ "${DEBUG}" = "true" ]; then
      echo "HBASE_JDK11 set as 'exclude' hence skipping JDK11 jars from classpath."
    fi
  else
    echo "[HBASE_JDK11] contains unsupported value(s) - ${HBASE_JDK11}. Ignoring passed value."
    echo "[HBASE_JDK11] supported values: [include, exclude]."
  fi
else
  # Use JDK detection
  version="$(read_java_version)"
  major_version_number="$(parse_java_major_version "$version")"

  if [ "${DEBUG}" = "true" ]; then
    echo "HBASE_JDK11 not set hence using JDK detection."
    echo "Extracted JDK version - ${version}, major_version_number - ${major_version_number}"
  fi

  if [[ "$major_version_number" -ge "11" ]]; then
    if [ "${DEBUG}" = "true" ]; then
      echo "Version ${version} is greater-than/equal to 11 hence adding JDK11 jars to classpath."
    fi
    addJDK11Jars=true
  elif [ "${DEBUG}" = "true" ]; then
    echo "Version ${version} is less than 11 hence skipping JDK11 jars from classpath."
  fi
fi

if [ "${addJDK11Jars}" = "true" ]; then
  add_jdk11_deps_to_classpath
  if [ "${DEBUG}" = "true" ]; then
    echo "Added JDK11 jars to classpath."
  fi
elif [ "${DEBUG}" = "true" ]; then
  echo "JDK11 jars skipped from classpath."
fi
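# Illustrative example: the detection above can be overridden explicitly, e.g.
#
#   HBASE_JDK11=include ./bin/hbase version    # force lib/jdk11 jars onto the classpath
#   HBASE_JDK11=exclude ./bin/hbase version    # leave them off even on JDK 11+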
if [[ -n "${HBASE_TRACE_OPTS}" ]]; then
  if [ "${DEBUG}" = "true" ]; then
    echo "Attaching opentelemetry agent"
  fi
  add_opentelemetry_agent
fi
# Have JVM dump heap if we run out of memory. Files will be in the 'launch directory'
# and are named like the following: java_pid21612.hprof. Apparently it doesn't
# 'cost' to have this flag enabled. It's a 1.6 flag only. See:
# http://blogs.sun.com/alanb/entry/outofmemoryerror_looks_a_bit_better
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.dir=$HBASE_LOG_DIR"
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE"
HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING"
# log4j2 does not support setting log level and appender at once, so we need to split HBASE_ROOT_LOGGER
HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-INFO,console}
array=(${HBASE_ROOT_LOGGER//,/ })
HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.level=${array[0]}"
HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.appender=${array[1]}"
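# For example: HBASE_ROOT_LOGGER=DEBUG,console becomes
#   -Dhbase.root.logger.level=DEBUG -Dhbase.root.logger.appender=console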
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH"
fi
# Enable security logging on the master and regionserver only
if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then
  HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-INFO,RFAS}
else
  HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}
fi
array=(${HBASE_SECURITY_LOGGER//,/ })
HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.level=${array[0]}"
HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.appender=${array[1]}"
HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
# by now if we're running a command it means we need logging
for f in ${HBASE_HOME}/lib/client-facing-thirdparty/log4j*.jar; do
  if [ -f "${f}" ]; then
    CLASSPATH="${CLASSPATH}:${f}"
  fi
done
# Exec unless HBASE_NOEXEC is set.
export CLASSPATH
if [ "${DEBUG}" = "true" ]; then
  echo "classpath=${CLASSPATH}" >&2
  HBASE_OPTS="${HBASE_OPTS} -Xdiag"
fi

# resolve the command arguments
read -r -a CMD_ARGS <<< "$@"
if [ "${#JSHELL_ARGS[@]}" -gt 0 ] ; then
  CMD_ARGS=("${JSHELL_ARGS[@]}" "${CMD_ARGS[@]}")
fi

if [ "${HBASE_NOEXEC}" != "" ]; then
  "$JAVA" -Dproc_$COMMAND -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "${CMD_ARGS[@]}"
else
  exec "$JAVA" -Dproc_$COMMAND -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "${CMD_ARGS[@]}"
fi