#! /usr/bin/env bash

#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# *     http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
# The hbase command script. Based on the hadoop command script, putting
# in hbase classes, libs and configurations ahead of hadoop's.
#
# TODO: Narrow the amount of duplicated code.
#
# Environment Variables:
#
#   JAVA_HOME                The java implementation to use.
#
#   HBASE_CLASSPATH          Extra Java CLASSPATH entries.
#
#   HBASE_CLASSPATH_PREFIX   Extra Java CLASSPATH entries that should be
#                            prefixed to the system classpath.
#
#   HBASE_HEAPSIZE           The maximum amount of heap to use.
#                            Default is unset and uses the JVM's default setting
#                            (usually 1/4th of the available memory).
#
#   HBASE_LIBRARY_PATH       HBase additions to JAVA_LIBRARY_PATH for adding
#                            native libraries.
#
#   HBASE_OPTS               Extra Java runtime options.
#
#   HBASE_CONF_DIR           Alternate conf dir. Default is ${HBASE_HOME}/conf.
#
#   HBASE_ROOT_LOGGER        The root appender. Default is INFO,console
#
#   JRUBY_HOME               JRuby path: $JRUBY_HOME/lib/jruby.jar should exist.
#                            Defaults to the jar packaged with HBase.
#
#   JRUBY_OPTS               Extra options (eg '--1.9') passed to hbase.
#                            Empty by default.
#
#   HBASE_SHELL_OPTS         Extra options passed to the hbase shell.
#                            Empty by default.
#
#   HBASE_HBCK_OPTS          Extra options passed to hbck.
#                            Defaults to HBASE_SERVER_JAAS_OPTS if specified, or HBASE_REGIONSERVER_OPTS.
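#
# For example, a one-off invocation might override some of these on the
# command line (the values below are illustrative only, not defaults):
#
#   HBASE_HEAPSIZE=4G HBASE_CONF_DIR=/etc/hbase/conf bin/hbase shell
#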
bin=`dirname "$0"`
bin=`cd "$bin">/dev/null; pwd`

read -d '' options_string << EOF
Options:
  --config DIR          Configuration directory to use. Default: ./conf
  --hosts HOSTS         Override the list in 'regionservers' file
  --auth-as-server      Authenticate to ZooKeeper using servers configuration
  --internal-classpath  Skip attempting to use client facing jars (WARNING: unstable results between versions)
  --help or -h          Print this help message
EOF
show_usage() {
  echo "Usage: hbase [<options>] <command> [<args>]"
  echo "$options_string"
  echo ""
  echo "Commands:"
  echo "Some commands take arguments. Pass no args or -h for usage."
  echo "  shell            Run the HBase shell"
  echo "  hbck             Run the HBase 'fsck' tool. Defaults to read-only hbck1."
  echo "                   Pass '-j /path/to/HBCK2.jar' to run hbase-2.x HBCK2."
  echo "  snapshot         Tool for managing snapshots"
  if [ "${in_omnibus_tarball}" = "true" ]; then
    echo "  wal              Write-ahead-log analyzer"
    echo "  hfile            Store file analyzer"
    echo "  zkcli            Run the ZooKeeper shell"
    echo "  master           Run an HBase HMaster node"
    echo "  regionserver     Run an HBase HRegionServer node"
    echo "  zookeeper        Run a ZooKeeper server"
    echo "  rest             Run an HBase REST server"
    echo "  thrift           Run the HBase Thrift server"
    echo "  thrift2          Run the HBase Thrift2 server"
    echo "  clean            Run the HBase clean up script"
  fi
  echo "  classpath        Dump hbase CLASSPATH"
  echo "  mapredcp         Dump CLASSPATH entries required by mapreduce"
  echo "  pe               Run PerformanceEvaluation"
  echo "  ltt              Run LoadTestTool"
  echo "  canary           Run the Canary tool"
  echo "  version          Print the version"
  echo "  completebulkload Run BulkLoadHFiles tool"
  echo "  regionsplitter   Run RegionSplitter tool"
  echo "  rowcounter       Run RowCounter tool"
  echo "  cellcounter      Run CellCounter tool"
  echo "  pre-upgrade      Run Pre-Upgrade validator tool"
  echo "  hbtop            Run HBTop tool"
  echo "  CLASSNAME        Run the class named CLASSNAME"
}

if [ "--help" = "$1" ] || [ "-h" = "$1" ]; then
  show_usage
  exit 0
fi
# This will set HBASE_HOME, etc.
. "$bin"/hbase-config.sh

cygwin=false
case "`uname`" in
CYGWIN*) cygwin=true;;
esac

# Detect if we are in hbase sources dir
in_dev_env=false
if [ -d "${HBASE_HOME}/target" ]; then
  in_dev_env=true
fi

# Detect if we are in the omnibus tarball
in_omnibus_tarball="false"
if [ -f "${HBASE_HOME}/bin/hbase-daemons.sh" ]; then
  in_omnibus_tarball="true"
fi

# if no args specified, show usage
if [ $# = 0 ]; then
  show_usage
  exit 1
fi

# get arguments
COMMAND=$1
shift
JAVA=$JAVA_HOME/bin/java

# override default settings for this command, if applicable
if [ -f "$HBASE_HOME/conf/hbase-env-$COMMAND.sh" ]; then
  . "$HBASE_HOME/conf/hbase-env-$COMMAND.sh"
fi

# establish a default value for HBASE_OPTS if it's not already set. For now,
# all we set is the garbage collector.
if [ -z "${HBASE_OPTS}" ] ; then
  major_version_number="$(parse_java_major_version "$(read_java_version)")"
  case "$major_version_number" in
  8|9|10)
    HBASE_OPTS="-XX:+UseConcMarkSweepGC"
    ;;
  11|*)
    HBASE_OPTS="-XX:+UseG1GC"
    ;;
  esac
  export HBASE_OPTS
fi
add_size_suffix() {
  # add an 'm' suffix if the argument is missing one, otherwise use what's there
  local val="$1"
  local lastchar=${val: -1}
  if [[ "mMgG" == *$lastchar* ]]; then
    echo $val
  else
    echo ${val}m
  fi
}
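# For illustration: add_size_suffix 4096 yields "4096m", while a value that
# already ends in m/M/g/G (e.g. "4G") is echoed unchanged.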
if [[ -n "$HBASE_HEAPSIZE" ]]; then
  JAVA_HEAP_MAX="-Xmx$(add_size_suffix $HBASE_HEAPSIZE)"
fi

if [[ -n "$HBASE_OFFHEAPSIZE" ]]; then
  JAVA_OFFHEAP_MAX="-XX:MaxDirectMemorySize=$(add_size_suffix $HBASE_OFFHEAPSIZE)"
fi

# so that filenames w/ spaces are handled correctly in loops below
ORIG_IFS=$IFS
IFS=

# CLASSPATH initially contains $HBASE_CONF_DIR
CLASSPATH="${HBASE_CONF_DIR}"
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
add_to_cp_if_exists() {
  if [ -d "$@" ]; then
    CLASSPATH=${CLASSPATH}:"$@"
  fi
}

# For releases, add hbase & webapps to CLASSPATH
# Webapps must come first else it messes up Jetty
if [ -d "$HBASE_HOME/hbase-webapps" ]; then
  add_to_cp_if_exists "${HBASE_HOME}"
fi
# add if we are in a dev environment
if [ -d "$HBASE_HOME/hbase-server/target/hbase-webapps" ]; then
  if [ "$COMMAND" = "thrift" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-thrift/target"
  elif [ "$COMMAND" = "thrift2" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-thrift/target"
  elif [ "$COMMAND" = "rest" ] ; then
    add_to_cp_if_exists "${HBASE_HOME}/hbase-rest/target"
  else
    add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target"
    # Needed for GetJavaProperty check below
    add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target/classes"
  fi
fi
# If available, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH
# Allow this functionality to be disabled
if [ "$HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP" != "true" ] ; then
  HADOOP_IN_PATH=$(PATH="${HADOOP_HOME:-${HADOOP_PREFIX}}/bin:$PATH" which hadoop 2>/dev/null)
fi

# Add libs to CLASSPATH
declare shaded_jar

if [ "${INTERNAL_CLASSPATH}" != "true" ]; then
  # find our shaded jars
  declare shaded_client
  declare shaded_client_byo_hadoop
  declare shaded_mapreduce
  for f in "${HBASE_HOME}"/lib/shaded-clients/hbase-shaded-client*.jar; do
    if [[ "${f}" =~ byo-hadoop ]]; then
      shaded_client_byo_hadoop="${f}"
    else
      shaded_client="${f}"
    fi
  done
  for f in "${HBASE_HOME}"/lib/shaded-clients/hbase-shaded-mapreduce*.jar; do
    shaded_mapreduce="${f}"
  done

  # If the command can use our shaded client, use it
  declare -a commands_in_client_jar=("classpath" "version" "hbtop")
  for c in "${commands_in_client_jar[@]}"; do
    if [ "${COMMAND}" = "${c}" ]; then
      if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_client_byo_hadoop}"
      else
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_client}"
      fi
      break
    fi
  done

  # If the command needs our shaded mapreduce, use it
  # N.B. "mapredcp" is not included here because in the shaded case it skips our built classpath
  declare -a commands_in_mr_jar=("hbck" "snapshot" "regionsplitter" "pre-upgrade")
  for c in "${commands_in_mr_jar[@]}"; do
    if [ "${COMMAND}" = "${c}" ]; then
      # If we didn't find a jar above, this will just be blank and the
      # check below will then default back to the internal classpath.
      shaded_jar="${shaded_mapreduce}"
      break
    fi
  done

  # Some commands can only use shaded mapreduce when we'll get a full hadoop classpath at runtime
  if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
    declare -a commands_in_mr_need_hadoop=("backup" "restore" "rowcounter" "cellcounter")
    for c in "${commands_in_mr_need_hadoop[@]}"; do
      if [ "${COMMAND}" = "${c}" ]; then
        # If we didn't find a jar above, this will just be blank and the
        # check below will then default back to the internal classpath.
        shaded_jar="${shaded_mapreduce}"
        break
      fi
    done
  fi
fi
if [ -n "${shaded_jar}" ] && [ -f "${shaded_jar}" ]; then
  CLASSPATH="${CLASSPATH}:${shaded_jar}"
# fall through to grabbing all the lib jars and hope we're in the omnibus tarball
#
# N.B. shell specifically can't rely on the shaded artifacts because RSGroups is only
# available as non-shaded
#
# N.B. pe and ltt can't easily rely on shaded artifacts because they live in hbase-mapreduce:test-jar
# and need some other jars that haven't been relocated. Currently enumerating that list
# is too hard to be worth it.
#
else
  for f in $HBASE_HOME/lib/*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  # make it easier to check for shaded/not later on.
  shaded_jar=""
fi

for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
  if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
     [ "${f}" != "htrace-core.jar$" ] && \
     [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
    CLASSPATH="${CLASSPATH}:${f}"
  fi
done

# redirect java.util.logging to slf4j
HBASE_OPTS="$HBASE_OPTS -Djava.util.logging.config.class=org.apache.hadoop.hbase.logging.JulToSlf4jInitializer"

# default log directory & file
if [ "$HBASE_LOG_DIR" = "" ]; then
  HBASE_LOG_DIR="$HBASE_HOME/logs"
fi
if [ "$HBASE_LOGFILE" = "" ]; then
  HBASE_LOGFILE='hbase.log'
fi
function append_path() {
  if [ -z "$1" ]; then
    echo "$2"
  else
    echo "$1:$2"
  fi
}
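# For illustration (the paths are just examples): append_path "" "/opt/native"
# prints "/opt/native", while append_path "/usr/lib" "/opt/native" prints
# "/usr/lib:/opt/native".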
JAVA_PLATFORM=""

# if HBASE_LIBRARY_PATH is defined lets use it as first or second option
if [ "$HBASE_LIBRARY_PATH" != "" ]; then
  JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "$HBASE_LIBRARY_PATH")
fi

# If configured and available, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH
if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
  # If built hbase, temporarily add hbase-server*.jar to classpath for GetJavaProperty
  # Exclude hbase-server*-tests.jar
  temporary_cp=
  for f in "${HBASE_HOME}"/lib/hbase-server*.jar; do
    if [[ ! "${f}" =~ ^.*\-tests\.jar$ ]]; then
      temporary_cp=":$f"
    fi
  done
  HADOOP_JAVA_LIBRARY_PATH=$(HADOOP_CLASSPATH="$CLASSPATH${temporary_cp}" "${HADOOP_IN_PATH}" \
    org.apache.hadoop.hbase.util.GetJavaProperty java.library.path)
  if [ -n "$HADOOP_JAVA_LIBRARY_PATH" ]; then
    JAVA_LIBRARY_PATH=$(append_path "${JAVA_LIBRARY_PATH}" "$HADOOP_JAVA_LIBRARY_PATH")
  fi
  CLASSPATH=$(append_path "${CLASSPATH}" "$(${HADOOP_IN_PATH} classpath 2>/dev/null)")
else
  # Otherwise, if we're providing Hadoop we should include htrace 3 if we were built with a version that needs it.
  for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/htrace-core-3*.jar "${HBASE_HOME}"/lib/client-facing-thirdparty/htrace-core.jar; do
    if [ -f "${f}" ]; then
      CLASSPATH="${CLASSPATH}:${f}"
      break
    fi
  done
  # Some commands require special handling when using shaded jars. For these cases, we rely on hbase-shaded-mapreduce
  # instead of hbase-shaded-client* because we make use of some IA.Private classes that aren't in the latter. However,
  # we don't invoke them using the "hadoop jar" command so we need to ensure there are some Hadoop classes available
  # when we're not doing runtime hadoop classpath lookup.
  #
  # luckily the set of classes we need are those packaged in the shaded-client.
  for c in "${commands_in_mr_jar[@]}"; do
    if [ "${COMMAND}" = "${c}" ] && [ -n "${shaded_jar}" ]; then
      CLASSPATH="${CLASSPATH}:${shaded_client:?We couldn\'t find the shaded client jar even though we did find the shaded MR jar. for command ${COMMAND} we need both. please use --internal-classpath as a workaround.}"
      break
    fi
  done
fi
# Add user-specified CLASSPATH last
if [ "$HBASE_CLASSPATH" != "" ]; then
  CLASSPATH=${CLASSPATH}:${HBASE_CLASSPATH}
fi

# Add user-specified CLASSPATH prefix first
if [ "$HBASE_CLASSPATH_PREFIX" != "" ]; then
  CLASSPATH=${HBASE_CLASSPATH_PREFIX}:${CLASSPATH}
fi

# cygwin path translation
if $cygwin; then
  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  HBASE_HOME=`cygpath -d "$HBASE_HOME"`
  HBASE_LOG_DIR=`cygpath -d "$HBASE_LOG_DIR"`
fi

if [ -d "${HBASE_HOME}/build/native" -o -d "${HBASE_HOME}/lib/native" ]; then
  if [ -z $JAVA_PLATFORM ]; then
    JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
  fi
  if [ -d "$HBASE_HOME/build/native" ]; then
    JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "${HBASE_HOME}/build/native/${JAVA_PLATFORM}/lib")
  fi

  if [ -d "${HBASE_HOME}/lib/native" ]; then
    JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "${HBASE_HOME}/lib/native/${JAVA_PLATFORM}")
  fi
fi

# cygwin path translation
if $cygwin; then
  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
fi

# restore ordinary behaviour
unset IFS
# Set the right GC options based on what we are running
declare -a server_cmds=("master" "regionserver" "thrift" "thrift2" "rest" "avro" "zookeeper")
for cmd in "${server_cmds[@]}"; do
  if [[ $cmd == $COMMAND ]]; then
    server=true
    break
  fi
done

if [[ $server ]]; then
  HBASE_OPTS="$HBASE_OPTS $SERVER_GC_OPTS"
else
  HBASE_OPTS="$HBASE_OPTS $CLIENT_GC_OPTS"
fi

if [ -n "$HBASE_SERVER_JAAS_OPTS" ]; then
  AUTH_AS_SERVER_OPTS="$HBASE_SERVER_JAAS_OPTS"
else
  AUTH_AS_SERVER_OPTS="$HBASE_REGIONSERVER_OPTS"
fi

if [ "$AUTH_AS_SERVER" == "true" ]; then
  HBASE_OPTS="$HBASE_OPTS $AUTH_AS_SERVER_OPTS"
elif [ -z "$HBASE_HBCK_OPTS" ]; then
  # The default for hbck should be to use auth-as-server args, for compatibility
  # with HBASE-15145
  HBASE_HBCK_OPTS="$AUTH_AS_SERVER_OPTS"
fi
# check if the command needs jline
declare -a jline_cmds=("zkcli" "org.apache.hadoop.hbase.zookeeper.ZKMainServer")
for cmd in "${jline_cmds[@]}"; do
  if [[ $cmd == "$COMMAND" ]]; then
    jline_needed=true
    break
  fi
done
# for jruby
# (1) for the commands which need jruby (see jruby_cmds defined below)
#     A. when JRUBY_HOME is specified explicitly, eg. export JRUBY_HOME=/usr/local/share/jruby
#        CLASSPATH and HBASE_OPTS are updated according to the JRUBY_HOME specified
#     B. when JRUBY_HOME is not specified explicitly
#        add jruby packaged with HBase to CLASSPATH
# (2) for other commands, do nothing
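#
# For example (the path is illustrative), an external JRuby can be selected with:
#   JRUBY_HOME=/usr/local/share/jruby bin/hbase shell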
# check if the command needs jruby
declare -a jruby_cmds=("shell" "org.jruby.Main")
for cmd in "${jruby_cmds[@]}"; do
  if [[ $cmd == "$COMMAND" ]]; then
    jruby_needed=true
    break
  fi
done
add_maven_deps_to_classpath() {
  f="${HBASE_HOME}/hbase-build-configuration/target/$1"

  if [ ! -f "${f}" ]; then
    echo "As this is a development environment, we need ${f} to be generated from maven (command: mvn install -DskipTests)"
    exit 1
  fi
  CLASSPATH=${CLASSPATH}:$(cat "${f}")
}

add_jdk11_deps_to_classpath() {
  for f in ${HBASE_HOME}/lib/jdk11/*; do
    if [ -f "${f}" ]; then
      CLASSPATH="${CLASSPATH}:${f}"
    fi
  done
}
# Add the development env class path stuff
if $in_dev_env; then
  add_maven_deps_to_classpath "cached_classpath.txt"

  if [[ $jline_needed ]]; then
    add_maven_deps_to_classpath "cached_classpath_jline.txt"
  elif [[ $jruby_needed ]]; then
    add_maven_deps_to_classpath "cached_classpath_jruby.txt"
  fi
fi

# the command needs jruby
if [[ $jruby_needed ]]; then
  if [ "$JRUBY_HOME" != "" ]; then  # JRUBY_HOME is specified explicitly, eg. export JRUBY_HOME=/usr/local/share/jruby
    # add jruby.jar into CLASSPATH
    CLASSPATH="$JRUBY_HOME/lib/jruby.jar:$CLASSPATH"

    # add jruby to HBASE_OPTS
    HBASE_OPTS="$HBASE_OPTS -Djruby.home=$JRUBY_HOME -Djruby.lib=$JRUBY_HOME/lib"

  else  # JRUBY_HOME is not specified explicitly
    if ! $in_dev_env; then  # not in dev environment
      # add jruby packaged with HBase to CLASSPATH
      JRUBY_PACKAGED_WITH_HBASE="$HBASE_HOME/lib/ruby/*.jar"
      for jruby_jar in $JRUBY_PACKAGED_WITH_HBASE; do
        CLASSPATH=$jruby_jar:$CLASSPATH;
      done
    fi
  fi
fi
# figure out which class to run
if [ "$COMMAND" = "shell" ] ; then
  # find the hbase ruby sources
  # assume we are in a binary install if lib/ruby exists
  if [ -d "$HBASE_HOME/lib/ruby" ]; then
    # We want jruby to consume these things rather than our bootstrap script;
    # jruby will look for the env variable 'JRUBY_OPTS'.
    JRUBY_OPTS="${JRUBY_OPTS} -X+O"
    export JRUBY_OPTS
    # hbase-shell.jar contains a 'jar-bootstrap.rb'
    # for more info see
    # https://github.com/jruby/jruby/wiki/StandaloneJarsAndClasses#standalone-executable-jar-files
    CLASS="org.jruby.JarBootstrapMain"
  # otherwise assume we are running in a source checkout
  else
    HBASE_OPTS="$HBASE_OPTS -Dhbase.ruby.sources=$HBASE_HOME/hbase-shell/src/main/ruby"
    CLASS="org.jruby.Main -X+O ${JRUBY_OPTS} ${HBASE_HOME}/hbase-shell/src/main/ruby/jar-bootstrap.rb"
  fi
  HBASE_OPTS="$HBASE_OPTS $HBASE_SHELL_OPTS"
elif [ "$COMMAND" = "hbck" ] ; then
  # Look for the -j /path/to/HBCK2.jar parameter. Else pass through to hbck.
  case "${1}" in
    -j)
      # Found -j parameter. Add arg to CLASSPATH and set CLASS to HBCK2.
      shift
      JAR="${1}"
      if [ ! -f "${JAR}" ]; then
        echo "${JAR} file not found!"
        echo "Usage: hbase [<options>] hbck -j /path/to/HBCK2.jar [<args>]"
        exit 1
      fi
      CLASSPATH="${JAR}:${CLASSPATH}";
      CLASS="org.apache.hbase.HBCK2"
      shift # past argument=value
      ;;
    *)
      CLASS='org.apache.hadoop.hbase.util.HBaseFsck'
      ;;
  esac
  HBASE_OPTS="$HBASE_OPTS $HBASE_HBCK_OPTS"
elif [ "$COMMAND" = "wal" ] ; then
  CLASS='org.apache.hadoop.hbase.wal.WALPrettyPrinter'
elif [ "$COMMAND" = "hfile" ] ; then
  CLASS='org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter'
elif [ "$COMMAND" = "zkcli" ] ; then
  CLASS="org.apache.hadoop.hbase.zookeeper.ZKMainServer"
  for f in $HBASE_HOME/lib/zkcli/*.jar; do
    CLASSPATH="${CLASSPATH}:$f";
  done
elif [ "$COMMAND" = "upgrade" ] ; then
  echo "This command was used to upgrade to HBase 0.96, it was removed in HBase 2.0.0."
  echo "Please follow the documentation at http://hbase.apache.org/book.html#upgrading."
  exit 1
elif [ "$COMMAND" = "snapshot" ] ; then
  SUBCOMMAND=$1
  shift
  if [ "$SUBCOMMAND" = "create" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.CreateSnapshot"
  elif [ "$SUBCOMMAND" = "info" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
  elif [ "$SUBCOMMAND" = "export" ] ; then
    CLASS="org.apache.hadoop.hbase.snapshot.ExportSnapshot"
  else
    echo "Usage: hbase [<options>] snapshot <subcommand> [<args>]"
    echo "$options_string"
    echo ""
    echo "Subcommands:"
    echo "  create          Create a new snapshot of a table"
    echo "  info            Tool for dumping snapshot information"
    echo "  export          Export an existing snapshot"
    exit 1
  fi
elif [ "$COMMAND" = "master" ] ; then
  CLASS='org.apache.hadoop.hbase.master.HMaster'
  if [ "$1" != "stop" ] && [ "$1" != "clear" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS"
  fi
elif [ "$COMMAND" = "regionserver" ] ; then
  CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
  fi
elif [ "$COMMAND" = "thrift" ] ; then
  CLASS='org.apache.hadoop.hbase.thrift.ThriftServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_THRIFT_OPTS"
  fi
elif [ "$COMMAND" = "thrift2" ] ; then
  CLASS='org.apache.hadoop.hbase.thrift2.ThriftServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_THRIFT_OPTS"
  fi
elif [ "$COMMAND" = "rest" ] ; then
  CLASS='org.apache.hadoop.hbase.rest.RESTServer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_REST_OPTS"
  fi
elif [ "$COMMAND" = "zookeeper" ] ; then
  CLASS='org.apache.hadoop.hbase.zookeeper.HQuorumPeer'
  if [ "$1" != "stop" ] ; then
    HBASE_OPTS="$HBASE_OPTS $HBASE_ZOOKEEPER_OPTS"
  fi
elif [ "$COMMAND" = "clean" ] ; then
  case $1 in
    --cleanZk|--cleanHdfs|--cleanAll)
      matches="yes" ;;
    *) ;;
  esac
  if [ $# -ne 1 -o "$matches" = "" ]; then
    echo "Usage: hbase clean (--cleanZk|--cleanHdfs|--cleanAll)"
    echo "Options: "
    echo "        --cleanZk   cleans hbase related data from zookeeper."
    echo "        --cleanHdfs cleans hbase related data from hdfs."
    echo "        --cleanAll  cleans hbase related data from both zookeeper and hdfs."
    exit 1;
  fi
  "$bin"/hbase-cleanup.sh --config ${HBASE_CONF_DIR} $@
  exit $?
elif [ "$COMMAND" = "mapredcp" ] ; then
  # If we didn't find a jar above, this will just be blank and the
  # check below will then default back to the internal classpath.
  shaded_jar="${shaded_mapreduce}"
  if [ "${INTERNAL_CLASSPATH}" != "true" ] && [ -f "${shaded_jar}" ]; then
    echo -n "${shaded_jar}"
    for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
      if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
         [ "${f}" != "htrace-core.jar$" ] && \
         [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
        echo -n ":${f}"
      fi
    done
    echo ""
    exit 0
  fi
  CLASS='org.apache.hadoop.hbase.util.MapreduceDependencyClasspathTool'
elif [ "$COMMAND" = "classpath" ] ; then
  echo "$CLASSPATH"
  exit 0
elif [ "$COMMAND" = "pe" ] ; then
  CLASS='org.apache.hadoop.hbase.PerformanceEvaluation'
  HBASE_OPTS="$HBASE_OPTS $HBASE_PE_OPTS"
elif [ "$COMMAND" = "ltt" ] ; then
  CLASS='org.apache.hadoop.hbase.util.LoadTestTool'
  HBASE_OPTS="$HBASE_OPTS $HBASE_LTT_OPTS"
elif [ "$COMMAND" = "canary" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.CanaryTool'
  HBASE_OPTS="$HBASE_OPTS $HBASE_CANARY_OPTS"
elif [ "$COMMAND" = "version" ] ; then
  CLASS='org.apache.hadoop.hbase.util.VersionInfo'
elif [ "$COMMAND" = "regionsplitter" ] ; then
  CLASS='org.apache.hadoop.hbase.util.RegionSplitter'
elif [ "$COMMAND" = "rowcounter" ] ; then
  CLASS='org.apache.hadoop.hbase.mapreduce.RowCounter'
elif [ "$COMMAND" = "cellcounter" ] ; then
  CLASS='org.apache.hadoop.hbase.mapreduce.CellCounter'
elif [ "$COMMAND" = "pre-upgrade" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.PreUpgradeValidator'
elif [ "$COMMAND" = "completebulkload" ] ; then
  CLASS='org.apache.hadoop.hbase.tool.BulkLoadHFilesTool'
elif [ "$COMMAND" = "hbtop" ] ; then
  CLASS='org.apache.hadoop.hbase.hbtop.HBTop'
  if [ -n "${shaded_jar}" ] ; then
    for f in "${HBASE_HOME}"/lib/hbase-hbtop*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
        break
      fi
    done
    for f in "${HBASE_HOME}"/lib/commons-lang3*.jar; do
      if [ -f "${f}" ]; then
        CLASSPATH="${CLASSPATH}:${f}"
        break
      fi
    done
  fi

  if [ -f "${HBASE_HOME}/conf/log4j-hbtop.properties" ] ; then
    HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j.configuration=file:${HBASE_HOME}/conf/log4j-hbtop.properties"
  fi
  HBASE_OPTS="${HBASE_OPTS} ${HBASE_HBTOP_OPTS}"
else
  CLASS=$COMMAND
fi
# Add lib/jdk11 jars to the classpath

if [ "${DEBUG}" = "true" ]; then
  echo "Deciding on addition of lib/jdk11 jars to the classpath"
fi

addJDK11Jars=false

if [ "${HBASE_JDK11}" != "" ]; then
  # Use the passed Environment Variable HBASE_JDK11
  if [ "${HBASE_JDK11}" = "include" ]; then
    addJDK11Jars=true
    if [ "${DEBUG}" = "true" ]; then
      echo "HBASE_JDK11 set as 'include' hence adding JDK11 jars to classpath."
    fi
  elif [ "${HBASE_JDK11}" = "exclude" ]; then
    if [ "${DEBUG}" = "true" ]; then
      echo "HBASE_JDK11 set as 'exclude' hence skipping JDK11 jars from classpath."
    fi
  else
    echo "[HBASE_JDK11] contains unsupported value(s) - ${HBASE_JDK11}. Ignoring passed value."
    echo "[HBASE_JDK11] supported values: [include, exclude]."
  fi
else
  # Use JDK detection
  version="$(read_java_version)"
  major_version_number="$(parse_java_major_version "$version")"

  if [ "${DEBUG}" = "true" ]; then
    echo "HBASE_JDK11 not set hence using JDK detection."
    echo "Extracted JDK version - ${version}, major_version_number - ${major_version_number}"
  fi

  if [[ "$major_version_number" -ge "11" ]]; then
    if [ "${DEBUG}" = "true" ]; then
      echo "Version ${version} is greater-than/equal to 11 hence adding JDK11 jars to classpath."
    fi
    addJDK11Jars=true
  elif [ "${DEBUG}" = "true" ]; then
    echo "Version ${version} is less than 11 hence skipping JDK11 jars from classpath."
  fi
fi

if [ "${addJDK11Jars}" = "true" ]; then
  add_jdk11_deps_to_classpath
  if [ "${DEBUG}" = "true" ]; then
    echo "Added JDK11 jars to classpath."
  fi
elif [ "${DEBUG}" = "true" ]; then
  echo "JDK11 jars skipped from classpath."
fi
# Have JVM dump heap if we run out of memory. Files will be in 'launch directory'
# and are named like the following: java_pid21612.hprof. Apparently it doesn't
# 'cost' to have this flag enabled. It's a 1.6 flag only. See:
# http://blogs.sun.com/alanb/entry/outofmemoryerror_looks_a_bit_better
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.dir=$HBASE_LOG_DIR"
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE"
HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING"
HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH"
fi

# Enable security logging on the master and regionserver only
if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then
  HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,RFAS}"
else
  HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}"
fi

HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
# by now if we're running a command it means we need logging
for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do
  if [ -f "${f}" ]; then
    CLASSPATH="${CLASSPATH}:${f}"
    break
  fi
done

# Exec unless HBASE_NOEXEC is set.
export CLASSPATH
if [ "${DEBUG}" = "true" ]; then
  echo "classpath=${CLASSPATH}" >&2
  HBASE_OPTS="${HBASE_OPTS} -Xdiag"
fi

if [ "${HBASE_NOEXEC}" != "" ]; then
  "$JAVA" -Dproc_$COMMAND -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "$@"
else
  export JVM_PID="$$"
  exec "$JAVA" -Dproc_$COMMAND -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "$@"
fi