Revert "HBASE-15572 Adding optional timestamp semantics to HBase-Spark (Weiqing Yang)"
hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/NewHBaseRDD.scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.spark

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.InputFormat
import org.apache.spark.rdd.NewHadoopRDD
import org.apache.spark.{InterruptibleIterator, Partition, SparkContext, TaskContext}

class NewHBaseRDD[K, V](@transient sc: SparkContext,
                        @transient inputFormatClass: Class[_ <: InputFormat[K, V]],
                        @transient keyClass: Class[K],
                        @transient valueClass: Class[V],
                        @transient conf: Configuration,
                        val hBaseContext: HBaseContext)
  extends NewHadoopRDD(sc, inputFormatClass, keyClass, valueClass, conf) {
  // Apply HBase credentials on the executor before delegating the actual
  // split computation to the parent NewHadoopRDD.
  override def compute(theSplit: Partition, context: TaskContext): InterruptibleIterator[(K, V)] = {
    hBaseContext.applyCreds()
    super.compute(theSplit, context)
  }
}
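
// A minimal usage sketch, not part of the original file: it shows how a
// NewHBaseRDD might be constructed directly so that applyCreds() runs before
// each split is computed on an executor. The table name "t1" is a
// hypothetical assumption for illustration; in practice this RDD is normally
// created for you via HBaseContext.hbaseRDD.
object NewHBaseRDDExample {
  import org.apache.hadoop.hbase.HBaseConfiguration
  import org.apache.hadoop.hbase.client.Result
  import org.apache.hadoop.hbase.io.ImmutableBytesWritable
  import org.apache.hadoop.hbase.mapreduce.TableInputFormat
  import org.apache.spark.SparkConf

  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("NewHBaseRDDExample"))
    val conf = HBaseConfiguration.create()
    conf.set(TableInputFormat.INPUT_TABLE, "t1") // hypothetical table name
    val hbaseContext = new HBaseContext(sc, conf)

    // Wrapping the scan in NewHBaseRDD ensures HBase credentials are applied
    // on the executor before the underlying Hadoop RDD computes a split.
    val rdd = new NewHBaseRDD(sc,
      classOf[TableInputFormat],
      classOf[ImmutableBytesWritable],
      classOf[Result],
      conf,
      hbaseContext)

    println(s"Rows scanned: ${rdd.count()}")
    sc.stop()
  }
}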