/*
 * This is a common dao with basic CRUD operations and is not limited to any
 * persistent layer implementation
 *
 * Copyright (C) 2010 Imran M Yousuf (imyousuf@smartitengineering.com)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
19 package com
.smartitengineering
.dao
.impl
.hbase
;
import com.smartitengineering.dao.common.CommonReadDao;
import com.smartitengineering.dao.common.CommonWriteDao;
import com.smartitengineering.dao.common.queryparam.QueryParameter;
import com.smartitengineering.dao.impl.hbase.spi.ObjectRowConverter;
import com.smartitengineering.dao.impl.hbase.spi.SchemaInfoProvider;
import com.smartitengineering.domain.PersistentDTO;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * A generic DAO over HBase implementing the common read/write contract for
 * {@code PersistentDTO} derivatives. Only id-based lookups are supported on
 * the read side; the query-parameter based read operations throw
 * {@code UnsupportedOperationException}.
 * NOTE(review): the lazy accessors for configuration and table pool are not
 * synchronized — confirm initialization is single-threaded.
 */
public class AbstractCommonDao<Template extends PersistentDTO> implements CommonReadDao<Template>,
    CommonWriteDao<Template> {

  // Maximum number of table references the HTablePool keeps per table.
  public static final int DEFAULT_MAX = 1000;
  // Translates between domain objects and HBase rows (Put/Delete/Result).
  private ObjectRowConverter<Template> converter;
  // Supplies schema metadata, e.g. the main table name used by getById.
  private SchemaInfoProvider infoProvider;
  // Lazily created HBase client configuration; see getConfiguration().
  private Configuration configuration;
  // Lazily created pool of HTable handles; see getTablePool().
  private HTablePool tablePool;
57 public ObjectRowConverter
<Template
> getConverter() {
61 public void setConverter(ObjectRowConverter
<Template
> converter
) {
62 this.converter
= converter
;
65 public SchemaInfoProvider
getInfoProvider() {
69 public void setInfoProvider(SchemaInfoProvider infoProvider
) {
70 this.infoProvider
= infoProvider
;
73 public Configuration
getConfiguration() {
74 if (configuration
== null) {
75 configuration
= HBaseConfiguration
.create();
80 public HTablePool
getTablePool() {
81 if (tablePool
== null) {
82 tablePool
= new HTablePool(getConfiguration(), DEFAULT_MAX
);
92 * Unsupported read operations
95 public Set
<Template
> getAll() {
96 throw new UnsupportedOperationException("Not supported yet.");
100 public <OtherTemplate
> OtherTemplate
getOther(List
<QueryParameter
> query
) {
101 throw new UnsupportedOperationException("Not supported yet.");
105 public <OtherTemplate
> List
<OtherTemplate
> getOtherList(List
<QueryParameter
> query
) {
106 throw new UnsupportedOperationException("Not supported yet.");
110 * Supported read operations
113 public Set
<Template
> getByIds(List
<Integer
> ids
) {
114 LinkedHashSet
<Template
> set
= new LinkedHashSet
<Template
>(ids
.size());
115 for (Integer id
: ids
) {
116 set
.add(getById(id
));
122 public Template
getById(Integer id
) {
123 HTableInterface hTable
= null;
125 Get get
= new Get(Bytes
.toBytes(id
));
126 hTable
= getTablePool().getTable(infoProvider
.getMainTableName());
129 return getConverter().rowsToObject(result
);
131 catch (Exception ex
) {
132 throw new RuntimeException(ex
);
136 if (hTable
!= null) {
137 getTablePool().putTable(hTable
);
140 catch (Exception ex
) {
141 ex
.printStackTrace();
147 public Template
getSingle(List
<QueryParameter
> query
) {
148 throw new UnsupportedOperationException("Not supported yet.");
152 public List
<Template
> getList(List
<QueryParameter
> query
) {
153 throw new UnsupportedOperationException("Not supported yet.");
157 public Template
getSingle(QueryParameter
... query
) {
158 return getSingle(Arrays
.asList(query
));
162 public List
<Template
> getList(QueryParameter
... query
) {
163 return getList(Arrays
.asList(query
));
167 public <OtherTemplate
> OtherTemplate
getOther(QueryParameter
... query
) {
168 return this.<OtherTemplate
>getOther(Arrays
.asList(query
));
172 public <OtherTemplate
> List
<OtherTemplate
> getOtherList(QueryParameter
... query
) {
173 return this.<OtherTemplate
>getOtherList(Arrays
.asList(query
));
180 public void save(Template
... states
) {
181 LinkedHashMap
<String
, List
<Put
>> allPuts
= new LinkedHashMap
<String
, List
<Put
>>();
182 for (Template state
: states
) {
183 LinkedHashMap
<String
, Put
> puts
= getConverter().objectToRows(state
);
184 for (Map
.Entry
<String
, Put
> put
: puts
.entrySet()) {
185 final List
<Put
> putList
;
186 if (allPuts
.containsKey(put
.getKey())) {
187 putList
= allPuts
.get(put
.getKey());
190 putList
= new ArrayList
<Put
>();
191 allPuts
.put(put
.getKey(), putList
);
193 putList
.add(put
.getValue());
196 for (Map
.Entry
<String
, List
<Put
>> puts
: allPuts
.entrySet()) {
197 HTableInterface hTable
= null;
199 hTable
= getTablePool().getTable(puts
.getKey());
200 hTable
.put(puts
.getValue());
202 catch (Exception ex
) {
203 throw new RuntimeException(ex
);
207 if (hTable
!= null) {
208 getTablePool().putTable(hTable
);
211 catch (Exception ex
) {
212 ex
.printStackTrace();
// NOTE(review): only the signature of update(...) is visible in this
// extract — the method body was lost. Confirm its contract against the full
// source before documenting (it may mirror save(...) or be unsupported).
public void update(Template... states) {
// NOTE(review): this method appears truncated in the extract — the 'else',
// 'try'/'finally' openers and the closing braces are missing below, and the
// method's tail runs past the visible chunk. Tokens are preserved exactly as
// extracted; confirm against the full source. The visible logic mirrors
// save(...): group Delete mutations per table, then flush table by table.
public void delete(Template... states) {
  // Despite the names (allPuts/puts/putList), these collections hold Delete
  // mutations produced by objectToDeleteableRows.
  LinkedHashMap<String, List<Delete>> allPuts = new LinkedHashMap<String, List<Delete>>();
  for (Template state : states) {
    LinkedHashMap<String, Delete> puts = getConverter().objectToDeleteableRows(state);
    for (Map.Entry<String, Delete> put : puts.entrySet()) {
      final List<Delete> putList;
      if (allPuts.containsKey(put.getKey())) {
        putList = allPuts.get(put.getKey());
        // (extract is missing the 'else {' branch opener here)
        putList = new ArrayList<Delete>();
        allPuts.put(put.getKey(), putList);
        putList.add(put.getValue());
  // Flush phase: one pooled table handle per destination table.
  for (Map.Entry<String, List<Delete>> puts : allPuts.entrySet()) {
    HTableInterface hTable = null;
    // (extract is missing the 'try {' opener here)
    hTable = getTablePool().getTable(puts.getKey());
    hTable.delete(puts.getValue());
    catch (Exception ex) {
      throw new RuntimeException(ex);
    // (extract is missing the 'finally { try {' openers here)
    if (hTable != null) {
      getTablePool().putTable(hTable);
    catch (Exception ex) {
      ex.printStackTrace();