Add abstract common dao implementation for common write dao
[smart-dao.git] / smart-hbase-dao / src / main / java / com / smartitengineering / dao / impl / hbase / AbstractCommonDao.java
blob7bb875defd186205678b968f239341924ee00f19
1 /*
2 * This is a common dao with basic CRUD operations and is not limited to any
3 * persistent layer implementation
5 * Copyright (C) 2010 Imran M Yousuf (imyousuf@smartitengineering.com)
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 3 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 * You should have received a copy of the GNU Lesser General Public
16 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
19 package com.smartitengineering.dao.impl.hbase;
21 import com.smartitengineering.dao.common.CommonReadDao;
22 import com.smartitengineering.dao.common.CommonWriteDao;
23 import com.smartitengineering.dao.common.queryparam.QueryParameter;
24 import com.smartitengineering.dao.impl.hbase.spi.ObjectRowConverter;
25 import com.smartitengineering.dao.impl.hbase.spi.SchemaInfoProvider;
26 import com.smartitengineering.domain.PersistentDTO;
27 import java.util.ArrayList;
28 import java.util.Arrays;
29 import java.util.LinkedHashMap;
30 import java.util.LinkedHashSet;
31 import java.util.List;
32 import java.util.Map;
33 import java.util.Set;
34 import org.apache.hadoop.conf.Configuration;
35 import org.apache.hadoop.hbase.HBaseConfiguration;
36 import org.apache.hadoop.hbase.client.Delete;
37 import org.apache.hadoop.hbase.client.Get;
38 import org.apache.hadoop.hbase.client.HTableInterface;
39 import org.apache.hadoop.hbase.client.HTablePool;
40 import org.apache.hadoop.hbase.client.Put;
41 import org.apache.hadoop.hbase.client.Result;
42 import org.apache.hadoop.hbase.util.Bytes;
44 /**
46 * @author imyousuf
48 public class AbstractCommonDao<Template extends PersistentDTO> implements CommonReadDao<Template>,
49 CommonWriteDao<Template> {
51 public static final int DEFAULT_MAX = 1000;
52 private ObjectRowConverter<Template> converter;
53 private SchemaInfoProvider infoProvider;
54 private Configuration configuration;
55 private HTablePool tablePool;
57 public ObjectRowConverter<Template> getConverter() {
58 return converter;
61 public void setConverter(ObjectRowConverter<Template> converter) {
62 this.converter = converter;
65 public SchemaInfoProvider getInfoProvider() {
66 return infoProvider;
69 public void setInfoProvider(SchemaInfoProvider infoProvider) {
70 this.infoProvider = infoProvider;
73 public Configuration getConfiguration() {
74 if (configuration == null) {
75 configuration = HBaseConfiguration.create();
77 return configuration;
80 public HTablePool getTablePool() {
81 if (tablePool == null) {
82 tablePool = new HTablePool(getConfiguration(), DEFAULT_MAX);
84 return tablePool;
  /*
   * READ OPERATIONS
   */
  /*
   * Unsupported read operations
   */
94 @Override
95 public Set<Template> getAll() {
96 throw new UnsupportedOperationException("Not supported yet.");
99 @Override
100 public <OtherTemplate> OtherTemplate getOther(List<QueryParameter> query) {
101 throw new UnsupportedOperationException("Not supported yet.");
104 @Override
105 public <OtherTemplate> List<OtherTemplate> getOtherList(List<QueryParameter> query) {
106 throw new UnsupportedOperationException("Not supported yet.");
  /*
   * Supported read operations
   */
112 @Override
113 public Set<Template> getByIds(List<Integer> ids) {
114 LinkedHashSet<Template> set = new LinkedHashSet<Template>(ids.size());
115 for (Integer id : ids) {
116 set.add(getById(id));
118 return set;
121 @Override
122 public Template getById(Integer id) {
123 HTableInterface hTable = null;
124 try {
125 Get get = new Get(Bytes.toBytes(id));
126 hTable = getTablePool().getTable(infoProvider.getMainTableName());
127 Result result =
128 hTable.get(get);
129 return getConverter().rowsToObject(result);
131 catch (Exception ex) {
132 throw new RuntimeException(ex);
134 finally {
135 try {
136 if (hTable != null) {
137 getTablePool().putTable(hTable);
140 catch (Exception ex) {
141 ex.printStackTrace();
146 @Override
147 public Template getSingle(List<QueryParameter> query) {
148 throw new UnsupportedOperationException("Not supported yet.");
151 @Override
152 public List<Template> getList(List<QueryParameter> query) {
153 throw new UnsupportedOperationException("Not supported yet.");
156 @Override
157 public Template getSingle(QueryParameter... query) {
158 return getSingle(Arrays.asList(query));
161 @Override
162 public List<Template> getList(QueryParameter... query) {
163 return getList(Arrays.asList(query));
166 @Override
167 public <OtherTemplate> OtherTemplate getOther(QueryParameter... query) {
168 return this.<OtherTemplate>getOther(Arrays.asList(query));
171 @Override
172 public <OtherTemplate> List<OtherTemplate> getOtherList(QueryParameter... query) {
173 return this.<OtherTemplate>getOtherList(Arrays.asList(query));
  /*
   * WRITE OPERATIONS
   */
179 @Override
180 public void save(Template... states) {
181 LinkedHashMap<String, List<Put>> allPuts = new LinkedHashMap<String, List<Put>>();
182 for (Template state : states) {
183 LinkedHashMap<String, Put> puts = getConverter().objectToRows(state);
184 for (Map.Entry<String, Put> put : puts.entrySet()) {
185 final List<Put> putList;
186 if (allPuts.containsKey(put.getKey())) {
187 putList = allPuts.get(put.getKey());
189 else {
190 putList = new ArrayList<Put>();
191 allPuts.put(put.getKey(), putList);
193 putList.add(put.getValue());
196 for (Map.Entry<String, List<Put>> puts : allPuts.entrySet()) {
197 HTableInterface hTable = null;
198 try {
199 hTable = getTablePool().getTable(puts.getKey());
200 hTable.put(puts.getValue());
202 catch (Exception ex) {
203 throw new RuntimeException(ex);
205 finally {
206 try {
207 if (hTable != null) {
208 getTablePool().putTable(hTable);
211 catch (Exception ex) {
212 ex.printStackTrace();
218 @Override
219 public void update(Template... states) {
220 save(states);
223 @Override
224 public void delete(Template... states) {
225 LinkedHashMap<String, List<Delete>> allPuts = new LinkedHashMap<String, List<Delete>>();
226 for (Template state : states) {
227 LinkedHashMap<String, Delete> puts = getConverter().objectToDeleteableRows(state);
228 for (Map.Entry<String, Delete> put : puts.entrySet()) {
229 final List<Delete> putList;
230 if (allPuts.containsKey(put.getKey())) {
231 putList = allPuts.get(put.getKey());
233 else {
234 putList = new ArrayList<Delete>();
235 allPuts.put(put.getKey(), putList);
237 putList.add(put.getValue());
240 for (Map.Entry<String, List<Delete>> puts : allPuts.entrySet()) {
241 HTableInterface hTable = null;
242 try {
243 hTable = getTablePool().getTable(puts.getKey());
244 hTable.delete(puts.getValue());
246 catch (Exception ex) {
247 throw new RuntimeException(ex);
249 finally {
250 try {
251 if (hTable != null) {
252 getTablePool().putTable(hTable);
255 catch (Exception ex) {
256 ex.printStackTrace();